Commit 0410fb86 authored by Stan Hu

Fix CI instance variable cache misses

Previously the CI instance variable cache always attempted to read a
timestamp from Redis to determine whether the cached value was
up-to-date. The problem was that in a system with multiple processes
reading from the cache, the timestamp would constantly be advancing,
making it appear as though the cache were always stale. Not only did
this incur one Redis call per lookup, but it also resulted in
frequent cache misses.
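
To make the failure mode concrete, here is a minimal sketch of that protocol
(plain hashes stand in for Redis and for each process's memory store; the key
name and data are made up). The removed helper appears in full later in this
diff; the point is that every refresh advances the shared timestamp, so each
process keeps judging its own copy stale:

```ruby
shared_backend = {}              # stand-in for Rails.cache (Redis): holds only a timestamp
processes = { a: {}, b: {} }     # one in-memory cache per process

refresh = lambda do |name|
  now = Time.now.to_f
  processes[name][:ci_data] = { data: "variables loaded by #{name}", cached_at: now }
  shared_backend[:ci_data] = now # every refresh advances the shared timestamp
end

stale = lambda do |name|
  local = processes[name][:ci_data]
  local.nil? || shared_backend[:ci_data].to_f > local[:cached_at].to_f
end

refresh.call(:a)        # process A warms its cache
sleep 0.01
refresh.call(:b)        # process B misses (its cache is empty) and refreshes
puts stale.call(:a)     # true  -- A's copy now looks stale, so A refreshes...
sleep 0.01
refresh.call(:a)
puts stale.call(:b)     # true  -- ...which in turn makes B's copy look stale again
```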

Since we mostly just use this cached data set in the creation of
pipelines, we can likely live with a 30-second delay between the time
a variable is set and the time it goes live.
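
A short sketch of the new read path under that assumption, using
ActiveSupport::Cache::MemoryStore the way Gitlab::ProcessMemoryCache exposes
one per process. The 0.1-second TTL is scaled down from the real 30 seconds
purely so the expiry is observable in a quick run, and the `db` array is a
stand-in for the instance variables table:

```ruby
require "active_support/all"

# Per-process cache; the read path never touches Redis.
cache = ActiveSupport::Cache::MemoryStore.new
TTL = 0.1.seconds # demo value; the commit uses 30.seconds

read_variables = lambda do |db|
  cache.fetch(:ci_instance_variable_data, expires_in: TTL) { db.dup }
end

db = ["OLD_VALUE"]
p read_variables.call(db)   # ["OLD_VALUE"]  -- cold cache, reads the "database"

db << "NEW_VALUE"           # a variable is set elsewhere in the system
p read_variables.call(db)   # ["OLD_VALUE"]  -- still cached; the change is not live yet

sleep 0.15
p read_variables.call(db)   # ["OLD_VALUE", "NEW_VALUE"] -- TTL lapsed, now live
```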

For the `gitlab-org/gitlab` project, this should reduce the overall
pipeline creation time by around 1.5 seconds (11% of the time), not to
mention reduce the network and memory overhead.

Relates to https://gitlab.com/gitlab-org/gitlab/-/issues/322386

Changelog: fixed
parent 0bd9840e
app/models/ci/instance_variable.rb
@@ -2,7 +2,6 @@
 
 module Ci
   class InstanceVariable < Ci::ApplicationRecord
-    extend Gitlab::ProcessMemoryCache::Helper
     include Ci::NewHasVariable
     include Ci::Maskable
     include Limitable
@@ -36,15 +35,23 @@ module Ci
         cached_data[:unprotected]
       end
 
+      def invalidate_memory_cache(key)
+        cache_backend.delete(key)
+      end
+
       private
 
       def cached_data
-        fetch_memory_cache(:ci_instance_variable_data) do
+        cache_backend.fetch(:ci_instance_variable_data, expires_in: 30.seconds) do
           all_records = unscoped.all.to_a
 
           { all: all_records, unprotected: all_records.reject(&:protected?) }
         end
       end
+
+      def cache_backend
+        Gitlab::ProcessMemoryCache.cache_backend
+      end
     end
   end
 end
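
For illustration only, a standalone class mirroring the pattern in the hunk
above. The `InstanceVariables` name, the `load_from_db` helper, and the idea
that a write path (for example an `after_commit` callback, not shown in this
commit) calls `invalidate_memory_cache` are assumptions; an
`ActiveSupport::Cache::MemoryStore` stands in for
`Gitlab::ProcessMemoryCache.cache_backend`:

```ruby
require "active_support/all"

# Hypothetical stand-in class; only the fetch/delete pattern comes from the diff.
class InstanceVariables
  CACHE = ActiveSupport::Cache::MemoryStore.new # per-process, like ProcessMemoryCache

  class << self
    def all_cached
      cached_data[:all]
    end

    def unprotected_cached
      cached_data[:unprotected]
    end

    # The writing process drops its own copy so it sees the change immediately;
    # every other process converges once its 30-second TTL lapses.
    def invalidate_memory_cache(key)
      CACHE.delete(key)
    end

    private

    def cached_data
      CACHE.fetch(:ci_instance_variable_data, expires_in: 30.seconds) do
        all_records = load_from_db
        { all: all_records, unprotected: all_records.reject { |r| r[:protected] } }
      end
    end

    def load_from_db
      puts "loading instance variables..." # placeholder for `unscoped.all.to_a`
      [{ key: "TOKEN", protected: true }, { key: "DEBUG", protected: false }]
    end
  end
end

p InstanceVariables.unprotected_cached                                 # queries once, then caches
p InstanceVariables.all_cached                                         # served from process memory
InstanceVariables.invalidate_memory_cache(:ci_instance_variable_data)  # e.g. after saving a variable
p InstanceVariables.all_cached                                         # reloads fresh data
```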
lib/gitlab/process_memory_cache/helper.rb (deleted)
# frozen_string_literal: true

module Gitlab
  class ProcessMemoryCache
    module Helper
      def fetch_memory_cache(key, &payload)
        cache = cache_backend.read(key)

        if cache && !stale_cache?(key, cache)
          cache[:data]
        else
          store_cache(key, &payload)
        end
      end

      def invalidate_memory_cache(key)
        touch_cache_timestamp(key)
      end

      private

      def touch_cache_timestamp(key, time = Time.current.to_f)
        shared_backend.write(key, time)
      end

      def stale_cache?(key, cache_info)
        shared_timestamp = shared_backend.read(key)
        return true unless shared_timestamp

        shared_timestamp.to_f > cache_info[:cached_at].to_f
      end

      def store_cache(key)
        data = yield
        time = Time.current.to_f

        cache_backend.write(key, data: data, cached_at: time)
        touch_cache_timestamp(key, time)

        data
      end

      def shared_backend
        Rails.cache
      end

      def cache_backend
        ::Gitlab::ProcessMemoryCache.cache_backend
      end
    end
  end
end
spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -252,7 +252,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
       extra_jobs = 2
       non_handled_sql_queries = 2
 
-      # 1. Ci::InstanceVariable Load => `Ci::InstanceVariable#cached_data` => already cached with `fetch_memory_cache`
+      # 1. Ci::InstanceVariable Load => `Ci::InstanceVariable#cached_data` => already cached with `ProcessMemoryCache`
       # 2. Ci::Variable Load => `Project#ci_variables_for` => already cached with `Gitlab::SafeRequestStore`
 
       extra_jobs * non_handled_sql_queries
...
spec/lib/gitlab/process_memory_cache/helper_spec.rb (deleted)
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::ProcessMemoryCache::Helper, :use_clean_rails_memory_store_caching do
  let(:minimal_test_class) do
    Class.new do
      include Gitlab::ProcessMemoryCache::Helper

      def cached_content
        fetch_memory_cache(:cached_content_instance_key) { expensive_computation }
      end

      def clear_cached_content
        invalidate_memory_cache(:cached_content_instance_key)
      end
    end
  end

  before do
    stub_const("MinimalTestClass", minimal_test_class)
  end

  subject { MinimalTestClass.new }

  describe '.fetch_memory_cache' do
    it 'memoizes the result' do
      is_expected.to receive(:expensive_computation).once.and_return(1)

      2.times do
        expect(subject.cached_content).to eq(1)
      end
    end

    it 'resets the cache when the shared key is missing', :aggregate_failures do
      expect(Rails.cache).to receive(:read).with(:cached_content_instance_key).twice.and_return(nil)
      is_expected.to receive(:expensive_computation).thrice.and_return(1, 2, 3)

      3.times do |index|
        expect(subject.cached_content).to eq(index + 1)
      end
    end
  end

  describe '.invalidate_memory_cache' do
    it 'invalidates the cache' do
      is_expected.to receive(:expensive_computation).twice.and_return(1, 2)

      expect { subject.clear_cached_content }.to change { subject.cached_content }
    end
  end
end