Commit 21b3bc3e authored by Heinrich Lee Yu

Batch loading of open issues count from Redis

For API endpoints that return a list of projects, we batch the fetching of
open issue counts using MSET to reduce the number of Redis calls.

Changelog: performance
parent 017dfebd
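
The pattern being extended here is the lazy batching already used for forks_count: calling the count method on a project only queues it, and the first time any queued value is read, one block resolves every queued project together. A minimal, self-contained sketch of that pattern (the Project struct and fake_redis_mget helper are hypothetical stand-ins for the real models and Projects::BatchOpenIssuesCountService):

require 'batch_loader'

Project = Struct.new(:id)

def open_issues_count(project)
  BatchLoader.for(project).batch(replace_methods: false) do |projects, loader|
    # One round trip for every queued project (MGET/MSET in the real service).
    counts = fake_redis_mget(projects.map(&:id))
    projects.zip(counts).each { |proj, count| loader.call(proj, count.to_i) }
  end
end

def fake_redis_mget(ids)
  ids.map { |id| id * 10 } # hypothetical stand-in for one multi-key Redis read
end

projects = (1..3).map { |id| Project.new(id) }
lazy_counts = projects.map { |p| open_issues_count(p) } # nothing fetched yet
p lazy_counts.map(&:to_i) # first access runs one batch => [10, 20, 30]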
@@ -4,9 +4,10 @@ module RendersProjectsList
def prepare_projects_for_rendering(projects)
preload_max_member_access_for_collection(Project, projects)
# Call the forks count method on every project, so the BatchLoader would load them all at
# Call the count methods on every project, so the BatchLoader would load them all at
# once when the entities are rendered
projects.each(&:forks_count)
projects.each(&:open_issues_count)
projects
end
......
@@ -1805,7 +1805,15 @@ class Project < ApplicationRecord
# rubocop: disable CodeReuse/ServiceClass
def open_issues_count(current_user = nil)
Projects::OpenIssuesCountService.new(self, current_user).count
return Projects::OpenIssuesCountService.new(self, current_user).count unless current_user.nil?
BatchLoader.for(self).batch(replace_methods: false) do |projects, loader|
issues_count_per_project = ::Projects::BatchOpenIssuesCountService.new(projects).refresh_cache_and_retrieve_data
issues_count_per_project.each do |project, count|
loader.call(project, count)
end
end
end
# rubocop: enable CodeReuse/ServiceClass
@@ -2285,7 +2293,7 @@ class Project < ApplicationRecord
# rubocop: disable CodeReuse/ServiceClass
def forks_count
BatchLoader.for(self).batch do |projects, loader|
BatchLoader.for(self).batch(replace_methods: false) do |projects, loader|
fork_count_per_project = ::Projects::BatchForksCountService.new(projects).refresh_cache_and_retrieve_data
fork_count_per_project.each do |project, count|
......
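
Both counters now share this shape, and both pass replace_methods: false so that batch-loader keeps routing calls through method_missing instead of copying the loaded value's methods onto the lazy proxy after the first load; for a result that is just an Integer, that copying is pure overhead. A small sketch of the option, assuming the gem's documented behaviour:

require 'batch_loader'

lazy = BatchLoader.for(:only_item).batch(replace_methods: false) do |items, loader|
  items.each { |item| loader.call(item, 42) } # pretend this read a cached count
end

puts lazy + 1 # => 43, resolved lazily via method_missing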
@@ -9,13 +9,19 @@ module Projects
@projects = projects
end
def refresh_cache
@projects.each do |project|
service = count_service.new(project)
unless service.count_stored?
service.refresh_cache { global_count[project.id].to_i }
def refresh_cache_and_retrieve_data
count_services = @projects.map { |project| count_service.new(project) }
services_by_cache_key = count_services.index_by(&:cache_key)
results = Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
Rails.cache.fetch_multi(*services_by_cache_key.keys) do |key|
service = services_by_cache_key[key]
global_count[service.project.id].to_i
end
end
results.transform_keys! { |cache_key| services_by_cache_key[cache_key].project }
end
def project_ids
......
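
With this, the shared base service builds one per-project count service, indexes the services by cache key, and reads every key through Rails.cache.fetch_multi: cached counts come back from a single multi-key read, only the missing keys run the block (which derives the count from global_count and writes it back), and the result is re-keyed by project for the BatchLoader. allow_cross_slot_commands is needed because a multi-key read may touch keys in different Redis Cluster hash slots, which the instrumentation would otherwise flag. A standalone sketch of the fetch_multi behaviour against an in-memory store (key names here are made up):

require 'active_support'
require 'active_support/cache'

cache = ActiveSupport::Cache::MemoryStore.new
cache.write('project:2:open_issues_count', 5) # pretend project 2 is already cached

# fetch_multi reads every key in one pass; the block runs only for misses and
# its return value is written back to the cache.
counts = cache.fetch_multi('project:1:open_issues_count',
                           'project:2:open_issues_count') do |key|
  0 # stand-in for global_count[project_id].to_i
end

p counts
# => {"project:1:open_issues_count"=>0, "project:2:open_issues_count"=>5}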
@@ -5,21 +5,6 @@
# because the service uses maps to retrieve the project ids
module Projects
class BatchForksCountService < Projects::BatchCountService
def refresh_cache_and_retrieve_data
count_services = @projects.map { |project| count_service.new(project) }
values = Gitlab::Instrumentation::RedisClusterValidator.allow_cross_slot_commands do
Rails.cache.fetch_multi(*(count_services.map { |ser| ser.cache_key } )) { |key| nil }
end
results_per_service = Hash[count_services.zip(values.values)]
projects_to_refresh = results_per_service.select { |_k, value| value.nil? }
projects_to_refresh = recreate_cache(projects_to_refresh)
results_per_service.update(projects_to_refresh)
results_per_service.transform_keys { |k| k.project }
end
# rubocop: disable CodeReuse/ActiveRecord
def global_count
@global_count ||= begin
@@ -33,13 +18,5 @@ module Projects
def count_service
::Projects::ForksCountService
end
def recreate_cache(projects_to_refresh)
projects_to_refresh.each_with_object({}) do |(service, _v), hash|
count = global_count[service.project.id].to_i
service.refresh_cache { count }
hash[service] = count
end
end
end
end
@@ -9,6 +9,8 @@ module Projects
# all caches.
VERSION = 1
attr_reader :project
def initialize(project)
@project = project
end
......
@@ -3,8 +3,6 @@
module Projects
# Service class for getting and caching the number of forks of a project.
class ForksCountService < Projects::CountService
attr_reader :project
def cache_key_name
'forks_count'
end
......
@@ -49,6 +49,14 @@ module API
end
# rubocop: enable CodeReuse/ActiveRecord
def self.execute_batch_counting(projects_relation)
# Call the count methods on every project, so the BatchLoader would load them all at
# once when the entities are rendered
projects_relation.each(&:forks_count)
super
end
private
alias_method :project, :object
......
@@ -144,8 +144,13 @@ module API
end
# rubocop: enable CodeReuse/ActiveRecord
def self.forks_counting_projects(projects_relation)
projects_relation + projects_relation.map(&:forked_from_project).compact
def self.execute_batch_counting(projects_relation)
# Call the count methods on every project, so the BatchLoader would load them all at
# once when the entities are rendered
projects_relation.each(&:open_issues_count)
projects_relation.map(&:forked_from_project).compact.each(&:forks_count)
super
end
end
end
......
@@ -7,28 +7,21 @@ module API
class_methods do
def prepare_relation(projects_relation, options = {})
projects_relation = preload_relation(projects_relation, options)
execute_batch_counting(projects_relation)
# Call the forks count method on every project, so the BatchLoader would load them all at
# once when the entities are rendered
projects_relation.each(&:forks_count)
projects_relation
end
# This is overridden by the specific Entity class to
# preload assocations that it needs
def preload_relation(projects_relation, options = {})
projects_relation
end
def forks_counting_projects(projects_relation)
projects_relation
end
def batch_open_issues_counting(projects_relation)
::Projects::BatchOpenIssuesCountService.new(projects_relation).refresh_cache
end
# This is overridden by the specific Entity class to
# batch load certain counts
def execute_batch_counting(projects_relation)
batch_open_issues_counting(projects_relation)
end
end
end
......
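
The relation builder is thus reduced to a single overridable hook: prepare_relation preloads the relation, then calls execute_batch_counting, and each entity class queues whichever lazy counters it renders before handing off to super. A condensed, self-contained sketch of that shape (module and class bodies are simplified, not the full entities):

module ProjectsRelationBuilder
  def prepare_relation(projects_relation)
    execute_batch_counting(projects_relation) # queue lazy counts before rendering
    projects_relation
  end

  # Overridden by entity classes to queue the counts they render.
  def execute_batch_counting(projects_relation)
    projects_relation
  end
end

class ProjectEntity # simplified stand-in for API::Entities::Project
  extend ProjectsRelationBuilder

  def self.execute_batch_counting(projects_relation)
    projects_relation.each(&:open_issues_count)
    projects_relation.map(&:forked_from_project).compact.each(&:forks_count)
    super
  end
end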
@@ -1176,8 +1176,11 @@ RSpec.describe Issue do
it 'refreshes the number of open issues of the project' do
project = subject.project
expect { subject.destroy! }
.to change { project.open_issues_count }.from(1).to(0)
expect do
subject.destroy!
BatchLoader::Executor.clear_current
end.to change { project.open_issues_count }.from(1).to(0)
end
end
......
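
The spec changes here and below all share one pattern: the public count is now a lazy BatchLoader value that is memoized per executor (per request in production), so after mutating data the tests call BatchLoader::Executor.clear_current to drop that memoization and force the re-read to run a fresh batch. Roughly:

require 'batch_loader'

def cached_count
  BatchLoader.for(:project).batch(replace_methods: false) do |keys, loader|
    keys.each { |key| loader.call(key, rand(100)) } # pretend this reads Redis
  end
end

first  = cached_count.to_i          # resolves and memoizes within the executor
second = cached_count.to_i          # same memoized value, no new batch
BatchLoader::Executor.clear_current # what the updated specs do
third  = cached_count.to_i          # a fresh batch runs again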
@@ -1053,12 +1053,12 @@ RSpec.describe Project, factory_default: :keep do
project.open_issues_count(user)
end
it 'invokes the count service with no current_user' do
count_service = instance_double(Projects::OpenIssuesCountService)
expect(Projects::OpenIssuesCountService).to receive(:new).with(project, nil).and_return(count_service)
expect(count_service).to receive(:count)
it 'invokes the batch count service with no current_user' do
count_service = instance_double(Projects::BatchOpenIssuesCountService)
expect(Projects::BatchOpenIssuesCountService).to receive(:new).with([project]).and_return(count_service)
expect(count_service).to receive(:refresh_cache_and_retrieve_data).and_return({})
project.open_issues_count
project.open_issues_count.to_s
end
end
......
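
A related detail in the project spec above: a BatchLoader proxy does nothing until some method needs the real value, so the expectation on the stubbed batch service would never fire without forcing evaluation; that is what the trailing to_s is for (any value method would do). A tiny illustration:

require 'batch_loader'

lazy = BatchLoader.for(:project).batch(replace_methods: false) do |items, loader|
  puts 'batch executed' # in the spec, this is where the mocked service is hit
  items.each { |item| loader.call(item, 7) }
end

# Nothing has run yet; the proxy stays inert until a value is needed.
lazy.to_s # => "7", and only now does 'batch executed' print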
@@ -58,8 +58,11 @@ RSpec.describe Issues::CloseService do
end
it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
expect { service.execute(issue) }
.to change { project.open_issues_count }.from(1).to(0)
expect do
service.execute(issue)
BatchLoader::Executor.clear_current
end.to change { project.open_issues_count }.from(1).to(0)
end
it 'invalidates counter cache for assignees' do
......
@@ -91,7 +91,11 @@ RSpec.describe Issues::CreateService do
end
it 'refreshes the number of open issues', :use_clean_rails_memory_store_caching do
expect { issue }.to change { project.open_issues_count }.from(0).to(1)
expect do
issue
BatchLoader::Executor.clear_current
end.to change { project.open_issues_count }.from(0).to(1)
end
context 'when current user cannot set issue metadata in the project' do
......
@@ -39,8 +39,11 @@ RSpec.describe Issues::ReopenService do
it 'refreshes the number of opened issues' do
service = described_class.new(project: project, current_user: user)
expect { service.execute(issue) }
.to change { project.open_issues_count }.from(0).to(1)
expect do
service.execute(issue)
BatchLoader::Executor.clear_current
end.to change { project.open_issues_count }.from(0).to(1)
end
it 'deletes milestone issue counters cache' do
......
@@ -146,8 +146,11 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'refreshes the number of open issues when the issue is made confidential', :use_clean_rails_memory_store_caching do
issue # make sure the issue is created first so our counts are correct.
expect { update_issue(confidential: true) }
.to change { project.open_issues_count }.from(1).to(0)
expect do
update_issue(confidential: true)
BatchLoader::Executor.clear_current
end.to change { project.open_issues_count }.from(1).to(0)
end
it 'enqueues ConfidentialIssueWorker when an issue is made confidential' do
......
@@ -8,7 +8,7 @@ RSpec.describe Projects::BatchOpenIssuesCountService do
let(:subject) { described_class.new([project_1, project_2]) }
describe '#refresh_cache', :use_clean_rails_memory_store_caching do
describe '#refresh_cache_and_retrieve_data', :use_clean_rails_memory_store_caching do
before do
create(:issue, project: project_1)
create(:issue, project: project_1, confidential: true)
@@ -19,7 +19,7 @@ RSpec.describe Projects::BatchOpenIssuesCountService do
context 'when cache is clean' do
it 'refreshes cache keys correctly' do
subject.refresh_cache
subject.refresh_cache_and_retrieve_data
# It does not update total issues cache
expect(Rails.cache.read(get_cache_key(subject, project_1))).to eq(nil)
@@ -29,19 +29,6 @@ RSpec.describe Projects::BatchOpenIssuesCountService do
expect(Rails.cache.read(get_cache_key(subject, project_1, true))).to eq(1)
end
end
context 'when issues count is already cached' do
before do
create(:issue, project: project_2)
subject.refresh_cache
end
it 'does not update cache again' do
expect(Rails.cache).not_to receive(:write)
subject.refresh_cache
end
end
end
def get_cache_key(subject, project, public_key = false)
......