Commit eb02cc90 authored by Dmitry Gruzd

Merge branch '297279-invalidate-namespace-cache-when-namespaces-moved' into 'master'

Invalidate ES namespace cache when transferring groups

See merge request gitlab-org/gitlab!70005
parents 4a1fa966 64b6bc92
@@ -215,6 +215,10 @@ module EE
       ::Gitlab::Elastic::ElasticsearchEnabledCache.delete_record(:project, project_id)
     end

+    def invalidate_elasticsearch_indexes_cache_for_namespace!(namespace_id)
+      ::Gitlab::Elastic::ElasticsearchEnabledCache.delete_record(:namespace, namespace_id)
+    end
+
     def elasticsearch_limited_projects(ignore_namespaces = false)
       return ::Project.where(id: ElasticsearchIndexedProject.select(:project_id)) if ignore_namespaces
......
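The new helper mirrors the project-level `invalidate_elasticsearch_indexes_cache_for_project!` just above it. A minimal usage sketch (the `group` variable is illustrative; the real callers appear in the namespace and transfer-service hunks below):

```ruby
# Illustrative only: drop the cached answer for this namespace so the next
# elasticsearch_indexes_namespace? check is re-evaluated instead of served from cache.
::Gitlab::CurrentSettings.invalidate_elasticsearch_indexes_cache_for_namespace!(group.id)
```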
@@ -398,6 +398,10 @@ module EE
       ::Gitlab::CurrentSettings.elasticsearch_indexes_namespace?(self)
     end

+    def invalidate_elasticsearch_indexes_cache!
+      ::Gitlab::CurrentSettings.invalidate_elasticsearch_indexes_cache_for_namespace!(self.id)
+    end
+
     def enable_temporary_storage_increase!
       update(temporary_storage_increase_ends_on: TEMPORARY_STORAGE_INCREASE_DAYS.days.from_now)
     end
......
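Taken together with the application-setting hunk above, the new call chain is short. A sketch of it, e.g. from a Rails console (assembled from the diff; the group path is a placeholder):

```ruby
group = Group.find_by_full_path('some-group') # any Namespace works here

# Namespace-level wrapper added above ...
group.invalidate_elasticsearch_indexes_cache!
# ... delegates to the new ApplicationSetting helper:
#   ::Gitlab::CurrentSettings.invalidate_elasticsearch_indexes_cache_for_namespace!(group.id)
# ... which deletes the cached entry:
#   ::Gitlab::Elastic::ElasticsearchEnabledCache.delete_record(:namespace, group.id)
```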
@@ -17,7 +17,7 @@ module EE
      def post_update_hooks(updated_project_ids)
        super

-       update_elasticsearch_hooks(updated_project_ids)
+       update_elasticsearch_hooks
      end

      def lost_groups
@@ -30,11 +30,13 @@ module EE
        end
      end

-     def update_elasticsearch_hooks(updated_project_ids)
+     def update_elasticsearch_hooks
        # When a group is moved to a new group, there is no way to know whether the group was using Elasticsearch
-       # before the transfer. If Elasticsearch limit indexing is enabled, each project has the ES cache
+       # before the transfer. If Elasticsearch limit indexing is enabled, the group and each project has the ES cache
        # invalidated. Reindex all projects and associated data to make sure the namespace_ancestry field gets
        # updated in each document.
+       group.invalidate_elasticsearch_indexes_cache! if ::Gitlab::CurrentSettings.elasticsearch_limit_indexing?
+
        ::Project.id_in(group.all_projects.select(:id)).find_each do |project|
          project.invalidate_elasticsearch_indexes_cache! if ::Gitlab::CurrentSettings.elasticsearch_limit_indexing?
          ::Elastic::ProcessInitialBookkeepingService.backfill_projects!(project) if project.maintaining_elasticsearch?
......
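Read without the diff noise, the transfer-service hook after this change looks roughly like the following (reconstructed from the two hunks above; the enclosing module and class boilerplate is omitted):

```ruby
def update_elasticsearch_hooks
  # Invalidate the cached limit-indexing answer for the transferred group itself...
  group.invalidate_elasticsearch_indexes_cache! if ::Gitlab::CurrentSettings.elasticsearch_limit_indexing?

  # ...and for every project under it, then queue a full reindex of each project
  # that should stay in the index so namespace_ancestry is rewritten in its documents.
  ::Project.id_in(group.all_projects.select(:id)).find_each do |project|
    project.invalidate_elasticsearch_indexes_cache! if ::Gitlab::CurrentSettings.elasticsearch_limit_indexing?
    ::Elastic::ProcessInitialBookkeepingService.backfill_projects!(project) if project.maintaining_elasticsearch?
  end
end
```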
@@ -500,6 +500,15 @@ RSpec.describe ApplicationSetting do
     end
   end

+  describe '#invalidate_elasticsearch_indexes_cache_for_namespace!' do
+    it 'deletes the ElasticsearchEnabledCache for a namespace' do
+      namespace_id = 1
+
+      expect(::Gitlab::Elastic::ElasticsearchEnabledCache).to receive(:delete_record).with(:namespace, namespace_id)
+
+      setting.invalidate_elasticsearch_indexes_cache_for_namespace!(namespace_id)
+    end
+  end
+
   describe '#search_using_elasticsearch?' do
     # Constructs a truth table to run the specs against
     where(indexing: [true, false], searching: [true, false], limiting: [true, false], advanced_global_search_for_limited_indexing: [true, false])
......
@@ -110,6 +110,16 @@ RSpec.describe Namespace do
     end
   end

+  describe '#invalidate_elasticsearch_indexes_cache!' do
+    let(:namespace) { create :namespace }
+
+    it 'clears the cache for the namespace' do
+      expect(::Gitlab::Elastic::ElasticsearchEnabledCache).to receive(:delete_record).with(:namespace, namespace.id)
+
+      namespace.invalidate_elasticsearch_indexes_cache!
+    end
+  end
+
   describe '#actual_plan_name' do
     let(:namespace) { create(:namespace) }
......
@@ -29,11 +29,12 @@ RSpec.describe Groups::TransferService, '#execute' do
      create(:elasticsearch_indexed_namespace, namespace: new_group)
    end

-   it 'invalidates the cache and indexes the project and all associated data' do
+   it 'invalidates the namespace and project cache and indexes the project and all associated data' do
      expect(project).not_to receive(:maintain_elasticsearch_update)
      expect(project).not_to receive(:maintain_elasticsearch_destroy)
      expect(::Elastic::ProcessInitialBookkeepingService).to receive(:backfill_projects!).with(project)
      expect(::Gitlab::CurrentSettings).to receive(:invalidate_elasticsearch_indexes_cache_for_project!).with(project.id).and_call_original
+     expect(::Gitlab::CurrentSettings).to receive(:invalidate_elasticsearch_indexes_cache_for_namespace!).with(group.id).and_call_original

      transfer_service.execute(new_group)
    end
@@ -45,11 +46,12 @@ RSpec.describe Groups::TransferService, '#execute' do
      create(:elasticsearch_indexed_namespace, namespace: new_group)
    end

-   it 'invalidates the cache and indexes the project and all associated data' do
+   it 'invalidates the namespace and project cache and indexes the project and all associated data' do
      expect(project).not_to receive(:maintain_elasticsearch_update)
      expect(project).not_to receive(:maintain_elasticsearch_destroy)
      expect(::Elastic::ProcessInitialBookkeepingService).to receive(:backfill_projects!).with(project)
      expect(::Gitlab::CurrentSettings).to receive(:invalidate_elasticsearch_indexes_cache_for_project!).with(project.id).and_call_original
+     expect(::Gitlab::CurrentSettings).to receive(:invalidate_elasticsearch_indexes_cache_for_namespace!).with(group.id).and_call_original

      transfer_service.execute(new_group)
    end
@@ -59,11 +61,12 @@ RSpec.describe Groups::TransferService, '#execute' do
    context 'when elasticsearch_limit_indexing is off' do
      let(:new_group) { create(:group, :private) }

-     it 'does not invalidate the cache and reindexes projects and associated data' do
+     it 'does not invalidate the namespace or project cache and reindexes projects and associated data' do
        project1 = create(:project, :repository, :public, namespace: group)
        project2 = create(:project, :repository, :public, namespace: group)
        project3 = create(:project, :repository, :private, namespace: group)

+       expect(::Gitlab::CurrentSettings).not_to receive(:invalidate_elasticsearch_indexes_cache_for_namespace!)
        expect(::Gitlab::CurrentSettings).not_to receive(:invalidate_elasticsearch_indexes_cache_for_project!)
        expect(::Elastic::ProcessInitialBookkeepingService).to receive(:backfill_projects!).with(project1)
        expect(::Elastic::ProcessInitialBookkeepingService).to receive(:backfill_projects!).with(project2)
......