Commit 3b942454 authored by Valery Sizov, committed by Michael Kozono

Snippet Replication - consume events

This commit adds the code that syncs snippets
parent 2ae6c5f3
......@@ -12,7 +12,7 @@ class SnippetRepository < ApplicationRecord
belongs_to :snippet, inverse_of: :snippet_repository
delegate :repository, to: :snippet
delegate :repository, :repository_storage, to: :snippet
class << self
def find_snippet(disk_path)
......
......@@ -13,6 +13,7 @@ module Geo
end
def handle_after_create_commit
return false unless Gitlab::Geo.enabled?
return unless self.class.enabled?
publish(:created, **created_params)
......
......@@ -8,18 +8,58 @@ module Geo
include Gitlab::Geo::LogHelpers
included do
event :created
event :updated
event :deleted
end
# Called by Gitlab::Geo::Replicator#consume
def consume_event_updated(**params)
# not implemented yet
return unless in_replicables_for_geo_node?
sync_repository
end
# Called by Gitlab::Geo::Replicator#consume
def consume_event_created(**params)
consume_event_updated(**params)
end
# Called by Gitlab::Geo::Replicator#consume
def consume_event_deleted(**params)
# not implemented yet
replicate_destroy(params)
end
def replicate_destroy(params)
Geo::RepositoryRegistryRemovalService.new(self, params).execute
end
def sync_repository
Geo::FrameworkRepositorySyncService.new(self).execute
end
def reschedule_sync
Geo::EventWorker.perform_async(replicable_name, 'updated', { model_record_id: model_record.id })
end
def remote_url
Gitlab::Geo.primary_node.repository_url(repository)
end
def jwt_authentication_header
authorization = ::Gitlab::Geo::RepoSyncRequest.new(
scope: repository.full_path
).authorization
{ "http.#{remote_url}.extraHeader" => "Authorization: #{authorization}" }
end
def deleted_params
event_params.merge(
repository_storage: model_record.repository_storage,
disk_path: model_record.repository.disk_path,
full_path: model_record.repository.full_path
)
end
end
end
......@@ -12,8 +12,14 @@ module EE
class_methods do
def replicables_for_geo_node
# Not implemented yet. Should be responsible for selective sync
none
all
end
end
# Geo checks this method in FrameworkRepositorySyncService to avoid snapshotting
# repositories using object pools
def pool_repository
nil
end
end
end
......@@ -207,6 +207,10 @@ class GeoNode < ApplicationRecord
url
end
def repository_url(repository)
Gitlab::Utils.append_path(internal_url, "#{repository.full_path}.git")
end
def oauth_callback_url
Gitlab::Routing.url_helpers.oauth_geo_callback_url(url_helper_args)
end
......
......@@ -8,12 +8,33 @@ module Geo
::SnippetRepository
end
def needs_checksum?
def self.replication_enabled_by_default?
false
end
def self.replication_enabled_by_default?
def needs_checksum?
false
end
def repository
model_record.repository
end
# Once https://gitlab.com/gitlab-org/gitlab/-/issues/213021 is fixed
# this method can be removed
def remote_url
url = Gitlab::Geo.primary_node.repository_url(repository)
url.sub('@snippets', 'snippets')
end
# Once https://gitlab.com/gitlab-org/gitlab/-/issues/213021 is fixed
# this method can be removed
def jwt_authentication_header
authorization = ::Gitlab::Geo::RepoSyncRequest.new(
scope: repository.full_path.sub('@snippets', 'snippets')
).authorization
{ "http.#{remote_url}.extraHeader" => "Authorization: #{authorization}" }
end
end
end
# frozen_string_literal: true

require 'securerandom'

module Geo
  # This class is similar to RepositoryBaseSyncService
  # but it works in a scope of Self-Service-Framework
  #
  # It fetches a replicable's repository from the Geo primary onto this
  # node, guarded by an exclusive lease, and records the outcome
  # (synced / failed / missing-on-primary) on the replicator's registry.
  class FrameworkRepositorySyncService
    include ExclusiveLeaseGuard
    include ::Gitlab::ShellAdapter
    include ::Gitlab::Geo::LogHelpers
    include Delay

    attr_reader :replicator, :repository

    delegate :registry, to: :replicator

    # Name of the git remote used when mirroring from the primary.
    GEO_REMOTE_NAME = 'geo'
    LEASE_TIMEOUT = 8.hours
    LEASE_KEY_PREFIX = 'geo_sync_ssf_service'
    # Once retry_count exceeds this, fall back to a full redownload
    # instead of an incremental fetch (see #should_be_redownloaded?).
    RETRIES_BEFORE_REDOWNLOAD = 5

    # @param replicator [Gitlab::Geo::Replicator] supplies the repository,
    #   registry record, remote URL and JWT auth header for the sync
    def initialize(replicator)
      @replicator = replicator
      @repository = replicator.repository
    end

    # Entry point. Runs the sync under an exclusive lease so that only one
    # sync per replicable runs at a time; a held lease makes this a no-op.
    def execute
      try_obtain_lease do
        log_info("Started #{replicable_name} sync")
        sync_repository
        log_info("Finished #{replicable_name} sync")
      end
    end

    # Performs the fetch and transitions the registry record accordingly.
    # Caches are expired in all cases via the ensure block.
    def sync_repository
      start_registry_sync!
      fetch_repository
      mark_sync_as_successful
    rescue Gitlab::Git::Repository::NoRepository => e
      # Local copy is invalid: flag a forced redownload for the next attempt
      # and fire after_create so cached state is rebuilt.
      log_info('Marking the repository for a forced re-download')
      fail_registry_sync!('Invalid repository', e, force_to_redownload: true)
      log_info('Expiring caches')
      repository.after_create
    rescue Gitlab::Shell::Error, Gitlab::Git::BaseError => e
      # In some cases repository does not exist, the only way to know about this is to parse the error text.
      # If it does not exist we should consider it as successfully downloaded.
      if e.message.include? Gitlab::GitAccess::ERROR_MESSAGES[:no_repo] # rubocop:disable Cop/LineBreakAroundConditionalBlock
        log_info('Repository is not found, marking it as successfully synced')
        mark_sync_as_successful(missing_on_primary: true)
      else
        fail_registry_sync!('Error syncing repository', e)
      end
    ensure
      expire_repository_caches
    end

    # Lease key is scoped per replicable record so different records
    # can sync concurrently.
    def lease_key
      @lease_key ||= "#{LEASE_KEY_PREFIX}:#{replicable_name}:#{replicator.model_record.id}"
    end

    def lease_timeout
      LEASE_TIMEOUT
    end

    private

    # Chooses between redownload, incremental fetch, or create-then-fetch
    # depending on registry state and local repository existence.
    def fetch_repository
      log_info("Trying to fetch #{replicable_name}")
      clean_up_temporary_repository

      if should_be_redownloaded?
        redownload_repository
        @new_repository = true
      elsif repository.exists?
        fetch_geo_mirror(repository)
      else
        ensure_repository
        fetch_geo_mirror(repository)
        @new_repository = true
      end
    end

    # Full redownload into a temporary path: try a snapshot first, fall back
    # to a plain git fetch into an empty bare repo, then swap it in as the
    # canonical repository. The temp path is cleaned up either way.
    def redownload_repository
      log_info("Redownloading #{replicable_name}")

      if fetch_snapshot_into_temp_repo
        set_temp_repository_as_main

        return
      end

      log_info("Attempting to fetch repository via git")

      # `git fetch` needs an empty bare repository to fetch into
      temp_repo.create_repository

      fetch_geo_mirror(temp_repo)

      set_temp_repository_as_main
    ensure
      clean_up_temporary_repository
    end

    def current_node
      ::Gitlab::Geo.current_node
    end

    # @param repository [Repository] target to fetch into (canonical or temp)
    def fetch_geo_mirror(repository)
      # Fetch the repository, using a JWT header for authentication
      repository.with_config(replicator.jwt_authentication_header) do
        repository.fetch_as_mirror(replicator.remote_url, remote_name: GEO_REMOTE_NAME, forced: true)
      end
    end

    # Use snapshotting for redownloads *only* when enabled.
    #
    # If writes happen to the repository while snapshotting, it may be
    # returned in an inconsistent state. However, a subsequent git fetch
    # will be enqueued by the log cursor, which should resolve any problems
    # it is possible to fix.
    #
    # Returns a falsy value when snapshotting is skipped or fails, so the
    # caller falls back to a git fetch.
    def fetch_snapshot_into_temp_repo
      # Snapshots will miss the data that are shared in object pools, and snapshotting should
      # be avoided to guard against data loss.
      return if replicator.model_record.pool_repository

      log_info("Attempting to fetch repository via snapshot")

      temp_repo.create_from_snapshot(
        ::Gitlab::Geo.primary_node.snapshot_url(temp_repo),
        ::Gitlab::Geo::RepoSyncRequest.new(scope: ::Gitlab::Geo::API_SCOPE).authorization
      )
    rescue => err
      log_error('Snapshot attempt failed', err)
      false
    end

    # Transitions the registry to the synced state. If the transition is not
    # persisted (e.g. an update event was processed during the sync),
    # another sync is scheduled instead of waiting for backfill.
    def mark_sync_as_successful(missing_on_primary: false)
      log_info("Marking #{replicable_name} sync as successful")

      registry = replicator.registry
      registry.force_to_redownload = false
      registry.missing_on_primary = missing_on_primary
      persisted = registry.synced!

      reschedule_sync unless persisted

      log_info("Finished #{replicable_name} sync",
               download_time_s: download_time_in_seconds)
    end

    def start_registry_sync!
      log_info("Marking #{replicable_name} sync as started")
      registry.start!
    end

    # Records a failure on the registry; repository cleanup is attempted so
    # stale files from the aborted sync don't linger on disk.
    def fail_registry_sync!(message, error, force_to_redownload: false)
      log_error(message, error)

      registry = replicator.registry
      registry.force_to_redownload = force_to_redownload
      registry.failed!(message, error)

      repository.clean_stale_repository_files
    end

    # Seconds elapsed since the registry's last_synced_at, rounded to ms.
    def download_time_in_seconds
      (Time.current.to_f - registry.last_synced_at.to_f).round(3)
    end

    def disk_path_temp
      # We use "@" as it's not allowed to use it in a group or project name
      @disk_path_temp ||= "@geo-temporary/#{repository.disk_path}"
    end

    # Path the current canonical repository is parked at while the freshly
    # downloaded one is swapped in.
    def deleted_disk_path_temp
      @deleted_path ||= "@failed-geo-sync/#{repository.disk_path}"
    end

    # A Repository pointing at the temporary disk path, on the same shard
    # and with the same full path as the canonical repository.
    def temp_repo
      @temp_repo ||= ::Repository.new(repository.full_path, repository.container, shard: repository.shard, disk_path: disk_path_temp, repo_type: repository.repo_type)
    end

    def clean_up_temporary_repository
      exists = gitlab_shell.repository_exists?(repository_storage, disk_path_temp + '.git')

      if exists && !gitlab_shell.remove_repository(repository_storage, disk_path_temp)
        raise Gitlab::Shell::Error, "Temporary #{replicable_name} can not be removed"
      end
    end

    # Promotes the temporary repository to the canonical disk path:
    # park the old canonical copy, move the temp one in, then purge
    # the parked copy. Raises Gitlab::Shell::Error on any failed move.
    def set_temp_repository_as_main
      log_info(
        "Setting newly downloaded repository as main",
        storage_shard: repository_storage,
        temp_path: disk_path_temp,
        deleted_disk_path_temp: deleted_disk_path_temp,
        disk_path: repository.disk_path
      )

      # Remove the deleted path in case it exists, but it may not be there
      gitlab_shell.remove_repository(repository_storage, deleted_disk_path_temp)

      # Make sure we have the most current state of exists?
      repository.expire_exists_cache

      # Move the current canonical repository to the deleted path for reference
      if repository.exists?
        unless gitlab_shell.mv_repository(repository_storage, repository.disk_path, deleted_disk_path_temp)
          raise Gitlab::Shell::Error, 'Can not move original repository out of the way'
        end
      end

      # Move the temporary repository to the canonical path
      unless gitlab_shell.mv_repository(repository_storage, disk_path_temp, repository.disk_path)
        raise Gitlab::Shell::Error, 'Can not move temporary repository to canonical location'
      end

      # Purge the original repository
      unless gitlab_shell.remove_repository(repository_storage, deleted_disk_path_temp)
        raise Gitlab::Shell::Error, 'Can not remove outdated main repository'
      end
    end

    def repository_storage
      replicator.model_record.repository_storage
    end

    # NOTE(review): set during fetch_repository but not read within this
    # class — presumably for callers/subclasses; confirm before removing.
    def new_repository?
      @new_repository
    end

    def ensure_repository
      repository.create_if_not_exists
    end

    def expire_repository_caches
      log_info('Expiring caches for repository')
      repository.after_sync
    end

    # Redownload when explicitly flagged, or when incremental fetches have
    # failed more than RETRIES_BEFORE_REDOWNLOAD times.
    def should_be_redownloaded?
      return true if registry.force_to_redownload

      registry.retry_count > RETRIES_BEFORE_REDOWNLOAD
    end

    def reschedule_sync
      log_info("Reschedule the sync because a RepositoryUpdateEvent was processed during the sync")
      replicator.reschedule_sync
    end

    def replicable_name
      replicator.replicable_name
    end
  end
end
......@@ -105,7 +105,7 @@ module Geo
end
def remote_url
Gitlab::Utils.append_path(Gitlab::Geo.primary_node.internal_url, "#{repository.full_path}.git")
Gitlab::Geo.primary_node.repository_url(repository)
end
# Use snapshotting for redownloads *only* when enabled.
......
# frozen_string_literal: true

module Geo
  # This service is intended to remove any repository, including its
  # registry record when container object doesn't exist anymore.
  class RepositoryRegistryRemovalService
    include ::Gitlab::Geo::LogHelpers

    attr_reader :params, :replicator, :id, :name, :full_path

    # @replicator [Gitlab::Geo::Replicator] Gitlab Geo Replicator
    # @params [Hash] Should include keys: full_path, repository_storage, disk_path
    def initialize(replicator, params)
      @replicator = replicator
      @id = replicator.model_record_id
      @params = params
      @full_path = params[:full_path]
    end

    # Removes the repository from disk, then the registry record (if any).
    def execute
      destroy_repository

      return unless registry

      destroy_registry
    end

    private

    # Builds a Repository from the given disk path/shard and asks
    # Repositories::DestroyService to remove it; logs the outcome.
    def destroy_repository
      repo = Repository.new(params[:disk_path], self, shard: params[:repository_storage])
      outcome = Repositories::DestroyService.new(repo).execute

      if outcome[:status] == :success
        log_info('Repository removed', params)
      else
        log_error("#{replicable_name} couldn't be destroyed", nil, params)
      end
    end

    def destroy_registry
      registry.destroy

      log_info('Registry removed', params)
    end

    def registry
      replicator.registry
    end
  end
end
# frozen_string_literal: true

module EE
  module Gitlab
    # EE extension of Gitlab::GitAccessSnippet.
    #
    # Lets the :geo actor bypass the regular download/push permission
    # checks so Geo nodes can replicate snippet repositories.
    module GitAccessSnippet
      extend ::Gitlab::Utils::Override

      private

      override :check_download_access!
      def check_download_access!
        # Geo requests skip the user-level download check entirely.
        return if geo?

        super
      end

      override :check_push_access!
      def check_push_access!
        return if geo?

        super
      end

      override :allowed_actor?
      def allowed_actor?
        # :geo is a valid actor in addition to those accepted by CE.
        super || geo?
      end
    end
  end
end
......@@ -84,7 +84,7 @@ module Gitlab
end
def in_replicables_for_geo_node?
self.class.replicables_for_geo_node.id_in(self).exists?
self.class.replicables_for_geo_node.primary_key_in(self).exists?
end
end
end
......
......@@ -285,12 +285,14 @@ module Gitlab
end
def handle_after_destroy
return false unless Gitlab::Geo.enabled?
return unless self.class.enabled?
publish(:deleted, **deleted_params)
end
def handle_after_update
return false unless Gitlab::Geo.enabled?
return unless self.class.enabled?
publish(:updated, **updated_params)
......
# frozen_string_literal: true

require 'spec_helper'

# EE spec: verifies the :geo actor can push to snippet repositories over
# every protocol, exercising the EE::Gitlab::GitAccessSnippet overrides.
RSpec.describe Gitlab::GitAccessSnippet do
  using RSpec::Parameterized::TableSyntax

  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project, :public) }
  let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) }

  # :geo actor bypasses the regular permission checks.
  let(:actor) { :geo }
  let(:authentication_abilities) { [:read_project, :download_code, :push_code] }

  subject(:access) { Gitlab::GitAccessSnippet.new(actor, snippet, protocol, authentication_abilities: authentication_abilities) }

  describe '#check' do
    # git-receive-pack is the push path.
    subject { access.check('git-receive-pack', ::Gitlab::GitAccess::ANY) }

    where(:protocol_name) do
      %w(ssh web http https)
    end

    with_them do
      let(:protocol) { protocol_name }

      it { is_expected.to be_a(::Gitlab::GitAccessResult::Success) }
    end
  end
end
......@@ -27,6 +27,7 @@ RSpec.describe Packages::PackageFile, type: :model do
context 'new file' do
it 'calls checksum worker' do
allow(Gitlab::Geo).to receive(:enabled?).and_return(true)
allow(Geo::BlobVerificationPrimaryWorker).to receive(:perform_async)
package_file = create(:conan_package_file, :conan_recipe_file)
......
# frozen_string_literal: true

require 'spec_helper'

# Exercises Geo::FrameworkRepositorySyncService against a snippet
# repository replicator on a secondary node: success/failure registry
# transitions, retry/redownload behavior, and lease handling.
RSpec.describe Geo::FrameworkRepositorySyncService, :geo do
  include ::EE::GeoHelpers
  include ExclusiveLeaseHelpers

  let_it_be(:primary) { create(:geo_node, :primary) }
  let_it_be(:secondary) { create(:geo_node) }
  let_it_be(:project) { create(:project_empty_repo) }
  let_it_be(:snippet) { create(:project_snippet, :public, :repository, project: project) }
  let_it_be(:replicator) { snippet.snippet_repository.replicator }

  let(:repository) { snippet.repository }
  # Must match FrameworkRepositorySyncService#lease_key.
  let(:lease_key) { "geo_sync_ssf_service:snippet_repository:#{replicator.model_record.id}" }
  let(:lease_uuid) { 'uuid'}
  let(:registry) { replicator.registry }

  subject { described_class.new(replicator) }

  before do
    # The service only syncs when running on a secondary.
    stub_current_geo_node(secondary)
  end

  it_behaves_like 'geo base sync execution'
  it_behaves_like 'geo base sync fetch'

  context 'reschedules sync due to race condition instead of waiting for backfill' do
    describe '#mark_sync_as_successful' do
      let(:mark_sync_as_successful) { subject.send(:mark_sync_as_successful) }
      let(:registry) { subject.send(:registry) }

      context 'when UpdatedEvent was processed during a sync' do
        it 'reschedules the sync' do
          expect(::Geo::EventWorker).to receive(:perform_async)
          # synced! returning false simulates a concurrent update event.
          expect_any_instance_of(registry.class).to receive(:synced!).and_return(false)

          mark_sync_as_successful
        end
      end
    end
  end

  describe '#execute' do
    let(:url_to_repo) { replicator.remote_url }

    before do
      stub_exclusive_lease(lease_key, lease_uuid)

      # Stub git operations so no network/gitaly fetch actually happens.
      allow_any_instance_of(Repository).to receive(:fetch_as_mirror)
        .and_return(true)

      allow_any_instance_of(Repository)
        .to receive(:find_remote_root_ref)
        .with('geo')
        .and_return('master')
    end

    include_context 'lease handling'

    it 'fetches project repository with JWT credentials' do
      expect(repository).to receive(:with_config)
        .with("http.#{url_to_repo}.extraHeader" => anything)
        .and_call_original
      expect(repository).to receive(:fetch_as_mirror)
        .with(url_to_repo, remote_name: 'geo', forced: true)
        .once

      subject.execute
    end

    it 'expires repository caches' do
      expect_any_instance_of(Repository).to receive(:expire_all_method_caches).once
      expect_any_instance_of(Repository).to receive(:expire_branch_cache).once
      expect_any_instance_of(Repository).to receive(:expire_content_cache).once

      subject.execute
    end

    it 'voids the failure message when it succeeds after an error' do
      registry.update!(last_sync_failure: 'error')

      expect { subject.execute }.to change { registry.reload.last_sync_failure}.to(nil)
    end

    it 'rescues when Gitlab::Shell::Error is raised' do
      allow(repository).to receive(:fetch_as_mirror)
        .with(url_to_repo, remote_name: 'geo', forced: true)
        .and_raise(Gitlab::Shell::Error)

      expect { subject.execute }.not_to raise_error
    end

    it 'rescues exception and fires after_create hook when Gitlab::Git::Repository::NoRepository is raised' do
      allow(repository).to receive(:fetch_as_mirror)
        .with(url_to_repo, remote_name: 'geo', forced: true)
        .and_raise(Gitlab::Git::Repository::NoRepository)

      expect(repository).to receive(:after_create)

      expect { subject.execute }.not_to raise_error
    end

    it 'increases retry count when Gitlab::Git::Repository::NoRepository is raised' do
      registry.save!

      allow(repository).to receive(:fetch_as_mirror)
        .with(url_to_repo, remote_name: 'geo', forced: true)
        .and_raise(Gitlab::Git::Repository::NoRepository)

      subject.execute

      expect(registry.reload).to have_attributes(
        state: Geo::SnippetRepositoryRegistry::STATE_VALUES[:failed],
        retry_count: 1
      )
    end

    it 'marks sync as successful if no repository found' do
      # A :no_repo shell error is treated as "missing on primary", not failure.
      allow(repository).to receive(:fetch_as_mirror)
        .with(url_to_repo, remote_name: 'geo', forced: true)
        .and_raise(Gitlab::Shell::Error.new(Gitlab::GitAccess::ERROR_MESSAGES[:no_repo]))

      subject.execute

      expect(registry).to have_attributes(
        state: Geo::SnippetRepositoryRegistry::STATE_VALUES[:synced],
        missing_on_primary: true
      )
    end

    it 'marks sync as failed' do
      subject.execute

      expect(registry.synced?).to be true

      allow(repository).to receive(:fetch_as_mirror)
        .with(url_to_repo, remote_name: 'geo', forced: true)
        .and_raise(Gitlab::Git::Repository::NoRepository)

      subject.execute

      expect(registry.reload.failed?).to be true
    end

    context 'tracking database' do
      context 'temporary repositories' do
        include_examples 'cleans temporary repositories'
      end

      context 'when repository sync succeed' do
        it 'sets last_synced_at' do
          subject.execute

          expect(registry.last_synced_at).not_to be_nil
        end

        it 'logs success with timings' do
          allow(Gitlab::Geo::Logger).to receive(:info).and_call_original
          expect(Gitlab::Geo::Logger).to receive(:info).with(hash_including(:message, :download_time_s)).and_call_original

          subject.execute
        end

        it 'sets retry_count and repository_retry_at to nil' do
          registry.update!(retry_count: 2, retry_at: Date.yesterday)

          subject.execute

          expect(registry.reload.retry_count).to be_zero
          expect(registry.retry_at).to be_nil
        end
      end

      context 'when repository sync fail' do
        before do
          allow(repository).to receive(:fetch_as_mirror)
            .with(url_to_repo, remote_name: 'geo', forced: true)
            .and_raise(Gitlab::Shell::Error.new('shell error'))
        end

        it 'sets correct values for registry record' do
          subject.execute

          expect(registry).to have_attributes(last_synced_at: be_present,
                                              retry_count: 1,
                                              retry_at: be_present,
                                              last_sync_failure: 'Error syncing repository: shell error'
                                             )
        end

        it 'calls repository cleanup' do
          expect(repository).to receive(:clean_stale_repository_files)

          subject.execute
        end
      end
    end

    context 'retries' do
      it 'tries to fetch repo' do
        # Below the threshold: a normal fetch, not a redownload.
        registry.update!(retry_count: described_class::RETRIES_BEFORE_REDOWNLOAD - 1)

        expect(subject).to receive(:sync_repository)

        subject.execute
      end

      it 'sets the redownload flag to false after success' do
        registry.update!(retry_count: described_class::RETRIES_BEFORE_REDOWNLOAD + 1, force_to_redownload: true)

        subject.execute

        expect(registry.reload.force_to_redownload).to be false
      end

      it 'tries to redownload repo' do
        registry.update!(retry_count: described_class::RETRIES_BEFORE_REDOWNLOAD + 1)

        expect(subject).to receive(:sync_repository).and_call_original
        # Two moves + two removes = temp repo swapped in as canonical.
        expect(subject.gitlab_shell).to receive(:mv_repository).twice.and_call_original

        expect(subject.gitlab_shell).to receive(:remove_repository).twice.and_call_original

        subject.execute

        repo_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
          repository.path
        end

        expect(File.directory?(repo_path)).to be true
      end

      it 'tries to redownload repo when force_redownload flag is set' do
        registry.update!(
          retry_count: described_class::RETRIES_BEFORE_REDOWNLOAD - 1,
          force_to_redownload: true
        )

        expect(subject).to receive(:sync_repository)

        subject.execute
      end

      it 'cleans temporary repo after redownload' do
        registry.update!(
          retry_count: described_class::RETRIES_BEFORE_REDOWNLOAD - 1,
          force_to_redownload: true
        )

        expect(subject).to receive(:fetch_geo_mirror)
        expect(subject).to receive(:clean_up_temporary_repository).twice.and_call_original
        expect(subject.gitlab_shell).to receive(:repository_exists?).twice.with(replicator.model_record.repository_storage, /.git$/)

        subject.execute
      end

      it 'successfully redownloads the repository even if the retry time exceeds max value' do
        timestamp = Time.current.utc
        registry.update!(
          retry_count: described_class::RETRIES_BEFORE_REDOWNLOAD + 2000,
          retry_at: timestamp,
          force_to_redownload: true
        )

        subject.execute

        # The repository should be redownloaded and cleared without errors. If
        # the timestamp were not capped, we would have seen a "timestamp out
        # of range" in the first update to the registry.
        registry.reload
        expect(registry.retry_at).to be_nil
      end

      context 'no repository' do
        it 'does not raise an error' do
          registry.update!(force_to_redownload: true)

          expect(repository).to receive(:expire_exists_cache).twice.and_call_original
          expect(subject).not_to receive(:fail_registry_sync!)

          subject.execute
        end
      end
    end

    it_behaves_like 'sync retries use the snapshot RPC' do
      let(:retry_count) { described_class::RETRIES_BEFORE_REDOWNLOAD }

      def registry_with_retry_count(retries)
        replicator.registry.update!(retry_count: retries)
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

# Verifies Geo::RepositoryRegistryRemovalService removes both the
# on-disk repository and the registry record once the model is gone.
RSpec.describe Geo::RepositoryRegistryRemovalService do
  include EE::GeoHelpers

  let(:snippet_repository_registry) { create(:geo_snippet_repository_registry) }
  let(:snippet_repository) { snippet_repository_registry.snippet_repository }
  let(:replicator) { snippet_repository.replicator }
  # Mirrors the payload shape built by the replicator's deleted_params.
  let(:params) do
    {
      full_path: snippet_repository.repository.full_path,
      repository_storage: snippet_repository.repository_storage,
      disk_path: snippet_repository.repository.disk_path
    }
  end

  subject(:service) { described_class.new(replicator, params) }

  describe '#execute' do
    before do
      # Instantiate the service first, then destroy the model record,
      # so the service runs against an already-deleted container.
      service
      snippet_repository.destroy!
    end

    it 'removes registry record' do
      expect { service.execute }.to change { Geo::SnippetRepositoryRegistry.count }.by(-1)
    end

    it 'removes repository' do
      expect_next_instance_of(Repositories::DestroyService) do |service|
        expect(service).to receive(:execute).and_return({ status: :success })
      end

      service.execute
    end
  end
end
......@@ -53,7 +53,8 @@ RSpec.shared_examples 'a repository replicator' do
end.to change { ::Geo::Event.count }.by(1)
expect(::Geo::Event.last.attributes).to include(
"replicable_name" => replicator.replicable_name, "event_name" => "deleted", "payload" => { "model_record_id" => replicator.model_record.id })
"replicable_name" => replicator.replicable_name, "event_name" => "deleted")
expect(::Geo::Event.last.payload).to include({ "model_record_id" => replicator.model_record.id })
end
context 'when replication feature flag is disabled' do
......@@ -69,6 +70,49 @@ RSpec.shared_examples 'a repository replicator' do
end
end
describe 'updated event consumption' do
context 'in replicables_for_geo_node list' do
it 'runs SnippetRepositorySyncService service' do
model_record.save!
sync_service = double
expect(sync_service).to receive(:execute)
expect(::Geo::FrameworkRepositorySyncService)
.to receive(:new).with(replicator)
.and_return(sync_service)
replicator.consume(:updated)
end
end
context 'not in replicables_for_geo_node list' do
it 'runs SnippetRepositorySyncService service' do
expect(::Geo::FrameworkRepositorySyncService)
.not_to receive(:new)
replicator.consume(:updated)
end
end
end
describe 'deleted event consumption' do
it 'runs Geo::RepositoryRegistryRemovalService service' do
model_record.save!
sync_service = double
expect(sync_service).to receive(:execute)
expect(Geo::RepositoryRegistryRemovalService)
.to receive(:new).with(replicator, {})
.and_return(sync_service)
replicator.consume(:deleted)
end
end
describe '#model' do
let(:invoke_model) { replicator.class.model }
......
......@@ -54,7 +54,7 @@ RSpec.shared_examples 'geo base sync fetch' do
describe '#sync_repository' do
it 'tells registry that sync will start now' do
registry = subject.send(:registry)
expect(registry).to receive(:start_sync!)
allow_any_instance_of(registry.class).to receive(:start_sync!)
subject.send(:sync_repository)
end
......
......@@ -60,13 +60,17 @@ module Gitlab
def check_valid_actor!
# TODO: Investigate if expanding actor/authentication types are needed.
# https://gitlab.com/gitlab-org/gitlab/issues/202190
if actor && !actor.is_a?(User) && !actor.instance_of?(Key)
if actor && !allowed_actor?
raise ForbiddenError, ERROR_MESSAGES[:authentication_mechanism]
end
super
end
def allowed_actor?
actor.is_a?(User) || actor.instance_of?(Key)
end
def project_snippet?
snippet.is_a?(ProjectSnippet)
end
......@@ -138,3 +142,5 @@ module Gitlab
end
end
end
Gitlab::GitAccessSnippet.prepend_if_ee('EE::Gitlab::GitAccessSnippet')
......@@ -232,29 +232,6 @@ RSpec.describe Gitlab::GitAccessSnippet do
end
end
context 'when geo is enabled', if: Gitlab.ee? do
let(:user) { snippet.author }
let!(:primary_node) { FactoryBot.create(:geo_node, :primary) }
before do
allow(::Gitlab::Database).to receive(:read_only?).and_return(true)
allow(::Gitlab::Geo).to receive(:secondary_with_primary?).and_return(true)
end
# Without override, push access would return Gitlab::GitAccessResult::CustomAction
it 'skips geo for snippet' do
expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/)
end
context 'when user is migration bot' do
let(:user) { migration_bot }
it 'skips geo for snippet' do
expect { push_access_check }.to raise_forbidden(/You can't push code to a read-only GitLab instance/)
end
end
end
context 'when changes are specific' do
let(:changes) { "2d1db523e11e777e49377cfb22d368deec3f0793 ddd0f15ae83993f5cb66a927a28673882e99100b master" }
let(:user) { snippet.author }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment