Commit 80cf476e authored by Matthias Käppler

Merge branch 'ab-namespace-job-artifacts-services' into 'master'

Namespace job artifact related services as Ci::JobArtifacts::_Service

See merge request gitlab-org/gitlab!56567
parents 178a534d aea69299
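The commit applies the new namespace by deleting each old service file and re-creating it under `Ci::JobArtifacts::`, then updating every caller and spec:

- `Ci::CreateJobArtifactsService` → `Ci::JobArtifacts::CreateService`
- `Ci::DestroyExpiredJobArtifactsService` → `Ci::JobArtifacts::DestroyAllExpiredService`
- `Ci::JobArtifactsDestroyBatchService` → `Ci::JobArtifacts::DestroyBatchService`
- `Ci::PipelineArtifacts::DestroyExpiredArtifactsService` → `Ci::PipelineArtifacts::DestroyAllExpiredService` (renamed in place for consistency)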
# frozen_string_literal: true

module Ci
  class CreateJobArtifactsService < ::BaseService
    include Gitlab::Utils::UsageData

    ArtifactsExistError = Class.new(StandardError)

    LSIF_ARTIFACT_TYPE = 'lsif'
    METRICS_REPORT_UPLOAD_EVENT_NAME = 'i_testing_metrics_report_artifact_uploaders'

    OBJECT_STORAGE_ERRORS = [
      Errno::EIO,
      Google::Apis::ServerError,
      Signet::RemoteServerError
    ].freeze

    def initialize(job)
      @job = job
      @project = job.project
    end

    def authorize(artifact_type:, filesize: nil)
      result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
      return result unless result[:status] == :success

      headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))

      if lsif?(artifact_type)
        headers[:ProcessLsif] = true
        track_usage_event('i_source_code_code_intelligence', project.id)
      end

      success(headers: headers)
    end

    def execute(artifacts_file, params, metadata_file: nil)
      result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
      return result unless result[:status] == :success

      return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)

      artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
      result = parse_artifact(artifact)

      track_artifact_uploader(artifact)

      return result unless result[:status] == :success

      persist_artifact(artifact, artifact_metadata, params)
    end

    private

    attr_reader :job, :project

    def validate_requirements(artifact_type:, filesize:)
      return too_large_error if too_large?(artifact_type, filesize)

      success
    end

    def too_large?(type, size)
      size > max_size(type) if size
    end

    def lsif?(type)
      type == LSIF_ARTIFACT_TYPE
    end

    def max_size(type)
      Ci::JobArtifact.max_artifact_size(type: type, project: project)
    end

    def forbidden_type_error(type)
      error("#{type} artifacts are forbidden", :forbidden)
    end

    def too_large_error
      error('file size has reached maximum size limit', :payload_too_large)
    end

    def build_artifact(artifacts_file, params, metadata_file)
      expire_in = params['expire_in'] ||
        Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in

      artifact = Ci::JobArtifact.new(
        job_id: job.id,
        project: project,
        file: artifacts_file,
        file_type: params[:artifact_type],
        file_format: params[:artifact_format],
        file_sha256: artifacts_file.sha256,
        expire_in: expire_in)

      artifact_metadata = if metadata_file
                            Ci::JobArtifact.new(
                              job_id: job.id,
                              project: project,
                              file: metadata_file,
                              file_type: :metadata,
                              file_format: :gzip,
                              file_sha256: metadata_file.sha256,
                              expire_in: expire_in)
                          end

      [artifact, artifact_metadata]
    end

    def parse_artifact(artifact)
      unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
        return success
      end

      case artifact.file_type
      when 'dotenv' then parse_dotenv_artifact(artifact)
      when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
      else success
      end
    end

    def persist_artifact(artifact, artifact_metadata, params)
      Ci::JobArtifact.transaction do
        artifact.save!
        artifact_metadata&.save!

        # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
        job.update_column(:artifacts_expire_at, artifact.expire_at)
      end

      success
    rescue ActiveRecord::RecordNotUnique => error
      track_exception(error, params)
      error('another artifact of the same type already exists', :bad_request)
    rescue *OBJECT_STORAGE_ERRORS => error
      track_exception(error, params)
      error(error.message, :service_unavailable)
    rescue => error
      track_exception(error, params)
      error(error.message, :bad_request)
    end

    def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
      existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
      return false unless existing_artifact

      existing_artifact.file_sha256 == artifacts_file.sha256
    end

    def track_exception(error, params)
      Gitlab::ErrorTracking.track_exception(error,
        job_id: job.id,
        project_id: job.project_id,
        uploading_type: params[:artifact_type]
      )
    end

    def track_artifact_uploader(artifact)
      return unless artifact.file_type == 'metrics'

      track_usage_event(METRICS_REPORT_UPLOAD_EVENT_NAME, @job.user_id)
    end

    def parse_dotenv_artifact(artifact)
      Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
    end

    def parse_cluster_applications_artifact(artifact)
      Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
    end
  end
end
# frozen_string_literal: true

module Ci
  class DestroyExpiredJobArtifactsService
    include ::Gitlab::ExclusiveLeaseHelpers
    include ::Gitlab::LoopHelpers

    BATCH_SIZE = 100
    LOOP_TIMEOUT = 5.minutes
    LOOP_LIMIT = 1000
    EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
    LOCK_TIMEOUT = 6.minutes

    def initialize
      @removed_artifacts_count = 0
    end

    ##
    # Destroy expired job artifacts on the GitLab instance
    #
    # This destroy process cannot run for more than 6 minutes. That prevents
    # multiple `ExpireBuildArtifactsWorker` cron jobs, which are scheduled every
    # 7 minutes, from running concurrently.
    def execute
      in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
        destroy_job_artifacts_with_slow_iteration(Time.current)
      end

      @removed_artifacts_count
    end

    private

    def destroy_job_artifacts_with_slow_iteration(start_at)
      Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
        # For performance reasons, join with ci_pipelines after the batch is queried.
        # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
        artifacts = relation.unlocked

        service_response = destroy_batch_async(artifacts)
        @removed_artifacts_count += service_response[:destroyed_artifacts_count]

        break if loop_timeout?(start_at)
        break if index >= LOOP_LIMIT
      end
    end

    def destroy_batch_async(artifacts)
      Ci::JobArtifactsDestroyBatchService.new(artifacts).execute
    end

    def loop_timeout?(start_at)
      Time.current > start_at + LOOP_TIMEOUT
    end
  end
end
# frozen_string_literal: true

module Ci
  module JobArtifacts
    class CreateService < ::BaseService
      include Gitlab::Utils::UsageData

      ArtifactsExistError = Class.new(StandardError)

      LSIF_ARTIFACT_TYPE = 'lsif'
      METRICS_REPORT_UPLOAD_EVENT_NAME = 'i_testing_metrics_report_artifact_uploaders'

      OBJECT_STORAGE_ERRORS = [
        Errno::EIO,
        Google::Apis::ServerError,
        Signet::RemoteServerError
      ].freeze

      def initialize(job)
        @job = job
        @project = job.project
      end

      def authorize(artifact_type:, filesize: nil)
        result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
        return result unless result[:status] == :success

        headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))

        if lsif?(artifact_type)
          headers[:ProcessLsif] = true
          track_usage_event('i_source_code_code_intelligence', project.id)
        end

        success(headers: headers)
      end

      def execute(artifacts_file, params, metadata_file: nil)
        result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
        return result unless result[:status] == :success

        return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)

        artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
        result = parse_artifact(artifact)

        track_artifact_uploader(artifact)

        return result unless result[:status] == :success

        persist_artifact(artifact, artifact_metadata, params)
      end

      private

      attr_reader :job, :project

      def validate_requirements(artifact_type:, filesize:)
        return too_large_error if too_large?(artifact_type, filesize)

        success
      end

      def too_large?(type, size)
        size > max_size(type) if size
      end

      def lsif?(type)
        type == LSIF_ARTIFACT_TYPE
      end

      def max_size(type)
        Ci::JobArtifact.max_artifact_size(type: type, project: project)
      end

      def forbidden_type_error(type)
        error("#{type} artifacts are forbidden", :forbidden)
      end

      def too_large_error
        error('file size has reached maximum size limit', :payload_too_large)
      end

      def build_artifact(artifacts_file, params, metadata_file)
        expire_in = params['expire_in'] ||
          Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in

        artifact = Ci::JobArtifact.new(
          job_id: job.id,
          project: project,
          file: artifacts_file,
          file_type: params[:artifact_type],
          file_format: params[:artifact_format],
          file_sha256: artifacts_file.sha256,
          expire_in: expire_in)

        artifact_metadata = if metadata_file
                              Ci::JobArtifact.new(
                                job_id: job.id,
                                project: project,
                                file: metadata_file,
                                file_type: :metadata,
                                file_format: :gzip,
                                file_sha256: metadata_file.sha256,
                                expire_in: expire_in)
                            end

        [artifact, artifact_metadata]
      end

      def parse_artifact(artifact)
        unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
          return success
        end

        case artifact.file_type
        when 'dotenv' then parse_dotenv_artifact(artifact)
        when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
        else success
        end
      end

      def persist_artifact(artifact, artifact_metadata, params)
        Ci::JobArtifact.transaction do
          artifact.save!
          artifact_metadata&.save!

          # NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
          job.update_column(:artifacts_expire_at, artifact.expire_at)
        end

        success
      rescue ActiveRecord::RecordNotUnique => error
        track_exception(error, params)
        error('another artifact of the same type already exists', :bad_request)
      rescue *OBJECT_STORAGE_ERRORS => error
        track_exception(error, params)
        error(error.message, :service_unavailable)
      rescue => error
        track_exception(error, params)
        error(error.message, :bad_request)
      end

      def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
        existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
        return false unless existing_artifact

        existing_artifact.file_sha256 == artifacts_file.sha256
      end

      def track_exception(error, params)
        Gitlab::ErrorTracking.track_exception(error,
          job_id: job.id,
          project_id: job.project_id,
          uploading_type: params[:artifact_type]
        )
      end

      def track_artifact_uploader(artifact)
        return unless artifact.file_type == 'metrics'

        track_usage_event(METRICS_REPORT_UPLOAD_EVENT_NAME, @job.user_id)
      end

      def parse_dotenv_artifact(artifact)
        Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
      end

      def parse_cluster_applications_artifact(artifact)
        Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
      end
    end
  end
end
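For orientation, here is a minimal sketch of how the renamed service is driven, mirroring the API callers updated further down: `authorize` runs as a pre-flight check before Workhorse accepts the upload, then `execute` persists the uploaded file. The `job` and `uploaded_file` objects are hypothetical stand-ins for what the API layer provides.

```ruby
# Hypothetical driver; `job` is a Ci::Build, `uploaded_file` an UploadedFile.
service = Ci::JobArtifacts::CreateService.new(job)

# Pre-flight: returns Workhorse upload headers on success, or an error hash
# such as { status: :error, http_status: :payload_too_large } for oversized files.
result = service.authorize(artifact_type: 'archive', filesize: 1.megabyte)

if result[:status] == :success
  # After upload: builds, optionally parses (dotenv, cluster_applications),
  # and persists the artifact record.
  result = service.execute(uploaded_file, { artifact_type: 'archive', artifact_format: 'zip' })
end

result[:status] # => :success, or an error status such as :bad_request
```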
# frozen_string_literal: true

module Ci
  module JobArtifacts
    class DestroyAllExpiredService
      include ::Gitlab::ExclusiveLeaseHelpers
      include ::Gitlab::LoopHelpers

      BATCH_SIZE = 100
      LOOP_TIMEOUT = 5.minutes
      LOOP_LIMIT = 1000
      EXCLUSIVE_LOCK_KEY = 'expired_job_artifacts:destroy:lock'
      LOCK_TIMEOUT = 6.minutes

      def initialize
        @removed_artifacts_count = 0
      end

      ##
      # Destroy expired job artifacts on the GitLab instance
      #
      # This destroy process cannot run for more than 6 minutes. That prevents
      # multiple `ExpireBuildArtifactsWorker` cron jobs, which are scheduled every
      # 7 minutes, from running concurrently.
      def execute
        in_lock(EXCLUSIVE_LOCK_KEY, ttl: LOCK_TIMEOUT, retries: 1) do
          destroy_job_artifacts_with_slow_iteration(Time.current)
        end

        @removed_artifacts_count
      end

      private

      def destroy_job_artifacts_with_slow_iteration(start_at)
        Ci::JobArtifact.expired_before(start_at).each_batch(of: BATCH_SIZE, column: :expire_at, order: :desc) do |relation, index|
          # For performance reasons, join with ci_pipelines after the batch is queried.
          # See: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/47496
          artifacts = relation.unlocked

          service_response = destroy_batch_async(artifacts)
          @removed_artifacts_count += service_response[:destroyed_artifacts_count]

          break if loop_timeout?(start_at)
          break if index >= LOOP_LIMIT
        end
      end

      def destroy_batch_async(artifacts)
        Ci::JobArtifacts::DestroyBatchService.new(artifacts).execute
      end

      def loop_timeout?(start_at)
        Time.current > start_at + LOOP_TIMEOUT
      end
    end
  end
end
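The constants above interlock: batching stops after `LOOP_TIMEOUT` (5 minutes), the exclusive lease expires after `LOCK_TIMEOUT` (6 minutes), and the `ExpireBuildArtifactsWorker` cron fires every 7 minutes, so two runs should never overlap. Below is a minimal sketch of the same lease pattern in isolation, assuming the `Gitlab::ExclusiveLeaseHelpers` mixin from the GitLab codebase; the class and lock key are hypothetical.

```ruby
# Illustration only: serialize a periodic task with a Redis-backed lease.
class NightlyCleanup
  include ::Gitlab::ExclusiveLeaseHelpers

  LOCK_KEY = 'nightly_cleanup:lock' # hypothetical lease key
  LOCK_TTL = 6.minutes              # must exceed the longest expected run

  def execute
    # Raises Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError if another
    # process still holds the lease after one retry.
    in_lock(LOCK_KEY, ttl: LOCK_TTL, retries: 1) do
      # work that must not run concurrently goes here
    end
  end
end
```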
# frozen_string_literal: true

module Ci
  module JobArtifacts
    class DestroyBatchService
      include BaseServiceUtility
      include ::Gitlab::Utils::StrongMemoize

      # Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
      # Not for use outside of the Ci:: namespace
      #
      # Adds the passed batch of job artifacts to the `ci_deleted_objects` table
      # for asynchronous destruction of the objects in Object Storage via the
      # `Ci::DeleteObjectsService` and then deletes the batch of related
      # `ci_job_artifacts` records.
      #
      # Params:
      # +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
      # +pick_up_at+:: When to pick up the files for deletion
      # Returns:
      # +Hash+:: A hash with `status` and `destroyed_artifacts_count` keys
      def initialize(job_artifacts, pick_up_at: nil)
        @job_artifacts = job_artifacts.with_destroy_preloads.to_a
        @pick_up_at = pick_up_at
      end

      # rubocop: disable CodeReuse/ActiveRecord
      def execute
        return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?

        Ci::DeletedObject.transaction do
          Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
          Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
          destroy_related_records(@job_artifacts)
        end

        # This is executed outside of the transaction because it depends on Redis
        update_project_statistics
        increment_monitoring_statistics(artifacts_count)

        success(destroyed_artifacts_count: artifacts_count)
      end
      # rubocop: enable CodeReuse/ActiveRecord

      private

      # This method is implemented in EE and it must do only database work
      def destroy_related_records(artifacts); end

      def update_project_statistics
        artifacts_by_project = @job_artifacts.group_by(&:project)
        artifacts_by_project.each do |project, artifacts|
          delta = -artifacts.sum { |artifact| artifact.size.to_i }
          ProjectStatistics.increment_statistic(
            project, Ci::JobArtifact.project_statistics_name, delta)
        end
      end

      def increment_monitoring_statistics(size)
        metrics.increment_destroyed_artifacts(size)
      end

      def metrics
        @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
      end

      def artifacts_count
        strong_memoize(:artifacts_count) do
          @job_artifacts.count
        end
      end
    end
  end
end

Ci::JobArtifacts::DestroyBatchService.prepend_if_ee('EE::Ci::JobArtifacts::DestroyBatchService')
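As the class comment says, destruction is two-phase: the database rows move into `ci_deleted_objects` inside a transaction, and the files in object storage are removed later by `Ci::DeleteObjectsService`. A sketch of how a caller inside the `Ci::` namespace might invoke it; the relation and `pick_up_at` value below are hypothetical.

```ruby
# Hypothetical caller: queue one batch of expired, unlocked artifacts.
artifacts = Ci::JobArtifact.expired_before(Time.current).unlocked.limit(100)

result = Ci::JobArtifacts::DestroyBatchService
  .new(artifacts, pick_up_at: 10.minutes.from_now)
  .execute

result[:status]                    # => :success
result[:destroyed_artifacts_count] # => rows deleted from ci_job_artifacts
```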
# frozen_string_literal: true

module Ci
  class JobArtifactsDestroyBatchService
    include BaseServiceUtility
    include ::Gitlab::Utils::StrongMemoize

    # Danger: Private - Should only be called in Ci Services that pass a batch of job artifacts
    # Not for use outside of the ci namespace
    #
    # Adds the passed batch of job artifacts to the `ci_deleted_objects` table
    # for asynchronous destruction of the objects in Object Storage via the
    # `Ci::DeleteObjectsService` and then deletes the batch of related
    # `ci_job_artifacts` records.
    #
    # Params:
    # +job_artifacts+:: A relation of job artifacts to destroy (fewer than MAX_JOB_ARTIFACT_BATCH_SIZE)
    # +pick_up_at+:: When to pick up the files for deletion
    # Returns:
    # +Hash+:: A hash with `status` and `destroyed_artifacts_count` keys
    def initialize(job_artifacts, pick_up_at: nil)
      @job_artifacts = job_artifacts.with_destroy_preloads.to_a
      @pick_up_at = pick_up_at
    end

    # rubocop: disable CodeReuse/ActiveRecord
    def execute
      return success(destroyed_artifacts_count: artifacts_count) if @job_artifacts.empty?

      Ci::DeletedObject.transaction do
        Ci::DeletedObject.bulk_import(@job_artifacts, @pick_up_at)
        Ci::JobArtifact.id_in(@job_artifacts.map(&:id)).delete_all
        destroy_related_records(@job_artifacts)
      end

      # This is executed outside of the transaction because it depends on Redis
      update_project_statistics
      increment_monitoring_statistics(artifacts_count)

      success(destroyed_artifacts_count: artifacts_count)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    private

    # This method is implemented in EE and it must do only database work
    def destroy_related_records(artifacts); end

    def update_project_statistics
      artifacts_by_project = @job_artifacts.group_by(&:project)
      artifacts_by_project.each do |project, artifacts|
        delta = -artifacts.sum { |artifact| artifact.size.to_i }
        ProjectStatistics.increment_statistic(
          project, Ci::JobArtifact.project_statistics_name, delta)
      end
    end

    def increment_monitoring_statistics(size)
      metrics.increment_destroyed_artifacts(size)
    end

    def metrics
      @metrics ||= ::Gitlab::Ci::Artifacts::Metrics.new
    end

    def artifacts_count
      strong_memoize(:artifacts_count) do
        @job_artifacts.count
      end
    end
  end
end

Ci::JobArtifactsDestroyBatchService.prepend_if_ee('EE::Ci::JobArtifactsDestroyBatchService')
@@ -2,7 +2,7 @@
 module Ci
   module PipelineArtifacts
-    class DestroyExpiredArtifactsService
+    class DestroyAllExpiredService
       include ::Gitlab::LoopHelpers
       include ::Gitlab::Utils::StrongMemoize
@@ -14,7 +14,7 @@ module Ci
       feature_category :continuous_integration

       def perform
-        service = ::Ci::PipelineArtifacts::DestroyExpiredArtifactsService.new
+        service = ::Ci::PipelineArtifacts::DestroyAllExpiredService.new
         artifacts_count = service.execute
         log_extra_metadata_on_done(:destroyed_pipeline_artifacts_count, artifacts_count)
       end
@@ -10,7 +10,7 @@ class ExpireBuildArtifactsWorker # rubocop:disable Scalability/IdempotentWorker
   feature_category :continuous_integration

   def perform
-    service = Ci::DestroyExpiredJobArtifactsService.new
+    service = Ci::JobArtifacts::DestroyAllExpiredService.new
     artifacts_count = service.execute
     log_extra_metadata_on_done(:destroyed_job_artifacts_count, artifacts_count)
   end
# frozen_string_literal: true

module EE
  module Ci
    module JobArtifacts
      module DestroyBatchService
        extend ::Gitlab::Utils::Override

        private

        override :destroy_related_records
        def destroy_related_records(artifacts)
          destroy_security_findings(artifacts)
        end

        def destroy_security_findings(artifacts)
          job_ids = artifacts.map(&:job_id)

          ::Security::Finding.by_build_ids(job_ids).delete_all
        end
      end
    end
  end
end
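This module takes effect through the `prepend_if_ee` call at the bottom of the CE class above: prepending places the EE module before the class in the ancestor chain, so its `destroy_related_records` shadows the CE no-op, while `override` verifies at load time that a matching method actually exists. A stripped-down sketch of the mechanism with hypothetical names:

```ruby
# How prepend-based overrides resolve, in isolation.
module FooExtension
  def destroy_related_records(artifacts)
    puts 'EE behaviour runs'
    super # falls through to the CE definition if desired
  end
end

class Foo
  def destroy_related_records(artifacts); end # CE no-op hook
end

Foo.prepend(FooExtension) # roughly what prepend_if_ee does under an EE license
Foo.new.destroy_related_records([]) # prints 'EE behaviour runs'
```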
# frozen_string_literal: true

module EE
  module Ci
    module JobArtifactsDestroyBatchService
      extend ::Gitlab::Utils::Override

      private

      override :destroy_related_records
      def destroy_related_records(artifacts)
        destroy_security_findings(artifacts)
      end

      def destroy_security_findings(artifacts)
        job_ids = artifacts.map(&:job_id)

        ::Security::Finding.by_build_ids(job_ids).delete_all
      end
    end
  end
end
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state do
+RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_shared_state do
   include ExclusiveLeaseHelpers

   describe '.execute' do
@@ -62,7 +62,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
     context 'when failed to destroy artifact' do
       before do
-        stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 10)
+        stub_const('Ci::JobArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 10)

         expect(Ci::DeletedObject)
           .to receive(:bulk_import)
           .once
context 'when there are artifacts more than batch sizes' do
before do
stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
stub_const('Ci::JobArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
second_artifact.job.pipeline.unlocked!
end
......
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe Ci::JobArtifactsDestroyBatchService do
+RSpec.describe Ci::JobArtifacts::DestroyBatchService do
   describe '.execute' do
     subject { service.execute }
@@ -245,7 +245,7 @@ module API
             job = authenticate_job!

-            result = ::Ci::CreateJobArtifactsService.new(job).authorize(artifact_type: params[:artifact_type], filesize: params[:filesize])
+            result = ::Ci::JobArtifacts::CreateService.new(job).authorize(artifact_type: params[:artifact_type], filesize: params[:filesize])

             if result[:status] == :success
               content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
@@ -284,7 +284,7 @@ module API
             artifacts = params[:file]
             metadata = params[:metadata]

-            result = ::Ci::CreateJobArtifactsService.new(job).execute(artifacts, params, metadata_file: metadata)
+            result = ::Ci::JobArtifacts::CreateService.new(job).execute(artifacts, params, metadata_file: metadata)

             if result[:status] == :success
               status :created
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe Ci::CreateJobArtifactsService do
+RSpec.describe Ci::JobArtifacts::CreateService do
   let_it_be(:project) { create(:project) }

   let(:service) { described_class.new(job) }
   let(:job) { create(:ci_build, project: project) }
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state do
+RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_shared_state do
   include ExclusiveLeaseHelpers

   let(:service) { described_class.new }
@@ -24,7 +24,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
       job = create(:ci_build, pipeline: artifact.job.pipeline)
       create(:ci_job_artifact, :archive, :expired, job: job)

-      stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
+      stub_const("#{described_class}::LOOP_LIMIT", 1)
     end

     it 'performs the smallest number of queries for job_artifacts' do
@@ -113,7 +113,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
     context 'when failed to destroy artifact' do
       before do
-        stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 10)
+        stub_const("#{described_class}::LOOP_LIMIT", 10)
       end

       context 'when the import fails' do
@@ -159,8 +159,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
       let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }

       before do
-        stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_TIMEOUT', 0.seconds)
-        stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+        stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+        stub_const("#{described_class}::BATCH_SIZE", 1)

         second_artifact.job.pipeline.unlocked!
       end
@@ -176,8 +176,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
     context 'when loop reached loop limit' do
       before do
-        stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
-        stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+        stub_const("#{described_class}::LOOP_LIMIT", 1)
+        stub_const("#{described_class}::BATCH_SIZE", 1)

         second_artifact.job.pipeline.unlocked!
       end
@@ -209,7 +209,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
     context 'when there are artifacts more than batch sizes' do
       before do
-        stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+        stub_const("#{described_class}::BATCH_SIZE", 1)

         second_artifact.job.pipeline.unlocked!
       end
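Worth noting in the spec changes above: hard-coded constant paths such as `'Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT'` would break on every rename, while interpolating `described_class` keeps the stub tied to whatever class the file describes. A minimal sketch of the idiom:

```ruby
# Rename-proof constant stubbing with rspec-mocks.
RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService do
  before do
    stub_const("#{described_class}::BATCH_SIZE", 1)
  end

  it 'sees the stubbed value' do
    expect(described_class::BATCH_SIZE).to eq(1)
  end
end
```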
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe Ci::JobArtifactsDestroyBatchService do
+RSpec.describe Ci::JobArtifacts::DestroyBatchService do
   include ExclusiveLeaseHelpers

   let(:artifacts) { Ci::JobArtifact.all }
@@ -2,7 +2,7 @@

 require 'spec_helper'

-RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
+RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
   let(:service) { described_class.new }

   describe '.execute' do
@@ -10,7 +10,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
     context 'when timeout happens' do
       before do
-        stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_TIMEOUT', 0.1.seconds)
+        stub_const('Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_TIMEOUT', 0.1.seconds)

         allow(service).to receive(:destroy_artifacts_batch) { true }
       end
@@ -27,8 +27,8 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
     context 'when the loop limit is reached' do
       before do
-        stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_LIMIT', 1)
-        stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
+        stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 1)
+        stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)

         create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
       end
@@ -44,7 +44,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
     context 'when there are artifacts more than batch sizes' do
       before do
-        stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
+        stub_const('Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)

         create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
       end
@@ -11,7 +11,7 @@ RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
   end

   it 'executes a service' do
-    expect_next_instance_of(::Ci::PipelineArtifacts::DestroyExpiredArtifactsService) do |instance|
+    expect_next_instance_of(::Ci::PipelineArtifacts::DestroyAllExpiredService) do |instance|
       expect(instance).to receive(:execute)
     end
@@ -7,7 +7,7 @@ RSpec.describe ExpireBuildArtifactsWorker do
   describe '#perform' do
     it 'executes a service' do
-      expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
+      expect_next_instance_of(Ci::JobArtifacts::DestroyAllExpiredService) do |instance|
         expect(instance).to receive(:execute).and_call_original
       end