Commit e5a4f704 authored by Erick Bajao

Add plan limits for max size per artifact type

This is behind the ci_max_artifact_size_per_type feature flag.

Also adds support for an alternate_limit fallback parameter when checking plan limits.
parent a30e2b49
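
For orientation before the diff: a rough sketch (not part of the commit) of how the new lookup resolves the effective maximum for an artifact type. The project variable is assumed; the names match the code in the diff below.

# Illustration only. With the ci_max_artifact_size_per_type flag enabled for the
# project, the per-type plan limit column ("ci_max_artifact_size_<type>", in MB)
# is consulted; a value of 0 means that limit is disabled. The project's closest
# max_artifacts_size setting (project -> namespace -> application setting) is
# passed in as the alternate limit, and the smallest positive of the two wins.
# The result is converted from MB to bytes.
max_size_in_bytes = Ci::JobArtifact.max_artifact_size(type: 'junit', project: project)

# With the flag disabled, behavior is unchanged:
# project.closest_setting(:max_artifacts_size).megabytes.to_i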
@@ -100,6 +100,8 @@ module Ci
TYPE_AND_FORMAT_PAIRS = INTERNAL_TYPES.merge(REPORT_TYPES).freeze
PLAN_LIMIT_PREFIX = 'ci_max_artifact_size_'
# This is required since we cannot add a default to the database
# https://gitlab.com/gitlab-org/gitlab/-/issues/215418
attribute :locked, :boolean, default: false
@@ -289,6 +291,21 @@ module Ci
where(job_id: job_id).trace.take&.file&.file&.exists?
end
def self.max_artifact_size(type:, project:)
max_size = if Feature.enabled?(:ci_max_artifact_size_per_type, project, default_enabled: false)
limit_name = "#{PLAN_LIMIT_PREFIX}#{type}"
project.actual_limits.limit_for(
limit_name,
alternate_limit: -> { project.closest_setting(:max_artifacts_size) }
)
else
project.closest_setting(:max_artifacts_size)
end
max_size&.megabytes.to_i
end
private
def file_format_adapter_class
...
# frozen_string_literal: true
class PlanLimits < ApplicationRecord
LimitUndefinedError = Class.new(StandardError)
belongs_to :plan
def exceeded?(limit_name, subject, alternate_limit: 0)
limit = limit_for(limit_name, alternate_limit: alternate_limit)
return false unless limit
case subject
when Integer
subject >= limit
when ActiveRecord::Relation
# We intentionally not accept just plain ApplicationRecord classes to
# enforce the subject to be scoped down to a relation first.
#
# subject.count >= limit value is slower than checking
# if a record exists at the limit value - 1 position.
subject.offset(limit - 1).exists?
else
raise ArgumentError, "#{subject.class} is not supported as a limit value"
end
end
def limit_for(limit_name, alternate_limit: 0)
limit = read_attribute(limit_name)
raise LimitUndefinedError, "The limit `#{limit_name}` is undefined" if limit.nil?
alternate_limit = alternate_limit.call if alternate_limit.respond_to?(:call)
limits = [limit, alternate_limit]
limits.map(&:to_i).select(&:positive?).min
end
end
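
As a quick illustration of the semantics above (not part of the diff; plan_limits stands for any PlanLimits record whose ci_max_artifact_size_junit column starts at its default of 0):

plan_limits.limit_for(:ci_max_artifact_size_junit)                              # => nil (0 means disabled, no alternate given)
plan_limits.limit_for(:ci_max_artifact_size_junit, alternate_limit: 100)        # => 100 (falls back to the alternate)
plan_limits.limit_for(:ci_max_artifact_size_junit, alternate_limit: -> { 10 })  # => 10 (procs are evaluated lazily)
plan_limits.update!(ci_max_artifact_size_junit: 50)
plan_limits.limit_for(:ci_max_artifact_size_junit, alternate_limit: 100)        # => 50 (smallest positive value wins)
plan_limits.limit_for(:project_foo)                                             # raises PlanLimits::LimitUndefinedError
plan_limits.exceeded?(:ci_max_artifact_size_junit, 75, alternate_limit: 100)    # => true (75 >= 50)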
# frozen_string_literal: true
module Ci
class AuthorizeJobArtifactService
include Gitlab::Utils::StrongMemoize
# Max size of the zipped LSIF artifact
LSIF_ARTIFACT_MAX_SIZE = 20.megabytes
LSIF_ARTIFACT_TYPE = 'lsif'
def initialize(job, params, max_size:)
@job = job
@max_size = max_size
@size = params[:filesize]
@type = params[:artifact_type].to_s
end
def forbidden?
lsif? && !code_navigation_enabled?
end
def too_large?
size && max_size <= size.to_i
end
def headers
default_headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size)
default_headers.tap do |h|
h[:ProcessLsif] = true if lsif? && code_navigation_enabled?
end
end
private
attr_reader :job, :size, :type
def code_navigation_enabled?
strong_memoize(:code_navigation_enabled) do
Feature.enabled?(:code_navigation, job.project, default_enabled: true)
end
end
def lsif?
strong_memoize(:lsif) do
type == LSIF_ARTIFACT_TYPE
end
end
def max_size
lsif? ? LSIF_ARTIFACT_MAX_SIZE : @max_size.to_i
end
end
end
@@ -3,42 +3,100 @@
module Ci
class CreateJobArtifactsService < ::BaseService
ArtifactsExistError = Class.new(StandardError)
LSIF_ARTIFACT_TYPE = 'lsif'
OBJECT_STORAGE_ERRORS = [
Errno::EIO,
Google::Apis::ServerError,
Signet::RemoteServerError
].freeze
def initialize(job)
@job = job
@project = job.project
end
def authorize(artifact_type:, filesize: nil)
result = validate_requirements(artifact_type: artifact_type, filesize: filesize)
return result unless result[:status] == :success
headers = JobArtifactUploader.workhorse_authorize(has_length: false, maximum_size: max_size(artifact_type))
headers[:ProcessLsif] = true if lsif?(artifact_type)
success(headers: headers)
end
def execute(artifacts_file, params, metadata_file: nil)
result = validate_requirements(artifact_type: params[:artifact_type], filesize: artifacts_file.size)
return result unless result[:status] == :success
return success if sha256_matches_existing_artifact?(params[:artifact_type], artifacts_file)
artifact, artifact_metadata = build_artifact(artifacts_file, params, metadata_file)
result = parse_artifact(artifact)
return result unless result[:status] == :success
persist_artifact(artifact, artifact_metadata, params)
end
private
attr_reader :job, :project
def validate_requirements(artifact_type:, filesize:)
return forbidden_type_error(artifact_type) if forbidden_type?(artifact_type)
return too_large_error if too_large?(artifact_type, filesize)
success
end
def forbidden_type?(type)
lsif?(type) && !code_navigation_enabled?
end
def too_large?(type, size)
size > max_size(type) if size
end
def code_navigation_enabled?
Feature.enabled?(:code_navigation, project, default_enabled: true)
end
def lsif?(type)
type == LSIF_ARTIFACT_TYPE
end
def max_size(type)
Ci::JobArtifact.max_artifact_size(type: type, project: project)
end
def forbidden_type_error(type)
error("#{type} artifacts are forbidden", :forbidden)
end
def too_large_error
error('file size has reached maximum size limit', :payload_too_large)
end
def build_artifact(artifacts_file, params, metadata_file)
expire_in = params['expire_in'] ||
Gitlab::CurrentSettings.current_application_settings.default_artifacts_expire_in
artifact = Ci::JobArtifact.new(
job_id: job.id,
project: project,
file: artifacts_file,
file_type: params[:artifact_type],
file_format: params[:artifact_format],
file_sha256: artifacts_file.sha256,
expire_in: expire_in)
artifact_metadata = if metadata_file
Ci::JobArtifact.new(
job_id: job.id,
project: project,
file: metadata_file,
file_type: :metadata,
file_format: :gzip,
@@ -46,7 +104,7 @@ module Ci
expire_in: expire_in)
end
if Feature.enabled?(:keep_latest_artifact_for_ref, project)
artifact.locked = true
artifact_metadata&.locked = true
end
@@ -54,23 +112,23 @@ module Ci
[artifact, artifact_metadata]
end
def parse_artifact(artifact)
unless Feature.enabled?(:ci_synchronous_artifact_parsing, project, default_enabled: true)
return success
end
case artifact.file_type
when 'dotenv' then parse_dotenv_artifact(artifact)
when 'cluster_applications' then parse_cluster_applications_artifact(artifact)
else success
end
end
def persist_artifact(artifact, artifact_metadata, params)
Ci::JobArtifact.transaction do
artifact.save!
artifact_metadata&.save!
unlock_previous_artifacts!
# NOTE: The `artifacts_expire_at` column is already deprecated and to be removed in the near future.
job.update_column(:artifacts_expire_at, artifact.expire_at)
@@ -78,42 +136,42 @@ module Ci
success
rescue ActiveRecord::RecordNotUnique => error
track_exception(error, params)
error('another artifact of the same type already exists', :bad_request)
rescue *OBJECT_STORAGE_ERRORS => error
track_exception(error, params)
error(error.message, :service_unavailable)
rescue => error
track_exception(error, params)
error(error.message, :bad_request)
end
def unlock_previous_artifacts!
return unless Feature.enabled?(:keep_latest_artifact_for_ref, project)
Ci::JobArtifact.for_ref(job.ref, project.id).locked.update_all(locked: false)
end
def sha256_matches_existing_artifact?(artifact_type, artifacts_file)
existing_artifact = job.job_artifacts.find_by_file_type(artifact_type)
return false unless existing_artifact
existing_artifact.file_sha256 == artifacts_file.sha256
end
def track_exception(error, params)
Gitlab::ErrorTracking.track_exception(error,
job_id: job.id,
project_id: job.project_id,
uploading_type: params[:artifact_type]
)
end
def parse_dotenv_artifact(artifact)
Ci::ParseDotenvArtifactService.new(project, current_user).execute(artifact)
end
def parse_cluster_applications_artifact(artifact)
Clusters::ParseClusterApplicationsArtifactService.new(job, job.user).execute(artifact)
end
end
...
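
For readers following the call sites, a brief usage sketch of the reshaped service (illustrative only; job, params, artifacts, and metadata mirror what the Runner API below passes in):

# Pre-upload authorization: Workhorse asks whether the upload may proceed and
# receives upload headers back; the size and type checks now live in the service.
result = Ci::CreateJobArtifactsService.new(job).authorize(
  artifact_type: params[:artifact_type],
  filesize: params[:filesize] # may be nil when the runner streams the archive
)
result[:headers] if result[:status] == :success

# Actual upload: the same validations run against the received file before the
# artifact (plus optional metadata) is built, parsed, and persisted.
Ci::CreateJobArtifactsService.new(job).execute(artifacts, params, metadata_file: metadata)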
---
title: Add plan limits for max size per artifact type
merge_request: 34767
author:
type: added
# frozen_string_literal: true
class AddPlanLimitsForMaxSizePerArtifactType < ActiveRecord::Migration[6.0]
DOWNTIME = false
def change
# We need to set the 20mb default for lsif for backward compatibility
# See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/34767#note_371619075
add_column :plan_limits, "ci_max_artifact_size_lsif", :integer, default: 20, null: false
artifact_types.each do |type|
add_column :plan_limits, "ci_max_artifact_size_#{type}", :integer, default: 0, null: false
end
end
private
def artifact_types
# The list of artifact types (except lsif) from Ci::JobArtifact file_type enum as of this writing.
# Intentionally duplicated so that the migration won't change behavior
# if ever we remove or add more to the list later on.
%w[
archive
metadata
trace
junit
sast
dependency_scanning
container_scanning
dast
codequality
license_management
license_scanning
performance
metrics
metrics_referee
network_referee
dotenv
cobertura
terraform
accessibility
cluster_applications
secret_detection
requirements
coverage_fuzzing
]
end
end
@@ -13733,7 +13733,31 @@ CREATE TABLE public.plan_limits (
ci_pipeline_schedules integer DEFAULT 10 NOT NULL,
offset_pagination_limit integer DEFAULT 50000 NOT NULL,
ci_instance_level_variables integer DEFAULT 25 NOT NULL,
storage_size_limit integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_lsif integer DEFAULT 20 NOT NULL,
ci_max_artifact_size_archive integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_metadata integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_trace integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_junit integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_sast integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_dependency_scanning integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_container_scanning integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_dast integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_codequality integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_license_management integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_license_scanning integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_performance integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_metrics integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_metrics_referee integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_network_referee integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_dotenv integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_cobertura integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_terraform integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_accessibility integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_cluster_applications integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_secret_detection integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_requirements integer DEFAULT 0 NOT NULL,
ci_max_artifact_size_coverage_fuzzing integer DEFAULT 0 NOT NULL
);
CREATE SEQUENCE public.plan_limits_id_seq
@@ -23463,6 +23487,7 @@ COPY "schema_migrations" (version) FROM STDIN;
20200615193524
20200615232735
20200615234047
20200616124338
20200616145031
20200617000757
20200617001001
...
@@ -218,25 +218,31 @@ module API
params do
requires :id, type: Integer, desc: %q(Job's ID)
optional :token, type: String, desc: %q(Job's authentication token)
# NOTE:
# In current runner, filesize parameter would be empty here. This is because archive is streamed by runner,
# so the archive size is not known ahead of time. Streaming is done to not use additional I/O on
# Runner to first save, and then send via Network.
optional :filesize, type: Integer, desc: %q(Artifacts filesize)
optional :artifact_type, type: String, desc: %q(The type of artifact),
default: 'archive', values: ::Ci::JobArtifact.file_types.keys
end
post '/:id/artifacts/authorize' do
not_allowed! unless Gitlab.config.artifacts.enabled
require_gitlab_workhorse!
Gitlab::Workhorse.verify_api_request!(headers)
job = authenticate_job!
result = ::Ci::CreateJobArtifactsService.new(job).authorize(artifact_type: params[:artifact_type], filesize: params[:filesize])
if result[:status] == :success
content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
status :ok
result[:headers]
else
render_api_error!(result[:message], result[:http_status])
end
end
desc 'Upload artifacts for job' do
@@ -267,9 +273,7 @@ module API
artifacts = params[:file]
metadata = params[:metadata]
result = ::Ci::CreateJobArtifactsService.new(job).execute(artifacts, params, metadata_file: metadata)
if result[:status] == :success
status :created
...
@@ -69,11 +69,6 @@ module API
token && job.valid_token?(token)
end
def max_artifacts_size(job)
max_size = job.project.closest_setting(:max_artifacts_size)
max_size.megabytes.to_i
end
def job_forbidden!(job, reason)
header 'Job-Status', job.status
forbidden!(reason)
...
@@ -501,4 +501,100 @@ RSpec.describe Ci::JobArtifact do
end
end
end
describe '.file_types' do
context 'all file types have corresponding limit' do
let_it_be(:plan_limits) { create(:plan_limits) }
where(:file_type) do
described_class.file_types.keys
end
with_them do
let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{file_type}" }
it { expect(plan_limits.attributes).to include(limit_name), file_type_limit_failure_message(file_type, limit_name) }
end
end
end
describe '.max_artifact_size' do
let(:build) { create(:ci_build) }
subject(:max_size) { described_class.max_artifact_size(type: artifact_type, project: build.project) }
context 'when file type is supported' do
let(:project_closest_setting) { 1024 }
let(:artifact_type) { 'junit' }
before do
stub_feature_flags(ci_max_artifact_size_per_type: flag_enabled)
allow(build.project).to receive(:closest_setting).with(:max_artifacts_size).and_return(project_closest_setting)
end
shared_examples_for 'basing off the project closest setting' do
it { is_expected.to eq(project_closest_setting.megabytes.to_i) }
end
shared_examples_for 'basing off the plan limit' do
it { is_expected.to eq(max_size_for_type.megabytes.to_i) }
end
context 'and feature flag for custom max size per type is enabled' do
let(:flag_enabled) { true }
let(:limit_name) { "#{described_class::PLAN_LIMIT_PREFIX}#{artifact_type}" }
let!(:plan_limits) { create(:plan_limits, :default_plan) }
context 'and plan limit is disabled for the given artifact type' do
before do
plan_limits.update!(limit_name => 0)
end
it_behaves_like 'basing off the project closest setting'
context 'and project closest setting results to zero' do
let(:project_closest_setting) { 0 }
it { is_expected.to eq(0) }
end
end
context 'and plan limit is enabled for the given artifact type' do
before do
plan_limits.update!(limit_name => max_size_for_type)
end
context 'and plan limit is smaller than project setting' do
let(:max_size_for_type) { project_closest_setting - 1 }
it_behaves_like 'basing off the plan limit'
end
context 'and plan limit is bigger than project setting' do
let(:max_size_for_type) { project_closest_setting + 1 }
it_behaves_like 'basing off the project closest setting'
end
end
end
context 'and feature flag for custom max size per type is disabled' do
let(:flag_enabled) { false }
it_behaves_like 'basing off the project closest setting'
end
end
end
def file_type_limit_failure_message(type, limit_name)
<<~MSG
The artifact type `#{type}` is missing its counterpart plan limit which is expected to be named `#{limit_name}`.
Please refer to https://docs.gitlab.com/ee/development/application_limits.html on how to add new plan limit columns.
Take note that while existing max size plan limits default to 0, succeeding new limits are recommended to have
non-zero default values.
MSG
end
end
@@ -3,57 +3,214 @@
require 'spec_helper'
RSpec.describe PlanLimits do
let_it_be(:project) { create(:project) }
let_it_be(:plan_limits) { create(:plan_limits) }
let(:project_hooks_count) { 2 }
before do
create_list(:project_hook, project_hooks_count, project: project)
end
describe '#exceeded?' do
let(:alternate_limit) { double('an alternate limit value') }
subject(:exceeded_limit) { plan_limits.exceeded?(:project_hooks, limit_subject, alternate_limit: alternate_limit) }
before do
allow(plan_limits).to receive(:limit_for).with(:project_hooks, alternate_limit: alternate_limit).and_return(limit)
end
shared_examples_for 'comparing limits' do
context 'when limit for given name results to a disabled value' do
let(:limit) { nil }
it { is_expected.to eq(false) }
end
context 'when limit for given name results to a non-disabled value' do
context 'and given count is smaller than limit' do
let(:limit) { project_hooks_count + 1 }
it { is_expected.to eq(false) }
end
context 'and given count is equal to the limit' do
let(:limit) { project_hooks_count }
it { is_expected.to eq(true) }
end
context 'and given count is greater than the limit' do
let(:limit) { project_hooks_count - 1 }
it { is_expected.to eq(true) }
end
end
end
context 'when given limit subject is an integer' do
let(:limit_subject) { project.hooks.count }
it_behaves_like 'comparing limits'
end
context 'when given limit subject is an ActiveRecord::Relation' do
let(:limit_subject) { project.hooks }
it_behaves_like 'comparing limits'
end
context 'when given limit subject is something else' do
let(:limit_subject) { ProjectHook }
let(:limit) { 100 }
it 'raises an error' do
expect { exceeded_limit }.to raise_error(ArgumentError)
end
end
end
describe '#limit_for' do
let(:alternate_limit) { nil }
subject(:limit) { plan_limits.limit_for(:project_hooks, alternate_limit: alternate_limit) }
context 'when given limit name does not exist' do
it 'raises an error' do
expect { plan_limits.limit_for(:project_foo) }.to raise_error(described_class::LimitUndefinedError)
end
end
context 'when given limit name is disabled' do
before do
plan_limits.update!(project_hooks: 0)
end
it { is_expected.to eq(nil) }
context 'and alternate_limit is a non-zero integer' do
let(:alternate_limit) { 1 }
it { is_expected.to eq(1) }
end
context 'and alternate_limit is zero' do
let(:alternate_limit) { 0 }
it { is_expected.to eq(nil) }
end
context 'and alternate_limit is a proc that returns non-zero integer' do
let(:alternate_limit) { -> { 1 } }
it { is_expected.to eq(1) }
end
context 'and alternate_limit is a proc that returns zero' do
let(:alternate_limit) { -> { 0 } }
it { is_expected.to eq(nil) }
end
context 'and alternate_limit is a proc that returns nil' do
let(:alternate_limit) { -> { nil } }
it { is_expected.to eq(nil) }
end
end
context 'when given limit name is enabled' do
let(:plan_limit_value) { 2 }
before do
plan_limits.update!(project_hooks: plan_limit_value)
end
context 'and alternate_limit is a non-zero integer that is bigger than the plan limit' do
let(:alternate_limit) { plan_limit_value + 1 }
it { is_expected.to eq(plan_limit_value) }
end
context 'and alternate_limit is a non-zero integer that is smaller than the plan limit' do
let(:alternate_limit) { plan_limit_value - 1 }
it { is_expected.to eq(alternate_limit) }
end
context 'and alternate_limit is zero' do
let(:alternate_limit) { 0 }
it { is_expected.to eq(plan_limit_value) }
end
context 'and alternate_limit is a proc that returns non-zero integer that is bigger than the plan limit' do
let(:alternate_limit) { -> { plan_limit_value + 1 } }
it { is_expected.to eq(plan_limit_value) }
end
context 'and alternate_limit is a proc that returns non-zero integer that is smaller than the plan limit' do
let(:alternate_limit) { -> { plan_limit_value - 1 } }
it { is_expected.to eq(alternate_limit.call) }
end
context 'and alternate_limit is a proc that returns zero' do
let(:alternate_limit) { -> { 0 } }
it { is_expected.to eq(plan_limit_value) }
end
context 'and alternate_limit is a proc that returns nil' do
let(:alternate_limit) { -> { nil } }
it { is_expected.to eq(plan_limit_value) }
end
end
end
context 'validates default values' do
# TODO: For now, these columns have default values set to 0.
# Each artifact type listed here have their own matching issues to determine
# the actual limit value. In each of those issues, the default value should also be updated to
# a non-zero value. Also update existing values of zero to whatever the default value will be.
# For a list of the issues, see: https://gitlab.com/gitlab-org/gitlab/-/issues/211378#note_355619970
let(:disabled_max_artifact_size_columns) do
%w[
ci_max_artifact_size_archive
ci_max_artifact_size_metadata
ci_max_artifact_size_trace
ci_max_artifact_size_junit
ci_max_artifact_size_sast
ci_max_artifact_size_dependency_scanning
ci_max_artifact_size_container_scanning
ci_max_artifact_size_dast
ci_max_artifact_size_codequality
ci_max_artifact_size_license_management
ci_max_artifact_size_license_scanning
ci_max_artifact_size_performance
ci_max_artifact_size_metrics
ci_max_artifact_size_metrics_referee
ci_max_artifact_size_network_referee
ci_max_artifact_size_dotenv
ci_max_artifact_size_cobertura
ci_max_artifact_size_terraform
ci_max_artifact_size_accessibility
ci_max_artifact_size_cluster_applications
ci_max_artifact_size_secret_detection
ci_max_artifact_size_requirements
ci_max_artifact_size_coverage_fuzzing
]
end
let(:columns_with_zero) do
%w[
ci_active_pipelines
ci_pipeline_size
ci_active_jobs
storage_size_limit
] + disabled_max_artifact_size_columns
end
it "has positive values for enabled limits" do
...
@@ -1592,8 +1592,105 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
job.run!
end
shared_examples_for 'rejecting artifacts that are too large' do
let(:filesize) { 100.megabytes.to_i }
let(:sample_max_size) { (filesize / 1.megabyte) - 10 } # Set max size to be smaller than file size to trigger error
shared_examples_for 'failed request' do
it 'responds with payload too large error' do
send_request
expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
context 'based on plan limit setting' do
let(:application_max_size) { sample_max_size + 100 }
let(:limit_name) { "#{Ci::JobArtifact::PLAN_LIMIT_PREFIX}archive" }
before do
create(:plan_limits, :default_plan, limit_name => sample_max_size)
stub_application_setting(max_artifacts_size: application_max_size)
end
context 'and feature flag ci_max_artifact_size_per_type is enabled' do
before do
stub_feature_flags(ci_max_artifact_size_per_type: true)
end
it_behaves_like 'failed request'
end
context 'and feature flag ci_max_artifact_size_per_type is disabled' do
before do
stub_feature_flags(ci_max_artifact_size_per_type: false)
end
it 'bases off the project closest setting' do
send_request
expect(response).to have_gitlab_http_status(success_code)
end
end
end
context 'based on application setting' do
before do
stub_application_setting(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
context 'based on root namespace setting' do
let(:application_max_size) { sample_max_size + 10 }
before do
stub_application_setting(max_artifacts_size: application_max_size)
root_namespace.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
context 'based on child namespace setting' do
let(:application_max_size) { sample_max_size + 10 }
let(:root_namespace_max_size) { sample_max_size + 10 }
before do
stub_application_setting(max_artifacts_size: application_max_size)
root_namespace.update!(max_artifacts_size: root_namespace_max_size)
namespace.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
context 'based on project setting' do
let(:application_max_size) { sample_max_size + 10 }
let(:root_namespace_max_size) { sample_max_size + 10 }
let(:child_namespace_max_size) { sample_max_size + 10 }
before do
stub_application_setting(max_artifacts_size: application_max_size)
root_namespace.update!(max_artifacts_size: root_namespace_max_size)
namespace.update!(max_artifacts_size: child_namespace_max_size)
project.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'failed request'
end
end
describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
context 'when using token as parameter' do
context 'and the artifact is too large' do
it_behaves_like 'rejecting artifacts that are too large' do
let(:success_code) { :ok }
let(:send_request) { authorize_artifacts_with_token_in_params(filesize: filesize) }
end
end
context 'posting artifacts to running job' do
subject do
authorize_artifacts_with_token_in_params
@@ -1651,56 +1748,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
end
context 'when artifact is too large' do
let(:sample_max_size) { 100 }
shared_examples_for 'rejecting too large artifacts' do
it 'fails to post' do
authorize_artifacts_with_token_in_params(filesize: sample_max_size.megabytes.to_i)
expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
context 'based on application setting' do
before do
stub_application_setting(max_artifacts_size: sample_max_size)
end
it_behaves_like 'rejecting too large artifacts'
end
context 'based on root namespace setting' do
before do
stub_application_setting(max_artifacts_size: 200)
root_namespace.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'rejecting too large artifacts'
end
context 'based on child namespace setting' do
before do
stub_application_setting(max_artifacts_size: 200)
root_namespace.update!(max_artifacts_size: 200)
namespace.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'rejecting too large artifacts'
end
context 'based on project setting' do
before do
stub_application_setting(max_artifacts_size: 200)
root_namespace.update!(max_artifacts_size: 200)
namespace.update!(max_artifacts_size: 200)
project.update!(max_artifacts_size: sample_max_size)
end
it_behaves_like 'rejecting too large artifacts'
end
end
end
context 'when using token as header' do
@@ -1757,12 +1804,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['ProcessLsif']).to be_truthy
end
it 'fails to authorize too large artifact' do
authorize_artifacts_with_token_in_headers(artifact_type: :lsif, filesize: 30.megabytes)
expect(response).to have_gitlab_http_status(:payload_too_large)
end
context 'code_navigation feature flag is disabled' do
it 'does not add ProcessLsif header' do
stub_feature_flags(code_navigation: false)
@@ -1799,6 +1840,32 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
expect { upload_artifacts(file_upload, headers_with_token) }.to change { runner.reload.contacted_at }
end
context 'when the artifact is too large' do
it_behaves_like 'rejecting artifacts that are too large' do
# This filesize validation also happens in non remote stored files,
# it's just that it's hard to stub the filesize in other cases to be
# more than a megabyte.
let!(:fog_connection) do
stub_artifacts_object_storage(direct_upload: true)
end
let(:object) do
fog_connection.directories.new(key: 'artifacts').files.create(
key: 'tmp/uploads/12312300',
body: 'content'
)
end
let(:file_upload) { fog_to_uploaded_file(object) }
let(:send_request) do
upload_artifacts(file_upload, headers_with_token, 'file.remote_id' => '12312300')
end
let(:success_code) { :created }
before do
allow(object).to receive(:content_length).and_return(filesize)
end
end
end
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
@@ -1877,16 +1944,6 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
context 'when artifacts file is too large' do
it 'fails to post too large artifact' do
stub_application_setting(max_artifacts_size: 0)
upload_artifacts(file_upload, headers_with_token)
expect(response).to have_gitlab_http_status(:payload_too_large)
end
end
context 'when artifacts post request does not contain file' do
it 'fails to post artifacts without file' do
post api("/jobs/#{job.id}/artifacts"), params: {}, headers: headers_with_token
...
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::CreateJobArtifactsService do
let_it_be(:project) { create(:project) }
let(:service) { described_class.new(job) }
let(:job) { create(:ci_build, project: project) }
let(:artifacts_sha256) { '0' * 64 }
let(:metadata_file) { nil }
@@ -17,7 +17,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
{
'artifact_type' => 'archive',
'artifact_format' => 'zip'
}.with_indifferent_access
end
def file_to_upload(path, params = {})
@@ -28,7 +28,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
end
describe '#execute' do
subject { service.execute(artifacts_file, params, metadata_file: metadata_file) }
context 'locking' do
let(:old_job) { create(:ci_build, pipeline: create(:ci_pipeline, project: job.project, ref: job.ref)) }
@@ -150,7 +150,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
{
'artifact_type' => 'dotenv',
'artifact_format' => 'gzip'
}.with_indifferent_access
end
it 'calls parse service' do
@@ -186,7 +186,7 @@ RSpec.describe Ci::CreateJobArtifactsService do
{
'artifact_type' => 'cluster_applications',
'artifact_format' => 'gzip'
}.with_indifferent_access
end
it 'calls cluster applications parse service' do
...