Commit 4641cfb7 authored by Fabio Pitino, committed by Kamil Trzciński

Generate child pipeline from artifact

* allow `trigger:artifact` to point to a job artifact
* document new feature
* Remove FF ci_limit_yaml_expansion
parent bae06faf
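For context, the configuration shape exercised by the specs in this commit can be sketched as below: a job in the parent pipeline publishes a generated YAML file as an artifact, and the configuration handed to the child pipeline pulls it in through the new `artifact`/`job` include keys. The `generator` job name, `generated.yml` path, and `include:` keys mirror the spec fixtures; the generator's script and the parent-side `trigger` wiring are illustrative and not shown in this diff.

# Parent pipeline job that produces the configuration artifact
generator:
  script: generate-ci-config > generated.yml
  artifacts:
    paths:
      - generated.yml

# Configuration passed downstream by the bridge job
include:
  - artifact: generated.yml
    job: generator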
@@ -89,6 +89,10 @@ module Ci
end
end
def parent_pipeline
pipeline if triggers_child_pipeline?
end
def triggers_child_pipeline?
yaml_for_downstream.present?
end
......
@@ -787,6 +787,10 @@ module Ci
.fabricate!
end
def find_job_with_archive_artifacts(name)
builds.latest.with_artifacts_archive.find_by_name(name)
end
def latest_builds_with_artifacts
# We purposely cast the builds to an Array here. Because we always use the
# rows if there are more than 0 this prevents us from having to run two
......
---
title: 'Create child pipelines dynamically using content from artifact as CI configuration'
merge_request: 23790
author:
type: fixed
# frozen_string_literal: true
# This class takes as input a Ci::Build object and an artifact path to read.
# It downloads and extracts the artifacts archive, then returns the content
# of the artifact, if found.
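#
# Illustrative usage sketch (the build and path here are hypothetical; the job
# must have an artifacts archive and its metadata, otherwise an error is raised):
#
#   reader = Gitlab::Ci::ArtifactFileReader.new(build)
#   content = reader.read('generated.yml') # => raw file content from the archive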
module Gitlab
module Ci
class ArtifactFileReader
Error = Class.new(StandardError)
MAX_ARCHIVE_SIZE = 5.megabytes
def initialize(job)
@job = job
raise ArgumentError, 'Job does not have artifacts' unless @job.artifacts?
validate!
end
def read(path)
return unless job.artifacts_metadata
metadata_entry = job.artifacts_metadata_entry(path)
if metadata_entry.total_size > MAX_ARCHIVE_SIZE
raise Error, "Artifacts archive for job `#{job.name}` is too large: max #{max_archive_size_in_mb}"
end
read_zip_file!(path)
end
private
attr_reader :job
def validate!
if job.job_artifacts_archive.size > MAX_ARCHIVE_SIZE
raise Error, "Artifacts archive for job `#{job.name}` is too large: max #{max_archive_size_in_mb}"
end
unless job.artifacts_metadata?
raise Error, "Job `#{job.name}` has missing artifacts metadata and cannot be extracted!"
end
end
def read_zip_file!(file_path)
job.artifacts_file.use_file do |archive_path|
Zip::File.open(archive_path) do |zip_file|
entry = zip_file.find_entry(file_path)
unless entry
raise Error, "Path `#{file_path}` does not exist inside the `#{job.name}` artifacts archive!"
end
if entry.name_is_directory?
raise Error, "Path `#{file_path}` was expected to be a file but it was a directory!"
end
zip_file.get_input_stream(entry) do |is|
is.read
end
end
end
end
def max_archive_size_in_mb
ActiveSupport::NumberHelper.number_to_human_size(MAX_ARCHIVE_SIZE)
end
end
end
end
@@ -18,12 +18,9 @@ module Gitlab
attr_reader :root
- def initialize(config, project: nil, sha: nil, user: nil)
- @context = build_context(project: project, sha: sha, user: user)
- if Feature.enabled?(:ci_limit_yaml_expansion, project, default_enabled: true)
+ def initialize(config, project: nil, sha: nil, user: nil, parent_pipeline: nil)
+ @context = build_context(project: project, sha: sha, user: user, parent_pipeline: parent_pipeline)
@context.set_deadline(TIMEOUT_SECONDS)
- end
@config = expand_config(config)
@@ -87,11 +84,12 @@ module Gitlab
initial_config
end
- def build_context(project:, sha:, user:)
+ def build_context(project:, sha:, user:, parent_pipeline:)
Config::External::Context.new(
project: project,
sha: sha || project&.repository&.root_ref_sha,
- user: user)
+ user: user,
+ parent_pipeline: parent_pipeline)
end
def track_and_raise_for_dev_exception(error)
......
@@ -10,7 +10,7 @@ module Gitlab
class Include < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Validatable
- ALLOWED_KEYS = %i[local file remote template].freeze
+ ALLOWED_KEYS = %i[local file remote template artifact job].freeze
validations do
validates :config, hash_or_string: true
......
@@ -7,13 +7,14 @@ module Gitlab
class Context
TimeoutError = Class.new(StandardError)
- attr_reader :project, :sha, :user
+ attr_reader :project, :sha, :user, :parent_pipeline
attr_reader :expandset, :execution_deadline
- def initialize(project: nil, sha: nil, user: nil)
+ def initialize(project: nil, sha: nil, user: nil, parent_pipeline: nil)
@project = project
@sha = sha
@user = user
+ @parent_pipeline = parent_pipeline
@expandset = Set.new
@execution_deadline = 0
......
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module External
module File
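# Handles `include: { artifact: ..., job: ... }` entries: reads the child
# pipeline configuration from an artifact produced by the named job in the
# parent pipeline (for example `artifact: generated.yml, job: generator` in
# the specs below). Only usable when a parent pipeline is present, i.e. when
# triggering a child pipeline.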
class Artifact < Base
extend ::Gitlab::Utils::Override
include Gitlab::Utils::StrongMemoize
attr_reader :job_name
def initialize(params, context)
@location = params[:artifact]
@job_name = params[:job]
super
end
def content
strong_memoize(:content) do
next unless artifact_job
Gitlab::Ci::ArtifactFileReader.new(artifact_job).read(location)
rescue Gitlab::Ci::ArtifactFileReader::Error => error
errors.push(error.message)
end
end
def matching?
super &&
Feature.enabled?(:ci_dynamic_child_pipeline, project)
end
private
def project
context&.parent_pipeline&.project
end
def validate_content!
return unless ensure_preconditions_satisfied!
errors.push("File `#{location}` is empty!") unless content.present?
end
def ensure_preconditions_satisfied!
unless creating_child_pipeline?
errors.push('Including configs from artifacts is only allowed when triggering child pipelines')
return false
end
unless job_name.present?
errors.push("Job must be provided when including configs from artifacts")
return false
end
unless artifact_job.present?
errors.push("Job `#{job_name}` not found in parent pipeline or does not have artifacts!")
return false
end
true
end
def artifact_job
strong_memoize(:artifact_job) do
next unless creating_child_pipeline?
context.parent_pipeline.find_job_with_archive_artifacts(job_name)
end
end
def creating_child_pipeline?
context.parent_pipeline.present?
end
override :expand_context_attrs
def expand_context_attrs
{
project: context.project,
sha: context.sha,
user: context.user,
parent_pipeline: context.parent_pipeline
}
end
end
end
end
end
end
end
@@ -40,7 +40,8 @@ module Gitlab
{
project: context.project,
sha: context.sha,
- user: context.user
+ user: context.user,
+ parent_pipeline: context.parent_pipeline
}
end
end
......
@@ -71,7 +71,8 @@ module Gitlab
{
project: project,
sha: sha,
- user: context.user
+ user: context.user,
+ parent_pipeline: context.parent_pipeline
}
end
end
......
@@ -13,7 +13,8 @@ module Gitlab
External::File::Remote,
External::File::Template,
External::File::Local,
- External::File::Project
+ External::File::Project,
+ External::File::Artifact
].freeze
Error = Class.new(StandardError)
......
@@ -7,7 +7,7 @@ module Gitlab
class Base
attr_reader :pipeline, :command, :config
- delegate :project, :current_user, to: :command
+ delegate :project, :current_user, :parent_pipeline, to: :command
def initialize(pipeline, command)
@pipeline = pipeline
......
@@ -72,6 +72,10 @@ module Gitlab
project.repository.ambiguous_ref?(origin_ref)
end
end
def parent_pipeline
bridge&.parent_pipeline
end
end
end
end
......
@@ -15,7 +15,8 @@ module Gitlab
@command.config_content, {
project: project,
sha: @pipeline.sha,
- user: current_user
+ user: current_user,
+ parent_pipeline: parent_pipeline
}
)
rescue Gitlab::Ci::YamlProcessor::ValidationError => ex
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::ArtifactFileReader do
let(:job) { create(:ci_build) }
let(:path) { 'generated.yml' } # included in the ci_build_artifacts.zip
describe '#read' do
subject { described_class.new(job).read(path) }
context 'when job has artifacts and metadata' do
let!(:artifacts) { create(:ci_job_artifact, :archive, job: job) }
let!(:metadata) { create(:ci_job_artifact, :metadata, job: job) }
it 'returns the content at the path' do
is_expected.to be_present
expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom')
end
context 'when path does not exist' do
let(:path) { 'file/does/not/exist.txt' }
let(:expected_error) do
"Path `#{path}` does not exist inside the `#{job.name}` artifacts archive!"
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when path points to a directory' do
let(:path) { 'other_artifacts_0.1.2' }
let(:expected_error) do
"Path `#{path}` was expected to be a file but it was a directory!"
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when path is nested' do
# path exists in ci_build_artifacts.zip
let(:path) { 'other_artifacts_0.1.2/doc_sample.txt' }
it 'returns the content at the nested path' do
is_expected.to be_present
end
end
context 'when artifact archive size is greater than the limit' do
let(:expected_error) do
"Artifacts archive for job `#{job.name}` is too large: max 1 KB"
end
before do
stub_const("#{described_class}::MAX_ARCHIVE_SIZE", 1.kilobyte)
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when metadata entry shows size greater than the limit' do
let(:expected_error) do
"Artifacts archive for job `#{job.name}` is too large: max 5 MB"
end
before do
expect_next_instance_of(Gitlab::Ci::Build::Artifacts::Metadata::Entry) do |entry|
expect(entry).to receive(:total_size).and_return(10.megabytes)
end
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
end
context 'when job does not have metadata artifacts' do
let!(:artifacts) { create(:ci_job_artifact, :archive, job: job) }
let(:expected_error) do
"Job `#{job.name}` has missing artifacts metadata and cannot be extracted!"
end
it 'raises an error' do
expect { subject }.to raise_error(described_class::Error, expected_error)
end
end
context 'when job does not have artifacts' do
it 'raises ArgumentError' do
expect { subject }.to raise_error(ArgumentError, 'Job does not have artifacts')
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::External::File::Artifact do
let(:parent_pipeline) { create(:ci_pipeline) }
let(:context) do
Gitlab::Ci::Config::External::Context.new(parent_pipeline: parent_pipeline)
end
let(:external_file) { described_class.new(params, context) }
describe '#matching?' do
context 'when params contain artifact location' do
let(:params) { { artifact: 'generated.yml' } }
it 'returns true' do
expect(external_file).to be_matching
end
end
context 'when params does not contain artifact location' do
let(:params) { {} }
it 'returns false' do
expect(external_file).not_to be_matching
end
end
end
describe '#valid?' do
shared_examples 'is invalid' do
it 'is not valid' do
expect(external_file).not_to be_valid
end
it 'sets the expected error' do
expect(external_file.errors)
.to contain_exactly(expected_error)
end
end
describe 'when used in non child pipeline context' do
let(:parent_pipeline) { nil }
let(:params) { { artifact: 'generated.yml' } }
let(:expected_error) do
'Including configs from artifacts is only allowed when triggering child pipelines'
end
it_behaves_like 'is invalid'
end
context 'when used in child pipeline context' do
let(:parent_pipeline) { create(:ci_pipeline) }
context 'when job is not provided' do
let(:params) { { artifact: 'generated.yml' } }
let(:expected_error) do
'Job must be provided when including configs from artifacts'
end
it_behaves_like 'is invalid'
end
context 'when job is provided' do
let(:params) { { artifact: 'generated.yml', job: 'generator' } }
context 'when job does not exist in the parent pipeline' do
let(:expected_error) do
'Job `generator` not found in parent pipeline or does not have artifacts!'
end
it_behaves_like 'is invalid'
end
context 'when job exists in the parent pipeline' do
let!(:generator_job) { create(:ci_build, name: 'generator', pipeline: parent_pipeline) }
context 'when job does not have artifacts' do
let(:expected_error) do
'Job `generator` not found in parent pipeline or does not have artifacts!'
end
it_behaves_like 'is invalid'
end
context 'when job has archive artifacts' do
let!(:artifacts) do
create(:ci_job_artifact, :archive,
job: generator_job,
file: fixture_file_upload(Rails.root.join('spec/fixtures/pages.zip'), 'application/zip'))
end
let(:expected_error) do
'Job `generator` has missing artifacts metadata and cannot be extracted!'
end
it_behaves_like 'is invalid'
context 'when job has artifacts exceeding the max allowed size' do
let(:expected_error) do
"Artifacts archive for job `generator` is too large: max 1 KB"
end
before do
stub_const("#{Gitlab::Ci::ArtifactFileReader}::MAX_ARCHIVE_SIZE", 1.kilobyte)
end
it_behaves_like 'is invalid'
end
context 'when job has artifacts metadata' do
let!(:metadata) do
create(:ci_job_artifact, :metadata, job: generator_job)
end
let(:expected_error) do
'Path `generated.yml` does not exist inside the `generator` artifacts archive!'
end
it_behaves_like 'is invalid'
context 'when file is found in metadata' do
let!(:artifacts) { create(:ci_job_artifact, :archive, job: generator_job) }
let!(:metadata) { create(:ci_job_artifact, :metadata, job: generator_job) }
context 'when file is empty' do
before do
allow_next_instance_of(Gitlab::Ci::ArtifactFileReader) do |reader|
allow(reader).to receive(:read).and_return('')
end
end
let(:expected_error) do
'File `generated.yml` is empty!'
end
it_behaves_like 'is invalid'
end
context 'when file is not empty' do
it 'is valid' do
expect(external_file).to be_valid
expect(external_file.content).to be_present
end
it 'propagates parent_pipeline to nested includes' do
expected_attrs = {
parent_pipeline: parent_pipeline,
project: anything,
sha: anything,
user: anything
}
expect(context).to receive(:mutate).with(expected_attrs).and_call_original
external_file.content
end
end
end
end
end
end
end
end
end
end
@@ -6,10 +6,19 @@ describe Gitlab::Ci::Config::External::File::Local do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:sha) { '12345' }
- let(:context_params) { { project: project, sha: sha, user: user } }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:params) { { local: location } }
let(:local_file) { described_class.new(params, context) }
+ let(:parent_pipeline) { double(:parent_pipeline) }
+ let(:context_params) do
+ {
+ project: project,
+ sha: sha,
+ user: user,
+ parent_pipeline: parent_pipeline
+ }
+ end
before do
allow_any_instance_of(Gitlab::Ci::Config::External::Context)
@@ -117,7 +126,11 @@ describe Gitlab::Ci::Config::External::File::Local do
subject { local_file.send(:expand_context_attrs) }
it 'inherits project, user and sha' do
- is_expected.to include(user: user, project: project, sha: sha)
+ is_expected.to include(
+ user: user,
+ project: project,
+ sha: sha,
+ parent_pipeline: parent_pipeline)
end
end
......
@@ -7,10 +7,19 @@ describe Gitlab::Ci::Config::External::File::Project do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let(:context_user) { user }
- let(:context_params) { { project: context_project, sha: '12345', user: context_user } }
+ let(:parent_pipeline) { double(:parent_pipeline) }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:project_file) { described_class.new(params, context) }
+ let(:context_params) do
+ {
+ project: context_project,
+ sha: '12345',
+ user: context_user,
+ parent_pipeline: parent_pipeline
+ }
+ end
before do
project.add_developer(user)
@@ -152,7 +161,11 @@ describe Gitlab::Ci::Config::External::File::Project do
subject { project_file.send(:expand_context_attrs) }
it 'inherits user, and target project and sha' do
- is_expected.to include(user: user, project: project, sha: project.commit('master').id)
+ is_expected.to include(
+ user: user,
+ project: project,
+ sha: project.commit('master').id,
+ parent_pipeline: parent_pipeline)
end
end
......
@@ -376,23 +376,6 @@ describe Gitlab::Ci::Config do
end
end
- context 'when context expansion timeout is disabled' do
- before do
- allow_next_instance_of(Gitlab::Ci::Config::External::Context) do |instance|
- allow(instance).to receive(:check_execution_time!).and_call_original
- end
- allow(Feature)
- .to receive(:enabled?)
- .with(:ci_limit_yaml_expansion, project, default_enabled: true)
- .and_return(false)
- end
- it 'does not raises errors' do
- expect { config }.not_to raise_error
- end
- end
describe 'external file version' do
context 'when external local file SHA is defined' do
it 'is using a defined value' do
@@ -541,5 +524,76 @@ describe Gitlab::Ci::Config do
end
end
end
context 'when including file from artifact' do
let(:config) do
described_class.new(
gitlab_ci_yml,
project: nil,
sha: nil,
user: nil,
parent_pipeline: parent_pipeline)
end
let(:gitlab_ci_yml) do
<<~HEREDOC
include:
- artifact: generated.yml
job: rspec
HEREDOC
end
let(:parent_pipeline) { nil }
context 'when used in the context of a child pipeline' do
# This job has ci_build_artifacts.zip artifact archive which
# contains generated.yml
let!(:job) { create(:ci_build, :artifacts, name: 'rspec', pipeline: parent_pipeline) }
let(:parent_pipeline) { create(:ci_pipeline) }
it 'returns valid config' do
expect(config).to be_valid
end
context 'when job key is missing' do
let(:gitlab_ci_yml) do
<<~HEREDOC
include:
- artifact: generated.yml
HEREDOC
end
it 'raises an error' do
expect { config }.to raise_error(
described_class::ConfigError,
'Job must be provided when including configs from artifacts'
)
end
end
context 'when artifact key is missing' do
let(:gitlab_ci_yml) do
<<~HEREDOC
include:
- job: rspec
HEREDOC
end
it 'raises an error' do
expect { config }.to raise_error(
described_class::ConfigError,
/needs to match exactly one accessor!/
)
end
end
end
it 'disallows the use in parent pipelines' do
expect { config }.to raise_error(
described_class::ConfigError,
'Including configs from artifacts is only allowed when triggering child pipelines'
)
end
end
end
end
@@ -25,7 +25,7 @@ describe Ci::JobArtifact do
end
it_behaves_like 'UpdateProjectStatistics' do
- subject { build(:ci_job_artifact, :archive, size: 106365) }
+ subject { build(:ci_job_artifact, :archive, size: 107464) }
end
end
@@ -35,7 +35,7 @@ describe Ci::JobArtifact do
end
it_behaves_like 'UpdateProjectStatistics' do
- subject { build(:ci_job_artifact, :archive, size: 106365) }
+ subject { build(:ci_job_artifact, :archive, size: 107464) }
end
end
@@ -173,7 +173,7 @@ describe Ci::JobArtifact do
let(:artifact) { create(:ci_job_artifact, :archive, project: project) }
it 'sets the size from the file size' do
- expect(artifact.size).to eq(106365)
+ expect(artifact.size).to eq(107464)
end
end
......
@@ -2553,6 +2553,19 @@ describe Ci::Pipeline, :mailer do
end
end
describe '#find_job_with_archive_artifacts' do
let!(:old_job) { create(:ci_build, name: 'rspec', retried: true, pipeline: pipeline) }
let!(:job_without_artifacts) { create(:ci_build, name: 'rspec', pipeline: pipeline) }
let!(:expected_job) { create(:ci_build, :artifacts, name: 'rspec', pipeline: pipeline) }
let!(:different_job) { create(:ci_build, name: 'deploy', pipeline: pipeline) }
subject { pipeline.find_job_with_archive_artifacts('rspec') }
it 'finds the expected job' do
expect(subject).to eq(expected_job)
end
end
describe '#latest_builds_with_artifacts' do
let!(:fresh_build) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let!(:stale_build) { create(:ci_build, :success, :expired, :artifacts, pipeline: pipeline) }
......
@@ -756,7 +756,7 @@ describe API::Runner, :clean_gitlab_redis_shared_state do
expect(json_response['dependencies'].count).to eq(1)
expect(json_response['dependencies']).to include(
{ 'id' => job.id, 'name' => job.name, 'token' => job.token,
- 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 106365 } })
+ 'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 107464 } })
end
end
......
@@ -4,30 +4,77 @@ require 'spec_helper'
describe Ci::CreatePipelineService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:admin) }
- let(:upstream_pipeline) { create(:ci_pipeline) }
let(:ref) { 'refs/heads/master' }
let(:service) { described_class.new(project, user, { ref: ref }) }
+ let(:upstream_pipeline) { create(:ci_pipeline, project: project) }
let(:bridge) { create(:ci_bridge, pipeline: upstream_pipeline) }
subject { service.execute(:push, bridge: bridge) }
context 'custom config content' do
let(:bridge) do
create(:ci_bridge, status: 'running', pipeline: upstream_pipeline, project: upstream_pipeline.project).tap do |bridge|
allow(bridge).to receive(:yaml_for_downstream).and_return(
allow(bridge).to receive(:yaml_for_downstream).and_return(config_from_bridge)
end
end
let(:config_from_bridge) do
<<~YML
rspec:
script: rspec
custom:
script: custom
YML
)
end
end
subject { service.execute(:push, bridge: bridge) }
before do
allow(bridge).to receive(:yaml_for_downstream).and_return config_from_bridge
end
it 'creates a pipeline using the content passed in as param' do
expect(subject).to be_persisted
expect(subject.builds.map(&:name)).to eq %w[rspec custom]
expect(subject.config_source).to eq 'bridge_source'
end
context 'when bridge includes yaml from artifact' do
# the generated.yml is available inside the ci_build_artifacts.zip associated
# to the generator_job
let(:config_from_bridge) do
<<~YML
include:
- artifact: generated.yml
job: generator
YML
end
context 'when referenced job exists' do
let!(:generator_job) do
create(:ci_build, :artifacts,
project: project,
pipeline: upstream_pipeline,
name: 'generator')
end
it 'creates a pipeline using the content passed in as param and downloads the artifact' do
expect(subject).to be_persisted
expect(subject.builds.pluck(:name)).to eq %w[rspec time custom]
expect(subject.config_source).to eq 'bridge_source'
end
end
context 'when referenced job does not exist' do
it 'creates an empty pipeline' do
expect(subject).to be_persisted
expect(subject).to be_failed
expect(subject.errors.full_messages)
.to contain_exactly(
'Job `generator` not found in parent pipeline or does not have artifacts!')
expect(subject.builds.pluck(:name)).to be_empty
expect(subject.config_source).to eq 'bridge_source'
end
end
end
end
end