Commit 6ca02a41 authored by Grzegorz Bizon, committed by Kamil Trzciński

Merge branch 'zj-multiple-artifacts-ee' into 'master'

Multiple artifacts ee

See merge request gitlab-org/gitlab-ee!3276
parent ec72abf5
@@ -42,8 +42,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
   end
 
   def raw
-    path = Gitlab::Ci::Build::Artifacts::Path
-      .new(params[:path])
+    path = Gitlab::Ci::Build::Artifacts::Path.new(params[:path])
 
     send_artifacts_entry(build, path)
   end
@@ -72,7 +71,7 @@ class Projects::ArtifactsController < Projects::ApplicationController
   end
 
   def validate_artifacts!
-    render_404 unless build && build.artifacts?
+    render_404 unless build&.artifacts?
   end
 
   def build
......
@@ -45,7 +45,7 @@ module Ci
    end
 
     scope :with_artifacts_not_expired, ->() { with_artifacts.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
     scope :with_expired_artifacts, ->() { with_artifacts.where('artifacts_expire_at < ?', Time.now) }
-    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, ArtifactUploader::LOCAL_STORE]) }
+    scope :with_artifacts_stored_locally, ->() { with_artifacts.where(artifacts_file_store: [nil, LegacyArtifactUploader::LOCAL_STORE]) }
     scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
     scope :manual_actions, ->() { where(when: :manual, status: COMPLETED_STATUSES + [:manual]) }
     scope :ref_protected, -> { where(protected: true) }
@@ -361,22 +361,10 @@ module Ci
       project.running_or_pending_build_count(force: true)
     end
 
-    def artifacts?
-      !artifacts_expired? && artifacts_file.exists?
-    end
-
     def browsable_artifacts?
       artifacts_metadata?
     end
 
-    def downloadable_single_artifacts_file?
-      artifacts_metadata? && artifacts_file.file_storage?
-    end
-
-    def artifacts_metadata?
-      artifacts? && artifacts_metadata.exists?
-    end
-
     def artifacts_metadata_entry(path, **options)
       artifacts_metadata.use_file do |metadata_path|
         metadata = Gitlab::Ci::Build::Artifacts::Metadata.new(
......
-class JobArtifactUploader < GitlabUploader
-  storage :file
+class JobArtifactUploader < ObjectStoreUploader
+  storage_options Gitlab.config.artifacts
 
   def self.local_store_path
     Gitlab.config.artifacts.path
@@ -15,24 +15,8 @@ class JobArtifactUploader < GitlabUploader
     model.size
   end
 
-  def store_dir
-    default_local_path
-  end
-
-  def cache_dir
-    File.join(self.class.local_store_path, 'tmp/cache')
-  end
-
-  def work_dir
-    File.join(self.class.local_store_path, 'tmp/work')
-  end
-
   private
 
-  def default_local_path
-    File.join(self.class.local_store_path, default_path)
-  end
-
   def default_path
     creation_date = model.created_at.utc.strftime('%Y_%m_%d')
......
-class LegacyArtifactUploader < GitlabUploader
-  storage :file
+class LegacyArtifactUploader < ObjectStoreUploader
+  storage_options Gitlab.config.artifacts
 
   def self.local_store_path
     Gitlab.config.artifacts.path
@@ -9,24 +9,8 @@ class LegacyArtifactUploader < GitlabUploader
     File.join(self.local_store_path, 'tmp/uploads/')
   end
 
-  def store_dir
-    default_local_path
-  end
-
-  def cache_dir
-    File.join(self.class.local_store_path, 'tmp/cache')
-  end
-
-  def work_dir
-    File.join(self.class.local_store_path, 'tmp/work')
-  end
-
   private
 
-  def default_local_path
-    File.join(self.class.local_store_path, default_path)
-  end
-
   def default_path
     File.join(model.created_at.utc.strftime('%Y_%m'), model.project_id.to_s, model.id.to_s)
   end
......
@@ -7,12 +7,12 @@ class LfsObjectUploader < ObjectStoreUploader
   end
 
   def filename
-    subject.oid[4..-1]
+    model.oid[4..-1]
   end
 
   private
 
   def default_path
-    "#{subject.oid[0, 2]}/#{subject.oid[2, 2]}"
+    "#{model.oid[0, 2]}/#{model.oid[2, 2]}"
   end
 end
@@ -38,11 +38,16 @@ class ObjectStoreUploader < GitlabUploader
     end
   end
 
-  attr_reader :subject, :field
-
-  def initialize(subject, field)
-    @subject = subject
-    @field = field
+  def file_storage?
+    storage.is_a?(CarrierWave::Storage::File)
+  end
+
+  def file_cache_storage?
+    cache_storage.is_a?(CarrierWave::Storage::File)
+  end
+
+  def real_object_store
+    model.public_send(store_serialization_column) # rubocop:disable GitlabSecurity/PublicSend
   end
 
   def object_store
@@ -51,7 +56,7 @@ class ObjectStoreUploader < GitlabUploader
   def object_store=(value)
     @storage = nil
-    subject.public_send(:"#{field}_store=", value)
+    model.public_send(:"#{store_serialization_column}=", value) # rubocop:disable GitlabSecurity/PublicSend
   end
 
   def store_dir
@@ -99,7 +104,7 @@ class ObjectStoreUploader < GitlabUploader
     # since we change storage store the new storage
     # in case of failure delete new file
     begin
-      subject.save!
+      model.save!
     rescue => e
       new_file.delete
       self.object_store = old_store
@@ -113,7 +118,7 @@ class ObjectStoreUploader < GitlabUploader
   def schedule_migration_to_object_storage(new_file)
     if self.class.object_store_enabled? && licensed? && file_storage?
-      ObjectStorageUploadWorker.perform_async(self.class.name, subject.class.name, field, subject.id)
+      ObjectStorageUploadWorker.perform_async(self.class.name, model.class.name, mounted_as, model.id)
     end
   end
@@ -178,6 +183,14 @@ class ObjectStoreUploader < GitlabUploader
     raise NotImplementedError
   end
 
+  def serialization_column
+    model.class.uploader_option(mounted_as, :mount_on) || mounted_as
+  end
+
+  def store_serialization_column
+    :"#{serialization_column}_store"
+  end
+
   def storage
     @storage ||=
       if object_store == REMOTE_STORE
......
@@ -672,6 +672,7 @@ test:
     aws_secret_access_key: AWS_SECRET_ACCESS_KEY
     region: eu-central-1
   artifacts:
+    path: tmp/tests/artifacts
     enabled: true
     # The location where build artifacts are stored (default: shared/artifacts).
     # path: shared/artifacts
......
+class AddFileStoreJobArtifacts < ActiveRecord::Migration
+  include Gitlab::Database::MigrationHelpers
+
+  disable_ddl_transaction!
+
+  DOWNTIME = false
+
+  def up
+    add_column(:ci_job_artifacts, :file_store, :integer)
+  end
+
+  def down
+    remove_column(:ci_job_artifacts, :file_store)
+  end
+end
@@ -326,6 +326,7 @@ ActiveRecord::Schema.define(version: 20171205190711) do
     t.integer "project_id", null: false
     t.integer "job_id", null: false
     t.integer "file_type", null: false
+    t.integer "file_store"
     t.integer "size", limit: 8
     t.datetime_with_timezone "created_at", null: false
     t.datetime_with_timezone "updated_at", null: false
......
@@ -12,8 +12,8 @@ namespace :gitlab do
           .with_artifacts_stored_locally
           .find_each(batch_size: 10) do |build|
           begin
-            build.artifacts_file.migrate!(ArtifactUploader::REMOTE_STORE)
-            build.artifacts_metadata.migrate!(ArtifactUploader::REMOTE_STORE)
+            build.artifacts_file.migrate!(ObjectStoreUploader::REMOTE_STORE)
+            build.artifacts_metadata.migrate!(ObjectStoreUploader::REMOTE_STORE)
             logger.info("Transferred artifacts of #{build.id} of #{build.artifacts_size} to object storage")
           rescue => e
......
 require 'spec_helper'
 
 describe Projects::ArtifactsController do
-  set(:user) { create(:user) }
+  let(:user) { project.owner }
   set(:project) { create(:project, :repository, :public) }
 
   let(:pipeline) do
@@ -15,8 +15,6 @@ describe Projects::ArtifactsController do
   let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
 
   before do
-    project.add_developer(user)
-
     sign_in(user)
   end
@@ -115,12 +113,12 @@ describe Projects::ArtifactsController do
   describe 'GET raw' do
     context 'when the file exists' do
       let(:path) { 'ci_artifacts.txt' }
-      let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline, artifacts_file_store: store, artifacts_metadata_store: store) }
 
       shared_examples 'a valid file' do
         it 'serves the file using workhorse' do
           subject
 
+          expect(response).to have_gitlab_http_status(200)
           expect(send_data).to start_with('artifacts-entry:')
           expect(params.keys).to eq(%w(Archive Entry))
@@ -144,8 +142,9 @@ describe Projects::ArtifactsController do
       context 'when using local file storage' do
         it_behaves_like 'a valid file' do
+          let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
           let(:store) { ObjectStoreUploader::LOCAL_STORE }
-          let(:archive_path) { ArtifactUploader.local_store_path }
+          let(:archive_path) { JobArtifactUploader.local_store_path }
         end
       end
@@ -157,7 +156,7 @@ describe Projects::ArtifactsController do
        it_behaves_like 'a valid file' do
          let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
          let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
-         let(:store) { ObjectStorage::Store::REMOTE }
+         let(:store) { ObjectStoreUploader::REMOTE_STORE }
          let(:archive_path) { 'https://' }
        end
      end
......
@@ -5,6 +5,10 @@ FactoryGirl.define do
     job factory: :ci_build
     file_type :archive
 
+    trait :remote_store do
+      file_store JobArtifactUploader::REMOTE_STORE
+    end
+
     after :build do |artifact|
       artifact.project ||= artifact.job.project
     end
......
@@ -158,6 +158,20 @@ describe Ci::Build do
     end
   end
 
+  context 'when legacy artifacts are used' do
+    let(:build) { create(:ci_build, :legacy_artifacts) }
+
+    subject { build.artifacts? }
+
+    context 'artifacts archive does not exist' do
+      let(:build) { create(:ci_build) }
+
+      context 'is not expired' do
+        it { is_expected.to be_truthy }
+      end
+    end
+  end
+
   context 'when legacy artifacts are used' do
     let(:build) { create(:ci_build, :legacy_artifacts) }
@@ -190,7 +204,7 @@ describe Ci::Build do
     context 'artifacts metadata does not exist' do
       before do
-        build.update_attributes(artifacts_metadata: nil)
+        build.update_attributes(legacy_artifacts_metadata: nil)
       end
 
       it { is_expected.to be_falsy }
......
@@ -288,14 +288,21 @@ describe API::Jobs do
       get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
     end
 
-    context 'job with artifacts' do
-      context 'when artifacts are stored locally' do
-        let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
-
-        context 'authorized user' do
-          let(:download_headers) do
-            { 'Content-Transfer-Encoding' => 'binary',
-              'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
+    context 'normal authentication' do
+      before do
+        stub_artifacts_object_storage
+      end
+
+      context 'job with artifacts' do
+        context 'when artifacts are stored locally' do
+          let(:job) { create(:ci_build, :artifacts, pipeline: pipeline) }
+
+          before do
+            get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+          end
+
+          context 'authorized user' do
+            it_behaves_like 'downloads artifact'
           end
 
           it 'returns specific job artifacts' do
@@ -305,13 +312,40 @@ describe API::Jobs do
           end
         end
 
-        context 'unauthorized user' do
-          let(:api_user) { nil }
+        context 'when artifacts are stored remotely' do
+          let(:job) { create(:ci_build, pipeline: pipeline) }
+          let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+          before do
+            job.reload
+
+            get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+          end
 
           it 'does not return specific job artifacts' do
             expect(response).to have_http_status(401)
           end
         end
+
+        it 'does not return job artifacts if not uploaded' do
+          get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+
+          expect(response).to have_gitlab_http_status(404)
+        end
+      end
+    end
+
+    context 'authorized by job_token' do
+      let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
+
+      before do
+        get api("/projects/#{project.id}/jobs/#{job.id}/artifacts"), job_token: job.token
+      end
+
+      context 'user is developer' do
+        let(:api_user) { user }
+
+        it_behaves_like 'downloads artifact'
       end
 
       context 'when artifacts are stored remotely' do
@@ -402,7 +436,14 @@ describe API::Jobs do
       end
 
       context 'when artifacts are stored remotely' do
-        let(:job) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+        let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
+        let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
+
+        before do
+          job.reload
+
+          get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
+        end
 
         it 'returns location redirect' do
           expect(response).to have_http_status(302)
......
@@ -1151,12 +1151,15 @@ describe API::Runner do
   describe 'GET /api/v4/jobs/:id/artifacts' do
     let(:token) { job.token }
 
-    before do
-      download_artifact
-    end
-
     context 'when job has artifacts' do
-      let(:job) { create(:ci_build, :artifacts) }
+      let(:job) { create(:ci_build) }
+      let(:store) { JobArtifactUploader::LOCAL_STORE }
+
+      before do
+        create(:ci_job_artifact, :archive, file_store: store, job: job)
+        download_artifact
+      end
 
       context 'when using job token' do
         context 'when artifacts are stored locally' do
@@ -1172,7 +1175,8 @@
       end
 
       context 'when artifacts are stored remotely' do
-        let(:job) { create(:ci_build, :artifacts, :remote_store) }
+        let(:store) { JobArtifactUploader::REMOTE_STORE }
+        let!(:job) { create(:ci_build) }
 
         it 'download artifacts' do
           expect(response).to have_http_status(302)
@@ -1191,12 +1195,16 @@
     context 'when job does not has artifacts' do
       it 'responds with not found' do
+        download_artifact
+
        expect(response).to have_gitlab_http_status(404)
       end
     end
 
     def download_artifact(params = {}, request_headers = headers)
       params = params.merge(token: token)
+      job.reload
+
       get api("/jobs/#{job.id}/artifacts"), params, request_headers
     end
   end
......
@@ -215,10 +215,13 @@ describe API::V3::Builds do
       end
 
       context 'when artifacts are stored remotely' do
-        let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+        let(:build) { create(:ci_build, pipeline: pipeline) }
+        let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
 
         it 'returns location redirect' do
-          expect(response).to have_http_status(302)
+          get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+
+          expect(response).to have_gitlab_http_status(302)
         end
       end
@@ -309,7 +312,14 @@ describe API::V3::Builds do
       end
 
      context 'when artifacts are stored remotely' do
-        let(:build) { create(:ci_build, :artifacts, :remote_store, pipeline: pipeline) }
+        let(:build) { create(:ci_build, pipeline: pipeline) }
+        let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
+
+        before do
+          build.reload
+
+          get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
+        end
 
        it 'returns location redirect' do
          expect(response).to have_http_status(302)
......
@@ -117,7 +117,8 @@ describe PipelineSerializer do
   shared_examples 'no N+1 queries' do
     it 'verifies number of queries', :request_store do
       recorded = ActiveRecord::QueryRecorder.new { subject }
-      expect(recorded.count).to be_within(1).of(36)
+
+      expect(recorded.count).to be_within(1).of(40)
       expect(recorded.cached_count).to eq(0)
     end
   end
......
@@ -18,7 +18,7 @@ module StubConfiguration
   def stub_artifacts_object_storage(**params)
     stub_object_storage_uploader(config: Gitlab.config.artifacts.object_store,
-                                 uploader: ArtifactUploader,
+                                 uploader: JobArtifactUploader,
                                  remote_directory: 'artifacts',
                                  **params)
   end
......
+require 'rake_helper'
+
+describe 'gitlab:artifacts namespace rake task' do
+  before(:context) do
+    Rake.application.rake_require 'tasks/gitlab/artifacts'
+  end
+
+  let(:object_storage_enabled) { false }
+
+  before do
+    stub_artifacts_object_storage(enabled: object_storage_enabled)
+  end
+
+  subject { run_rake_task('gitlab:artifacts:migrate') }
+
+  context 'legacy artifacts' do
+    describe 'migrate' do
+      let!(:build) { create(:ci_build, :legacy_artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
+
+      context 'when local storage is used' do
+        let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+        context 'and job does not have file store defined' do
+          let(:object_storage_enabled) { true }
+          let(:store) { nil }
+
+          it "migrates file to remote storage" do
+            subject
+
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          end
+        end
+
+        context 'and remote storage is defined' do
+          let(:object_storage_enabled) { true }
+
+          it "migrates file to remote storage" do
+            subject
+
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          end
+        end
+
+        context 'and remote storage is not defined' do
+          it "fails to migrate to remote storage" do
+            subject
+
+            expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+            expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+          end
+        end
+      end
+
+      context 'when remote storage is used' do
+        let(:object_storage_enabled) { true }
+        let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+        it "file stays on remote storage" do
+          subject
+
+          expect(build.reload.artifacts_file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+          expect(build.reload.artifacts_metadata_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+    end
+  end
+
+  context 'job artifacts' do
+    let!(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+
+    context 'when local storage is used' do
+      let(:store) { ObjectStoreUploader::LOCAL_STORE }
+
+      context 'and job does not have file store defined' do
+        let(:object_storage_enabled) { true }
+        let(:store) { nil }
+
+        it "migrates file to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+
+      context 'and remote storage is defined' do
+        let(:object_storage_enabled) { true }
+
+        it "migrates file to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+        end
+      end
+
+      context 'and remote storage is not defined' do
+        it "fails to migrate to remote storage" do
+          subject
+
+          expect(artifact.reload.file_store).to eq(ObjectStoreUploader::LOCAL_STORE)
+        end
+      end
+    end
+
+    context 'when remote storage is used' do
+      let(:object_storage_enabled) { true }
+      let(:store) { ObjectStoreUploader::REMOTE_STORE }
+
+      it "file stays on remote storage" do
+        subject
+
+        expect(artifact.reload.file_store).to eq(ObjectStoreUploader::REMOTE_STORE)
+      end
+    end
+  end
+end
 require 'spec_helper'
 
 describe JobArtifactUploader do
-  let(:job_artifact) { create(:ci_job_artifact) }
+  let(:store) { described_class::LOCAL_STORE }
+  let(:job_artifact) { create(:ci_job_artifact, file_store: store) }
   let(:uploader) { described_class.new(job_artifact, :file) }
   let(:local_path) { Gitlab.config.artifacts.path }
@@ -15,6 +16,17 @@ describe JobArtifactUploader do
       it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
       it { is_expected.to end_with(path) }
     end
+
+    context 'when using remote storage' do
+      let(:store) { described_class::REMOTE_STORE }
+
+      before do
+        stub_artifacts_object_storage
+      end
+
+      it { is_expected.to match(/\h{2}\/\h{2}\/\h{64}\/\d{4}_\d{1,2}_\d{1,2}\/\d+\/\d+\z/) }
+      it { is_expected.to end_with(path) }
+    end
   end
 
   describe '#cache_dir' do
......
 require 'rails_helper'
 
 describe LegacyArtifactUploader do
-  let(:job) { create(:ci_build) }
+  let(:store) { described_class::LOCAL_STORE }
+  let(:job) { create(:ci_build, artifacts_file_store: store) }
   let(:uploader) { described_class.new(job, :legacy_artifacts_file) }
   let(:local_path) { Gitlab.config.artifacts.path }
......
@@ -4,27 +4,91 @@ require 'carrierwave/storage/fog'
 describe ObjectStoreUploader do
   let(:uploader_class) { Class.new(described_class) }
   let(:object) { double }
-  let(:uploader) { uploader_class.new(object, :artifacts_file) }
+  let(:uploader) { uploader_class.new(object, :file) }
+
+  before do
+    allow(object.class).to receive(:uploader_option).with(:file, :mount_on) { nil }
+  end
 
   describe '#object_store' do
     it "calls artifacts_file_store on object" do
-      expect(object).to receive(:artifacts_file_store)
+      expect(object).to receive(:file_store)
 
       uploader.object_store
     end
+
+    context 'when store is null' do
+      before do
+        expect(object).to receive(:file_store).twice.and_return(nil)
+      end
+
+      it "returns LOCAL_STORE" do
+        expect(uploader.real_object_store).to be_nil
+        expect(uploader.object_store).to eq(described_class::LOCAL_STORE)
+      end
+    end
+
+    context 'when value is set' do
+      before do
+        expect(object).to receive(:file_store).twice.and_return(described_class::REMOTE_STORE)
+      end
+
+      it "returns given value" do
+        expect(uploader.real_object_store).not_to be_nil
+        expect(uploader.object_store).to eq(described_class::REMOTE_STORE)
+      end
+    end
   end
 
   describe '#object_store=' do
     it "calls artifacts_file_store= on object" do
-      expect(object).to receive(:artifacts_file_store=).with(described_class::REMOTE_STORE)
+      expect(object).to receive(:file_store=).with(described_class::REMOTE_STORE)
 
       uploader.object_store = described_class::REMOTE_STORE
     end
   end
 
-  context 'when using ArtifactsUploader' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
-    let(:uploader) { job.artifacts_file }
+  describe '#file_storage?' do
+    context 'when file storage is used' do
+      before do
+        expect(object).to receive(:file_store).and_return(described_class::LOCAL_STORE)
+      end
+
+      it { expect(uploader).to be_file_storage }
+    end
+
+    context 'when is remote storage' do
+      before do
+        uploader_class.storage_options double(
+          object_store: double(enabled: true))
+        expect(object).to receive(:file_store).and_return(described_class::REMOTE_STORE)
+      end
+
+      it { expect(uploader).not_to be_file_storage }
+    end
+  end
+
+  describe '#file_cache_storage?' do
+    context 'when file storage is used' do
+      before do
+        uploader_class.cache_storage(:file)
+      end
+
+      it { expect(uploader).to be_file_cache_storage }
+    end
+
+    context 'when is remote storage' do
+      before do
+        uploader_class.cache_storage(:fog)
+      end
+
+      it { expect(uploader).not_to be_file_cache_storage }
+    end
+  end
+
+  context 'when using JobArtifactsUploader' do
+    let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+    let(:uploader) { artifact.file }
 
     context 'checking described_class' do
       let(:store) { described_class::LOCAL_STORE }
@@ -32,6 +96,19 @@ describe ObjectStoreUploader do
       it "uploader is of a described_class" do
        expect(uploader).to be_a(described_class)
      end
+
+      it 'moves files locally' do
+        expect(uploader.move_to_store).to be(true)
+        expect(uploader.move_to_cache).to be(true)
+      end
+    end
+
+    context 'when store is null' do
+      let(:store) { nil }
+
+      it "sets the store to LOCAL_STORE" do
+        expect(artifact.file_store).to eq(described_class::LOCAL_STORE)
+      end
     end
   end
 
   describe '#use_file' do
@@ -57,8 +134,8 @@ describe ObjectStoreUploader do
   end
 
   describe '#migrate!' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store) }
-    let(:uploader) { job.artifacts_file }
+    let(:artifact) { create(:ci_job_artifact, :archive, file_store: store) }
+    let(:uploader) { artifact.file }
     let(:store) { described_class::LOCAL_STORE }
 
     subject { uploader.migrate!(new_store) }
@@ -141,7 +218,7 @@ describe ObjectStoreUploader do
     context 'when subject save fails' do
       before do
-        expect(job).to receive(:save!).and_raise(RuntimeError, "exception")
+        expect(artifact).to receive(:save!).and_raise(RuntimeError, "exception")
       end
 
       it "does catch an error" do
@@ -199,7 +276,7 @@ describe ObjectStoreUploader do
     context 'when using local storage' do
       before do
-        expect(object).to receive(:artifacts_file_store) { described_class::LOCAL_STORE }
+        expect(object).to receive(:file_store) { described_class::LOCAL_STORE }
      end
 
      it "does not raise an error" do
@@ -211,7 +288,7 @@ describe ObjectStoreUploader do
     before do
       uploader_class.storage_options double(
         object_store: double(enabled: true))
-      expect(object).to receive(:artifacts_file_store) { described_class::REMOTE_STORE }
+      expect(object).to receive(:file_store) { described_class::REMOTE_STORE }
     end
 
     context 'feature is not available' do
......
@@ -48,12 +48,12 @@ describe ObjectStorageUploadWorker do
     end
   end
 
-  context 'for artifacts' do
-    let(:job) { create(:ci_build, :artifacts, artifacts_file_store: store, artifacts_metadata_store: store) }
-    let(:uploader_class) { ArtifactUploader }
+  context 'for legacy artifacts' do
+    let(:build) { create(:ci_build, :legacy_artifacts) }
+    let(:uploader_class) { LegacyArtifactUploader }
     let(:subject_class) { Ci::Build }
     let(:file_field) { :artifacts_file }
-    let(:subject_id) { job.id }
+    let(:subject_id) { build.id }
 
     context 'when local storage is used' do
       let(:store) { local }
@@ -61,13 +61,12 @@ describe ObjectStorageUploadWorker do
       context 'and remote storage is defined' do
         before do
           stub_artifacts_object_storage
-          job
         end
 
         it "migrates file to remote storage" do
          perform
 
-          expect(job.reload.artifacts_file_store).to eq(remote)
+          expect(build.reload.artifacts_file_store).to eq(remote)
        end
 
        context 'for artifacts_metadata' do
@@ -76,10 +75,34 @@ describe ObjectStorageUploadWorker do
          it 'migrates metadata to remote storage' do
            perform
 
-            expect(job.reload.artifacts_metadata_store).to eq(remote)
+            expect(build.reload.artifacts_metadata_store).to eq(remote)
          end
        end
      end
    end
  end
 
+  context 'for job artifacts' do
+    let(:artifact) { create(:ci_job_artifact, :archive) }
+    let(:uploader_class) { JobArtifactUploader }
+    let(:subject_class) { Ci::JobArtifact }
+    let(:file_field) { :file }
+    let(:subject_id) { artifact.id }
+
+    context 'when local storage is used' do
+      let(:store) { local }
+
+      context 'and remote storage is defined' do
+        before do
+          stub_artifacts_object_storage
+        end
+
+        it "migrates file to remote storage" do
+          perform
+
+          expect(artifact.reload.file_store).to eq(remote)
+        end
+      end
+    end
+  end
 end