Commit 4931f67a authored by Sean McGivern's avatar Sean McGivern

Merge branch 'ee-40781-os-to-ce' into 'master'

Refactor object storage for CE

Closes #4952, #4928, #4980, #4915, #4879, #4171, #4163, #3370, #2841, and gitlab-ce#29203

See merge request gitlab-org/gitlab-ee!4736
parents 49bafcb3 a57a919c
class Projects::JobsController < Projects::ApplicationController
prepend EE::Projects::JobsController
include SendFileUpload
before_action :build, except: [:index, :cancel_all]
......@@ -119,11 +119,17 @@ class Projects::JobsController < Projects::ApplicationController
end
def raw
build.trace.read do |stream|
if stream.file?
send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
else
render_404
if trace_artifact_file
send_upload(trace_artifact_file,
send_params: raw_send_params,
redirect_params: raw_redirect_params)
else
build.trace.read do |stream|
if stream.file?
send_file stream.path, type: 'text/plain; charset=utf-8', disposition: 'inline'
else
render_404
end
end
end
end
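
`send_upload` comes from the `SendFileUpload` concern included at the top of this controller. Below is a simplified sketch of its dispatch, inferred from the API specs later in this merge request (local files are streamed directly; remote files are either proxied through Workhorse or redirected). It is a paraphrase, not the concern's verbatim body, and `proxy_download_enabled?` is an assumed helper name.

```ruby
# Paraphrased sketch of SendFileUpload#send_upload; behavior inferred from
# the specs in this merge request, not copied from the concern itself.
def send_upload(file_upload, send_params: {}, redirect_params: {})
  if file_upload.file_storage?
    # Local storage: stream the file from disk.
    send_file file_upload.path, send_params
  elsif file_upload.class.proxy_download_enabled? # assumed helper; the real check reads the object_store options
    # proxy_download enabled: hand Workhorse a send-url so it streams the file.
    headers.store(*Gitlab::Workhorse.send_url(file_upload.url(**redirect_params)))
    head :ok
  else
    # Otherwise redirect the client straight to object storage.
    redirect_to file_upload.url(**redirect_params)
  end
end
```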
......@@ -138,6 +144,18 @@ class Projects::JobsController < Projects::ApplicationController
return access_denied! unless can?(current_user, :erase_build, build)
end
def raw_send_params
{ type: 'text/plain; charset=utf-8', disposition: 'inline' }
end
def raw_redirect_params
{ query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
end
def trace_artifact_file
@trace_artifact_file ||= build.job_artifacts_trace&.file
end
def build
@build ||= project.builds.find(params[:id])
.present(current_user: current_user)
......
......@@ -72,6 +72,12 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
end
end
def move_tmp_file_to_storage(object, path)
object.file = File.open(path)
object.file.store!
object.save
end
def link_to_project!(object)
if object && !object.projects.exists?(storage_project.id)
object.projects << storage_project
......
......@@ -3,6 +3,7 @@ module Ci
prepend ArtifactMigratable
include TokenAuthenticatable
include AfterCommitQueue
include ObjectStorage::BackgroundMove
include Presentable
include Importable
prepend EE::Ci::Build
......@@ -48,6 +49,7 @@ module Ci
where('(artifacts_file IS NOT NULL AND artifacts_file <> ?) OR EXISTS (?)',
'', Ci::JobArtifact.select(1).where('ci_builds.id = ci_job_artifacts.job_id').archive)
end
scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
scope :with_artifacts_not_expired, ->() { with_artifacts_archive.where('artifacts_expire_at IS NULL OR artifacts_expire_at > ?', Time.now) }
scope :with_expired_artifacts, ->() { with_artifacts_archive.where('artifacts_expire_at < ?', Time.now) }
scope :last_month, ->() { where('created_at > ?', Date.today - 1.month) }
......
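
`ObjectStorage::BackgroundMove`, included into `Ci::Build` above (and into `Ci::JobArtifact` and `LfsObject` below), is the concern that schedules asynchronous moves to object storage. A rough sketch of its effect, inferred from the worker expectations in the specs further down; the real module lives in `app/uploaders/object_storage.rb`:

```ruby
# Rough sketch only; inferred from the specs in this merge request.
module ObjectStorage
  module BackgroundMove
    extend ActiveSupport::Concern

    included do
      # After commit, each mounted upload still on local storage is scheduled
      # for a background move, e.g. for LFS:
      #   ObjectStorage::BackgroundMoveWorker
      #     .perform_async('LfsObjectUploader', 'LfsObject', :file, id)
      after_commit :schedule_background_upload, on: [:create, :update]
    end
  end
end
```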
......@@ -2,6 +2,7 @@ module Ci
class JobArtifact < ActiveRecord::Base
prepend EE::Ci::JobArtifact
include AfterCommitQueue
include ObjectStorage::BackgroundMove
extend Gitlab::Ci::Model
belongs_to :project
......@@ -9,6 +10,8 @@ module Ci
before_save :set_size, if: :file_changed?
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
mount_uploader :file, JobArtifactUploader
delegate :exists?, :open, to: :file
......@@ -23,6 +26,10 @@ module Ci
self.where(project: project).sum(:size)
end
def local_store?
[nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
def set_size
self.size = file.size
end
......
class LfsObject < ActiveRecord::Base
prepend EE::LfsObject
include AfterCommitQueue
include ObjectStorage::BackgroundMove
has_many :lfs_objects_projects, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
has_many :projects, through: :lfs_objects_projects
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
validates :oid, presence: true, uniqueness: true
mount_uploader :file, LfsObjectUploader
......@@ -19,6 +22,10 @@ class LfsObject < ActiveRecord::Base
projects.exists?(project.lfs_storage_project.id)
end
def local_store?
[nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
end
def self.destroy_unreferenced
joins("LEFT JOIN lfs_objects_projects ON lfs_objects_projects.lfs_object_id = #{table_name}.id")
.where(lfs_objects_projects: { id: nil })
......
......@@ -12,7 +12,6 @@ class Upload < ActiveRecord::Base
validates :uploader, presence: true
scope :with_files_stored_locally, -> { where(store: [nil, ObjectStorage::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(store: ObjectStorage::Store::REMOTE) }
before_save :calculate_checksum!, if: :foreground_checksummable?
after_commit :schedule_checksum, if: :checksummable?
......@@ -36,7 +35,7 @@ class Upload < ActiveRecord::Base
self.checksum = nil
return unless checksummable?
self.checksum = self.class.hexdigest(absolute_path)
self.checksum = Digest::SHA256.file(absolute_path).hexdigest
end
def build_uploader(mounted_as = nil)
......
class AttachmentUploader < GitlabUploader
include UploaderHelper
include RecordsUploads::Concern
include ObjectStorage::Concern
prepend ObjectStorage::Extension::RecordsUploads
include UploaderHelper
private
......
......@@ -8,11 +8,11 @@ class AvatarUploader < GitlabUploader
model.avatar.file && model.avatar.file.present?
end
def move_to_cache
def move_to_store
false
end
def move_to_store
def move_to_cache
false
end
......
......@@ -49,10 +49,6 @@ class GitlabUploader < CarrierWave::Uploader::Base
file.present?
end
def store_dir
File.join(base_dir, dynamic_segment)
end
def cache_dir
File.join(root, base_dir, 'tmp/cache')
end
......@@ -74,6 +70,10 @@ class GitlabUploader < CarrierWave::Uploader::Base
# Designed to be overridden by child uploaders that have a dynamic path
# segment -- that is, a path that changes based on mutable attributes of its
# associated model
#
# For example, `FileUploader` builds the storage path based on the associated
# project model's `path_with_namespace` value, which can change when the
# project or its containing namespace is moved or renamed.
def dynamic_segment
raise(NotImplementedError)
end
......
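
To make the `dynamic_segment` contract concrete, here is a minimal sketch of a child uploader supplying its own segment; the uploader class and its model accessor are hypothetical, not part of this change:

```ruby
# Hypothetical child uploader; not shipped in this merge request.
class ExampleProjectUploader < GitlabUploader
  private

  # store_dir becomes File.join(base_dir, dynamic_segment), so stored files
  # follow the project when it (or its namespace) is renamed or moved.
  def dynamic_segment
    File.join(model.full_path, 'example')
  end
end
```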
class JobArtifactUploader < GitlabUploader
prepend EE::JobArtifactUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
......@@ -16,9 +15,11 @@ class JobArtifactUploader < GitlabUploader
end
def open
raise 'Only File System is supported' unless file_storage?
File.open(path, "rb") if path
if file_storage?
File.open(path, "rb") if path
else
::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
end
end
private
......
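
With this change, callers of `open` no longer care where the artifact lives. A hypothetical usage sketch; the stream is a `File` for local storage and a `Gitlab::Ci::Trace::HttpIO` for remote storage, both readable the same way:

```ruby
# Hypothetical usage; `build` is any Ci::Build with a trace artifact.
stream = build.job_artifacts_trace.file.open
contents = stream&.read
```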
......@@ -114,7 +114,6 @@ module ObjectStorage
included do |base|
base.include(ObjectStorage)
before :store, :verify_license!
after :migrate, :delete_migrated_file
end
......@@ -151,10 +150,6 @@ module ObjectStorage
object_store_options.remote_directory
end
def licensed?
License.feature_available?(:object_storage)
end
def serialization_column(model_class, mount_point)
model_class.uploader_options.dig(mount_point, :mount_on) || mount_point
end
......@@ -285,12 +280,6 @@ module ObjectStorage
migrated_file.delete if exists?
end
def verify_license!(_file)
return if file_storage?
raise(ObjectStorageUnavailable, 'Object Storage feature is missing') unless self.class.licensed?
end
def exists?
file.present?
end
......@@ -323,7 +312,6 @@ module ObjectStorage
def schedule_background_upload?
self.class.object_store_enabled? &&
self.class.background_upload_enabled? &&
self.class.licensed? &&
self.file_storage?
end
......
......@@ -14,18 +14,18 @@ class PersonalFileUploader < FileUploader
File.join(model.class.to_s.underscore, model.id.to_s)
end
# model_path_segment does not require a model to be passed, so we can always
# generate a path, even when there's no model.
def model_valid?
true
end
def object_store
return Store::LOCAL unless model
super
end
# model_path_segment does not require a model to be passed, so we can always
# generate a path, even when there's no model.
def model_valid?
true
end
# Revert-Override
def store_dir
store_dirs[object_store]
......
......@@ -39,6 +39,10 @@
- github_importer:github_import_stage_import_pull_requests
- github_importer:github_import_stage_import_repository
- object_storage_upload
- object_storage:object_storage_background_move
- object_storage:object_storage_migrate_uploads
- pipeline_cache:expire_job_cache
- pipeline_cache:expire_pipeline_cache
- pipeline_creation:create_pipeline
......
......@@ -11,7 +11,6 @@ module ObjectStorage
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
......
......@@ -12,7 +12,6 @@ class ObjectStorageUploadWorker
return unless uploader_class < ObjectStorage::Concern
return unless uploader_class.object_store_enabled?
return unless uploader_class.licensed?
return unless uploader_class.background_upload_enabled?
subject = subject_class.find(subject_id)
......
......@@ -182,19 +182,18 @@ production: &base
# storage_path: public/
# base_dir: uploads/-/system
object_store:
enabled: true
remote_directory: uploads # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Proxy all downloads through GitLab instead of redirecting to Object Storage
connection:
provider: AWS
aws_access_key_id: AWS_ACCESS_KEY_ID
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
# Use the following options to configure an AWS compatible host
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
enabled: false
# remote_directory: uploads # Bucket name
# background_upload: false # Temporary option to limit automatic upload (Default: true)
# proxy_download: false # Proxy all downloads through GitLab instead of redirecting to Object Storage
# connection:
# provider: AWS
# aws_access_key_id: AWS_ACCESS_KEY_ID
# aws_secret_access_key: AWS_SECRET_ACCESS_KEY
# region: eu-central-1
# host: 'localhost' # default: s3.amazonaws.com
# endpoint: 'http://127.0.0.1:9000' # default: nil
# path_style: true # Use 'host/bucket_name/object' instead of 'bucket_name.host/object'
## GitLab Pages
pages:
......@@ -811,7 +810,6 @@ test:
region: us-east-1
uploads:
storage_path: tmp/tests/public
enabled: true
object_store:
enabled: false
connection:
......
......@@ -68,6 +68,8 @@
- [project_migrate_hashed_storage, 1]
- [storage_migrator, 1]
- [pages_domain_verification, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
- [plugin, 1]
- [pipeline_background, 1]
......@@ -86,8 +88,6 @@
- [elastic_indexer, 1]
- [elastic_commit_indexer, 1]
- [export_csv, 1]
- [object_storage_upload, 1]
- [object_storage, 1]
# Deprecated queues: Remove after 10.7
- geo_base_scheduler
......
......@@ -23,13 +23,9 @@ class AddFileStoreToLfsObjects < ActiveRecord::Migration
#
# To disable transactions uncomment the following line and remove these
# comments:
disable_ddl_transaction!
# disable_ddl_transaction!
def up
def change
add_column(:lfs_objects, :file_store, :integer)
end
def down
remove_column(:lfs_objects, :file_store)
end
end
......@@ -4,11 +4,7 @@ class AddFileStoreJobArtifacts < ActiveRecord::Migration
disable_ddl_transaction!
DOWNTIME = false
def up
def change
add_column(:ci_job_artifacts, :file_store, :integer)
end
def down
remove_column(:ci_job_artifacts, :file_store)
end
end
......@@ -7,6 +7,6 @@ class AddStoreColumnToUploads < ActiveRecord::Migration
DOWNTIME = false
def change
add_column :uploads, :store, :integer
add_column(:uploads, :store, :integer)
end
end
......@@ -93,11 +93,12 @@ _The artifacts are stored by default in
9.4 lacks this feature.
> Available in [GitLab Premium](https://about.gitlab.com/products/) and
[GitLab.com Silver](https://about.gitlab.com/gitlab-com/).
> Since version 10.6, available in [GitLab CE](https://about.gitlab.com/products/)
If you don't want to use the local disk where GitLab is installed to store the
artifacts, you can use object storage such as AWS S3 instead.
This configuration relies on valid AWS credentials already being configured.
Use an [Object storage option][ee-os] like AWS S3 to store job artifacts.
Use an [Object storage option][os] like AWS S3 to store job artifacts.
### Object Storage Settings
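
For Omnibus installations the equivalent settings follow the pattern sketched below; the key names are assumed from the Omnibus convention of this era and the values are placeholders, so verify them against the documentation for your version:

```ruby
# /etc/gitlab/gitlab.rb -- illustrative sketch, not verbatim documentation.
gitlab_rails['artifacts_object_store_enabled'] = true
gitlab_rails['artifacts_object_store_remote_directory'] = "artifacts"
gitlab_rails['artifacts_object_store_connection'] = {
  'provider' => 'AWS',
  'region' => 'eu-central-1',
  'aws_access_key_id' => 'AWS_ACCESS_KEY_ID',
  'aws_secret_access_key' => 'AWS_SECRET_ACCESS_KEY'
}
```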
......@@ -313,4 +314,4 @@ memory and disk I/O.
[eep]: https://about.gitlab.com/products/ "GitLab Premium"
[ee-1762]: https://gitlab.com/gitlab-org/gitlab-ee/merge_requests/1762
[browsable]: ../user/project/pipelines/job_artifacts.md#browsing-job-artifacts
[ee-os]: https://docs.gitlab.com/ee/administration/job_artifacts.html#using-object-storage
[os]: https://docs.gitlab.com/administration/job_artifacts.html#using-object-storage
......@@ -300,9 +300,9 @@ sudo usermod -aG redis git
### Clone the Source
# Clone GitLab repository
sudo -u git -H git clone https://gitlab.com/gitlab-org/gitlab-ce.git -b 10-6-stable gitlab
sudo -u git -H git clone https://gitlab.com/gitlab-org/gitlab-ce.git -b 10-5-stable gitlab
**Note:** You can change `10-6-stable` to `master` if you want the *bleeding edge* version, but never install master on a production server!
**Note:** You can change `10-5-stable` to `master` if you want the *bleeding edge* version, but never install master on a production server!
### Configure It
......
......@@ -21,7 +21,6 @@ There are various configuration options to help GitLab server administrators:
### Omnibus packages
In `/etc/gitlab/gitlab.rb`:
```ruby
......@@ -48,6 +47,7 @@ In `config/gitlab.yml`:
## Setting up S3 compatible object storage
> **Note:** [Introduced][ee-2760] in [GitLab Premium][eep] 10.0.
> Available in [GitLab CE][ce] 10.7
It is possible to store LFS objects on remote object storage instead of on a local disk.
......
module EE
module Projects
module JobsController
extend ActiveSupport::Concern
include SendFileUpload
def raw
if trace_artifact_file
send_upload(trace_artifact_file,
send_params: raw_send_params,
redirect_params: raw_redirect_params)
else
super
end
end
private
def raw_send_params
{ type: 'text/plain; charset=utf-8', disposition: 'inline' }
end
def raw_redirect_params
{ query: { 'response-content-type' => 'text/plain; charset=utf-8', 'response-content-disposition' => 'inline' } }
end
def trace_artifact_file
@trace_artifact_file ||= build.job_artifacts_trace&.file
end
end
end
end
......@@ -14,14 +14,11 @@ module EE
DAST_FILE = 'gl-dast-report.json'.freeze
included do
include ObjectStorage::BackgroundMove
scope :codequality, -> { where(name: %w[codequality codeclimate]) }
scope :performance, -> { where(name: %w[performance deploy]) }
scope :sast, -> { where(name: 'sast') }
scope :sast_container, -> { where(name: 'sast:container') }
scope :dast, -> { where(name: 'dast') }
scope :with_artifacts_stored_locally, -> { with_artifacts_archive.where(artifacts_file_store: [nil, LegacyArtifactUploader::Store::LOCAL]) }
after_save :stick_build_if_status_changed
end
......
......@@ -7,15 +7,7 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
end
def local_store?
[nil, ::JobArtifactUploader::Store::LOCAL].include?(self.file_store)
end
private
......
......@@ -7,16 +7,7 @@ module EE
extend ActiveSupport::Concern
prepended do
include ObjectStorage::BackgroundMove
after_destroy :log_geo_event
scope :with_files_stored_locally, -> { where(file_store: [nil, LfsObjectUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ObjectStorage::Store::REMOTE) }
end
def local_store?
[nil, LfsObjectUploader::Store::LOCAL].include?(self.file_store)
end
private
......
module EE
module JobArtifactUploader
extend ActiveSupport::Concern
def open
if file_storage?
super
else
::Gitlab::Ci::Trace::HttpIO.new(url, size) if url
end
end
end
end
module EE
module Gitlab
module Verify
module LfsObjects
extend ::Gitlab::Utils::Override
private
override :relation
def relation
super.with_files_stored_locally
end
end
end
end
end
module EE
module Gitlab
module Verify
module Uploads
extend ::Gitlab::Utils::Override
private
override :relation
def relation
super.with_files_stored_locally
end
end
end
end
end
require 'spec_helper'
describe Projects::JobsController do
include ApiHelpers
include HttpIOHelpers
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) }
describe 'GET trace.json' do
context 'when trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
context 'when there are no network issues' do
before do
stub_remote_trace_206
get_trace
end
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['html']).to eq(job.trace.html)
end
end
context 'when there is a network issue' do
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
def get_trace
get :trace, namespace_id: project.namespace,
project_id: project,
id: job.id,
format: :json
end
end
describe 'GET raw' do
subject do
post :raw, namespace_id: project.namespace,
project_id: project,
id: job.id
end
context 'when the trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
end
it 'redirects to the trace file url' do
expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
end
end
end
end
require 'spec_helper'
describe LfsObject do
describe '#local_store?' do
it 'returns true when file_store is nil' do
subject.file_store = nil
expect(subject.local_store?).to eq true
end
it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
end
describe '#destroy' do
subject { create(:lfs_object, :with_file) }
......@@ -33,78 +13,4 @@ describe LfsObject do
end
end
end
describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
subject { create(:lfs_object, :with_file) }
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
context 'when object storage is enabled' do
context 'when background upload is enabled' do
context 'when licensed' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
subject
end
it 'schedules the model for migration once' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
lfs_object = create(:lfs_object)
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
end
end
context 'when unlicensed' do
before do
stub_lfs_object_storage(background_upload: true, licensed: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
context 'when background upload is disabled' do
before do
stub_lfs_object_storage(background_upload: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
......@@ -17,11 +17,9 @@ describe API::Jobs do
let(:api_user) { user }
let(:reporter) { create(:project_member, :reporter, project: project).user }
let(:cross_project_pipeline_enabled) { true }
let(:object_storage_enabled) { true }
before do
stub_licensed_features(cross_project_pipelines: cross_project_pipeline_enabled,
object_storage: object_storage_enabled)
stub_licensed_features(cross_project_pipelines: cross_project_pipeline_enabled)
project.add_developer(user)
end
......@@ -39,32 +37,6 @@ describe API::Jobs do
end
end
context 'for normal authentication when job with artifacts are stored remotely' do
before do
stub_artifacts_object_storage(proxy_download: proxy_download)
create(:ci_job_artifact, :archive, :remote_store, job: job)
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'when proxy download is enabled' do
let(:proxy_download) { true }
it 'responds with the workhorse send-url' do
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
end
end
context 'when proxy download is disabled' do
let(:proxy_download) { false }
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
end
end
end
context 'authorized by job_token' do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
......@@ -97,84 +69,4 @@ describe API::Jobs do
end
end
end
describe 'GET /projects/:id/artifacts/:ref_name/download?job=name' do
let(:api_user) { reporter }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
stub_artifacts_object_storage(licensed: :skip)
job.success
end
def get_for_ref(ref = pipeline.ref, job_name = job.name)
get api("/projects/#{project.id}/jobs/artifacts/#{ref}/download", api_user), job: job_name
end
context 'find proper job' do
shared_examples 'a valid file' do
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
end
end
end
context 'with regular branch' do
before do
pipeline.reload
pipeline.update(ref: 'master',
sha: project.commit('master').sha)
get_for_ref('master')
end
it_behaves_like 'a valid file'
end
context 'with branch name containing slash' do
before do
pipeline.reload
pipeline.update(ref: 'improve/awesome',
sha: project.commit('improve/awesome').sha)
get_for_ref('improve/awesome')
end
it_behaves_like 'a valid file'
end
context 'when using job_token to authenticate' do
before do
pipeline.reload
pipeline.update(ref: 'master',
sha: project.commit('master').sha)
get api("/projects/#{project.id}/jobs/artifacts/master/download"), job: job.name, job_token: job.token
end
context 'when user is reporter' do
it_behaves_like 'a valid file'
end
context 'when user is admin, but not member' do
let(:api_user) { create(:admin) }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
it 'does not allow seeing that the artifact is present' do
expect(response).to have_gitlab_http_status(404)
end
end
end
end
end
end
......@@ -1432,6 +1432,16 @@ module Gitlab
output
end
def can_be_merged?(source_sha, target_branch)
gitaly_migrate(:can_be_merged) do |is_enabled|
if is_enabled
gitaly_can_be_merged?(source_sha, find_branch(target_branch).target)
else
rugged_can_be_merged?(source_sha, target_branch)
end
end
end
def last_commit_for_path(sha, path)
gitaly_migrate(:last_commit_for_path) do |is_enabled|
if is_enabled
......@@ -2385,6 +2395,14 @@ module Gitlab
.map { |c| commit(c) }
end
def gitaly_can_be_merged?(their_commit, our_commit)
!gitaly_conflicts_client(our_commit, their_commit).conflicts?
end
def rugged_can_be_merged?(their_commit, our_commit)
!rugged.merge_commits(our_commit, their_commit).conflicts?
end
def last_commit_for_path_by_gitaly(sha, path)
gitaly_commit_client.last_commit_for_path(sha, path)
end
......
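
A hypothetical caller of the new `can_be_merged?` above, checking mergeability before attempting a merge; the receiver and arguments are placeholders, not code from this change:

```ruby
# Hypothetical usage; not code from this merge request.
raw_repo = project.repository.raw_repository
if raw_repo.can_be_merged?(merge_request.diff_head_sha, merge_request.target_branch)
  # No conflicts detected between the two revisions.
end
```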
module Gitlab
module Verify
class LfsObjects < BatchVerifier
prepend ::EE::Gitlab::Verify::LfsObjects
def name
'LFS objects'
end
......@@ -14,7 +12,7 @@ module Gitlab
private
def relation
LfsObject.all
LfsObject.with_files_stored_locally
end
def expected_checksum(lfs_object)
......
module Gitlab
module Verify
class Uploads < BatchVerifier
prepend ::EE::Gitlab::Verify::Uploads
def name
'Uploads'
end
......@@ -14,7 +12,7 @@ module Gitlab
private
def relation
Upload.all
Upload.with_files_stored_locally
end
def expected_checksum(upload)
......
......@@ -150,7 +150,6 @@ describe Projects::ArtifactsController do
end
end
## EE specific begins
context 'when using remote file storage' do
before do
stub_artifacts_object_storage
......@@ -163,7 +162,6 @@ describe Projects::ArtifactsController do
let(:archive_path) { 'https://' }
end
end
## EE specific ends
end
end
......
# coding: utf-8
require 'spec_helper'
describe Projects::JobsController do
include ApiHelpers
include HttpIOHelpers
let(:project) { create(:project, :public) }
let(:pipeline) { create(:ci_pipeline, project: project) }
......@@ -203,6 +205,41 @@ describe Projects::JobsController do
end
end
context 'when trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :success, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
allow_any_instance_of(JobArtifactUploader).to receive(:url) { remote_trace_url }
allow_any_instance_of(JobArtifactUploader).to receive(:size) { remote_trace_size }
end
context 'when there are no network issues' do
before do
stub_remote_trace_206
get_trace
end
it 'returns a trace' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq job.id
expect(json_response['status']).to eq job.status
expect(json_response['html']).to eq(job.trace.html)
end
end
context 'when there is a network issue' do
before do
stub_remote_trace_500
end
it 'raises an error' do
expect { get_trace }.to raise_error(Gitlab::Ci::Trace::HttpIO::FailedToGetChunkError)
end
end
end
def get_trace
get :trace, namespace_id: project.namespace,
project_id: project,
......@@ -446,14 +483,18 @@ describe Projects::JobsController do
end
describe 'GET raw' do
before do
get_raw
subject do
post :raw, namespace_id: project.namespace,
project_id: project,
id: job.id
end
context 'when job has a trace artifact' do
let(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
it 'returns a trace' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq job.job_artifacts_trace.open.read
......@@ -464,6 +505,8 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, :trace_live, pipeline: pipeline) }
it 'sends a trace file' do
response = subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.content_type).to eq 'text/plain; charset=utf-8'
expect(response.body).to eq 'BUILD TRACE'
......@@ -474,14 +517,22 @@ describe Projects::JobsController do
let(:job) { create(:ci_build, pipeline: pipeline) }
it 'returns not_found' do
response = subject
expect(response).to have_gitlab_http_status(:not_found)
end
end
def get_raw
post :raw, namespace_id: project.namespace,
project_id: project,
id: job.id
context 'when the trace artifact is in ObjectStorage' do
let!(:job) { create(:ci_build, :trace_artifact, pipeline: pipeline) }
before do
allow_any_instance_of(JobArtifactUploader).to receive(:file_storage?) { false }
end
it 'redirects to the trace file url' do
expect(subject).to redirect_to(job.job_artifacts_trace.file.url)
end
end
end
end
require 'spec_helper'
describe 'Fog::Storage::GoogleXML::File' do
let(:storage) do
Fog.mock!
Fog::Storage.new({
google_storage_access_key_id: "asdf",
google_storage_secret_access_key: "asdf",
provider: "Google"
})
end
let(:file) do
directory = storage.directories.create(key: 'data')
directory.files.create(
body: 'Hello World!',
key: 'hello_world.txt'
)
end
it 'delegates to #get_https_url' do
expect(file.url(Time.now)).to start_with("https://")
end
end
......@@ -31,5 +31,21 @@ describe Gitlab::Verify::LfsObjects do
expect(failures.keys).to contain_exactly(lfs_object)
expect(failure.to_s).to include('Checksum mismatch')
end
context 'with remote files' do
before do
stub_lfs_object_storage
end
it 'skips LFS objects in object storage' do
local_failure = create(:lfs_object)
create(:lfs_object, :object_storage)
failures = {}
described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
expect(failures.keys).to contain_exactly(local_failure)
end
end
end
end
......@@ -40,5 +40,21 @@ describe Gitlab::Verify::Uploads do
expect(failures.keys).to contain_exactly(upload)
expect(failure.to_s).to include('Checksum missing')
end
context 'with remote files' do
before do
stub_uploads_object_storage(AvatarUploader)
end
it 'skips uploads in object storage' do
local_failure = create(:upload)
create(:upload, :object_storage)
failures = {}
described_class.new(batch_size: 10).run_batches { |_, failed| failures.merge!(failed) }
expect(failures.keys).to contain_exactly(local_failure)
end
end
end
end
......@@ -19,6 +19,10 @@ describe RemoveEmptyForkNetworks, :migration do
deleted_project.destroy!
end
after do
Upload.reset_column_information
end
it 'deletes only the fork network without members' do
expect(fork_networks.count).to eq(2)
......
......@@ -199,6 +199,16 @@ describe Ci::Build do
end
context 'when legacy artifacts are used' do
let(:build) { create(:ci_build, :legacy_artifacts) }
subject { build.artifacts? }
context 'is expired' do
let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
it { is_expected.to be_falsy }
end
context 'artifacts archive does not exist' do
let(:build) { create(:ci_build) }
......@@ -209,13 +219,25 @@ describe Ci::Build do
let(:build) { create(:ci_build, :legacy_artifacts) }
it { is_expected.to be_truthy }
end
end
end
context 'is expired' do
let(:build) { create(:ci_build, :legacy_artifacts, :expired) }
describe '#browsable_artifacts?' do
subject { build.browsable_artifacts? }
it { is_expected.to be_falsy }
end
context 'artifacts metadata does not exist' do
before do
build.update_attributes(legacy_artifacts_metadata: nil)
end
it { is_expected.to be_falsy }
end
context 'artifacts metadata does exist' do
let(:build) { create(:ci_build, :artifacts) }
it { is_expected.to be_truthy }
end
end
......
......@@ -33,28 +33,14 @@ describe Ci::JobArtifact do
context 'when object storage is enabled' do
context 'when background upload is enabled' do
context 'when licensed' do
before do
stub_artifacts_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
before do
stub_artifacts_object_storage(background_upload: true)
end
context 'when unlicensed' do
before do
stub_artifacts_object_storage(background_upload: true, licensed: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker).to receive(:perform_async).with('JobArtifactUploader', described_class.name, :file, kind_of(Numeric))
subject
end
subject
end
end
......
require 'spec_helper'
describe LfsObject do
describe '#local_store?' do
it 'returns true when file_store is nil' do
subject.file_store = nil
expect(subject.local_store?).to eq true
end
it 'returns true when file_store is equal to LfsObjectUploader::Store::LOCAL' do
subject.file_store = LfsObjectUploader::Store::LOCAL
expect(subject.local_store?).to eq true
end
it 'returns false when file_store is equal to LfsObjectUploader::Store::REMOTE' do
subject.file_store = LfsObjectUploader::Store::REMOTE
expect(subject.local_store?).to eq false
end
end
describe '#schedule_background_upload' do
before do
stub_lfs_setting(enabled: true)
end
subject { create(:lfs_object, :with_file) }
context 'when object storage is disabled' do
before do
stub_lfs_object_storage(enabled: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
context 'when object storage is enabled' do
context 'when background upload is enabled' do
context 'when licensed' do
before do
stub_lfs_object_storage(background_upload: true)
end
it 'schedules the model for migration' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
subject
end
it 'schedules the model for migration once' do
expect(ObjectStorage::BackgroundMoveWorker)
.to receive(:perform_async)
.with('LfsObjectUploader', described_class.name, :file, kind_of(Numeric))
.once
lfs_object = create(:lfs_object)
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "image/png")
lfs_object.save!
end
end
end
context 'when background upload is disabled' do
before do
stub_lfs_object_storage(background_upload: false)
end
it 'does not schedule the migration' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
subject
end
end
end
end
end
......@@ -21,6 +21,7 @@ describe API::Jobs do
let(:guest) { create(:project_member, :guest, project: project).user }
before do
stub_licensed_features(cross_project_pipelines: true)
project.add_developer(user)
end
......@@ -114,6 +115,7 @@ describe API::Jobs do
let(:query) { Hash.new }
before do
job
get api("/projects/#{project.id}/pipelines/#{pipeline.id}/jobs", api_user), query
end
......@@ -337,10 +339,55 @@ describe API::Jobs do
end
end
context 'when artifacts are stored remotely' do
let(:proxy_download) { false }
before do
stub_artifacts_object_storage(proxy_download: proxy_download)
end
let(:job) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
context 'when proxy download is enabled' do
let(:proxy_download) { true }
it 'responds with the workhorse send-url' do
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("send-url:")
end
end
context 'when proxy download is disabled' do
it 'returns location redirect' do
expect(response).to have_gitlab_http_status(302)
end
end
context 'authorized user' do
it 'returns the file remote URL' do
expect(response).to redirect_to(artifact.file.url)
end
end
context 'unauthorized user' do
let(:api_user) { nil }
it 'does not return specific job artifacts' do
expect(response).to have_gitlab_http_status(404)
end
end
end
it 'does not return job artifacts if not uploaded' do
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(404)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
......@@ -351,6 +398,7 @@ describe API::Jobs do
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
before do
stub_artifacts_object_storage
job.success
end
......@@ -414,9 +462,24 @@ describe API::Jobs do
"attachment; filename=#{job.artifacts_file.filename}" }
end
it { expect(response).to have_gitlab_http_status(200) }
it { expect(response).to have_http_status(:ok) }
it { expect(response.headers).to include(download_headers) }
end
context 'when artifacts are stored remotely' do
let(:job) { create(:ci_build, pipeline: pipeline, user: api_user) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: job) }
before do
job.reload
get api("/projects/#{project.id}/jobs/#{job.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_http_status(:found)
end
end
end
context 'with regular branch' do
......@@ -444,6 +507,29 @@ describe API::Jobs do
it_behaves_like 'a valid file'
end
context 'when using job_token to authenticate' do
before do
pipeline.reload
pipeline.update(ref: 'master',
sha: project.commit('master').sha)
get api("/projects/#{project.id}/jobs/artifacts/master/download"), job: job.name, job_token: job.token
end
context 'when user is reporter' do
it_behaves_like 'a valid file'
end
context 'when user is admin, but not member' do
let(:api_user) { create(:admin) }
let(:job) { create(:ci_build, :artifacts, pipeline: pipeline, user: api_user) }
it 'does not allow seeing that the artifact is present' do
expect(response).to have_gitlab_http_status(404)
end
end
end
end
end
......
......@@ -217,6 +217,7 @@ describe API::Runner do
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
job
stub_container_registry_config(enabled: false)
end
......
......@@ -216,6 +216,7 @@ describe API::V3::Builds do
describe 'GET /projects/:id/builds/:build_id/artifacts' do
before do
stub_artifacts_object_storage
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
......@@ -230,13 +231,24 @@ describe API::V3::Builds do
end
it 'returns specific job artifacts' do
expect(response).to have_gitlab_http_status(200)
expect(response).to have_http_status(200)
expect(response.headers).to include(download_headers)
expect(response.body).to match_file(build.artifacts_file.file.file)
end
end
end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
it 'returns location redirect' do
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
expect(response).to have_gitlab_http_status(302)
end
end
context 'unauthorized user' do
let(:api_user) { nil }
......@@ -256,6 +268,7 @@ describe API::V3::Builds do
let(:build) { create(:ci_build, :artifacts, pipeline: pipeline) }
before do
stub_artifacts_object_storage
build.success
end
......@@ -318,9 +331,24 @@ describe API::V3::Builds do
"attachment; filename=#{build.artifacts_file.filename}" }
end
it { expect(response).to have_gitlab_http_status(200) }
it { expect(response).to have_http_status(200) }
it { expect(response.headers).to include(download_headers) }
end
context 'when artifacts are stored remotely' do
let(:build) { create(:ci_build, pipeline: pipeline) }
let!(:artifact) { create(:ci_job_artifact, :archive, :remote_store, job: build) }
before do
build.reload
get v3_api("/projects/#{project.id}/builds/#{build.id}/artifacts", api_user)
end
it 'returns location redirect' do
expect(response).to have_http_status(302)
end
end
end
context 'with regular branch' do
......
module HttpIOHelpers
def stub_remote_trace_206
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 206) }
end
def stub_remote_trace_200
WebMock.stub_request(:get, remote_trace_url)
.to_return { |request| remote_trace_response(request, 200) }
end
def stub_remote_trace_500
WebMock.stub_request(:get, remote_trace_url)
.to_return(status: [500, "Internal Server Error"])
end
def remote_trace_url
"http://trace.com/trace"
end
def remote_trace_response(request, response_status)
range = request.headers['Range'].match(/bytes=(\d+)-(\d+)/)
{
status: response_status,
headers: remote_trace_response_headers(response_status, range[1].to_i, range[2].to_i),
body: range_trace_body(range[1].to_i, range[2].to_i)
}
end
def remote_trace_response_headers(response_status, from, to)
headers = { 'Content-Type' => 'text/plain' }
if response_status == 206
headers.merge!('Content-Range' => "bytes #{from}-#{to}/#{remote_trace_size}")
end
headers
end
def range_trace_body(from, to)
remote_trace_body[from..to]
end
def remote_trace_body
@remote_trace_body ||= File.read(expand_fixture_path('trace/sample_trace'))
end
def remote_trace_size
remote_trace_body.length
end
def set_smaller_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks / 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
def set_larger_buffer_size_than(file_size)
blocks = (file_size / 128)
new_size = (blocks * 2) * 128
stub_const("Gitlab::Ci::Trace::HttpIO::BUFFER_SIZE", new_size)
end
end
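
A sketch of how a spec might combine these helpers to exercise chunked reads; it assumes `Gitlab::Ci::Trace::HttpIO#read`, which the trace specs in this merge request also rely on:

```ruby
# Illustrative spec sketch built on the helpers above.
describe Gitlab::Ci::Trace::HttpIO do
  include HttpIOHelpers

  before do
    stub_remote_trace_206
    set_smaller_buffer_size_than(remote_trace_size) # forces several ranged GETs
  end

  it 'reads the whole remote trace' do
    stream = described_class.new(remote_trace_url, remote_trace_size)

    expect(stream.read).to eq(remote_trace_body)
  end
end
```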
module StubConfiguration
def stub_object_storage_uploader(
config:, uploader:, remote_directory:, enabled: true, licensed: true,
proxy_download: false,
background_upload: false, direct_upload: false
config:,
uploader:,
remote_directory:,
enabled: true,
proxy_download: false,
background_upload: false,
direct_upload: false
)
allow(config).to receive(:enabled) { enabled }
allow(config).to receive(:proxy_download) { proxy_download }
allow(config).to receive(:background_upload) { background_upload }
allow(config).to receive(:direct_upload) { direct_upload }
stub_licensed_features(object_storage: licensed) unless licensed == :skip
return unless enabled
Fog.mock!
......
require 'spec_helper'
describe AvatarUploader do
let(:model) { create(:user, :with_avatar) }
let(:model) { build_stubbed(:user) }
let(:uploader) { described_class.new(model, :avatar) }
let(:upload) { create(:upload, model: model) }
......
......@@ -87,29 +87,6 @@ describe FileUploader do
end
end
describe 'callbacks' do
describe '#prune_store_dir after :remove' do
before do
uploader.store!(fixture_file_upload('spec/fixtures/doc_sample.txt'))
end
def store_dir
File.expand_path(uploader.store_dir, uploader.root)
end
it 'is called' do
expect(uploader).to receive(:prune_store_dir).once
uploader.remove!
end
it 'prunes the store directory' do
expect { uploader.remove! }
.to change { File.exist?(store_dir) }.from(true).to(false)
end
end
end
describe "#migrate!" do
before do
uploader.store!(fixture_file_upload(Rails.root.join('spec/fixtures/dk.png')))
......@@ -135,15 +112,5 @@ describe FileUploader do
uploader.upload = upload
end
context 'uploader_context is empty' do
it 'falls back to regex-based extraction' do
expect(upload).to receive(:uploader_context).and_return({})
uploader.upload = upload
expect(uploader.secret).to eq(secret)
expect(uploader.instance_variable_get(:@identifier)).to eq('file.txt')
end
end
end
end
......@@ -48,6 +48,17 @@ describe JobArtifactUploader do
end
end
end
context 'when trace is stored in Object storage' do
before do
allow(uploader).to receive(:file_storage?) { false }
allow(uploader).to receive(:url) { 'http://object_storage.com/trace' }
end
it 'returns http io stream' do
is_expected.to be_a(Gitlab::Ci::Trace::HttpIO)
end
end
end
context 'file is stored in valid local_path' do
......
......@@ -43,18 +43,6 @@ describe LfsObjectUploader do
lfs_object
end
end
context 'with object storage unlicensed' do
before do
stub_lfs_object_storage(licensed: false)
end
it 'is skipped' do
expect(ObjectStorage::BackgroundMoveWorker).not_to receive(:perform_async)
lfs_object
end
end
end
describe 'remote file' do
......@@ -75,16 +63,6 @@ describe LfsObjectUploader do
expect(lfs_object.file.path).not_to be_blank
end
end
context 'with object storage unlicensed' do
before do
stub_lfs_object_storage(licensed: false)
end
it 'can not store file remotely' do
expect { store_file(lfs_object) }.to raise_error('Object Storage feature is missing')
end
end
end
def store_file(lfs_object)
......
......@@ -270,16 +270,6 @@ describe ObjectStorage do
end
end
context 'when storage is unlicensed' do
before do
stub_artifacts_object_storage(licensed: false)
end
it "raises an error" do
expect { subject }.to raise_error(/Object Storage feature is missing/)
end
end
context 'when credentials are set' do
before do
stub_artifacts_object_storage
......@@ -350,50 +340,6 @@ describe ObjectStorage do
it { is_expected.to eq(false) }
end
describe '#verify_license!' do
subject { uploader.verify_license!(nil) }
context 'when using local storage' do
before do
expect(object).to receive(:file_store) { described_class::Store::LOCAL }
end
it "does not raise an error" do
expect { subject }.not_to raise_error
end
end
context 'when using remote storage' do
before do
allow(uploader_class).to receive(:options) do
double(object_store: double(enabled: true))
end
expect(object).to receive(:file_store) { described_class::Store::REMOTE }
end
context 'feature is not available' do
before do
expect(License).to receive(:feature_available?).with(:object_storage).and_return(false)
end
it "does raise an error" do
expect { subject }.to raise_error(/Object Storage feature is missing/)
end
end
context 'feature is available' do
before do
expect(License).to receive(:feature_available?).with(:object_storage).and_return(true)
end
it "does not raise an error" do
expect { subject }.not_to raise_error
end
end
end
end
describe '.workhorse_authorize' do
subject { uploader_class.workhorse_authorize }
......