Commit 3462a789 authored by Micaël Bergeron

fix uploads path for external components

parent 586790ce
......@@ -28,14 +28,13 @@ module UploadsActions
# or send the file
disposition = uploader.image_or_video? ? 'inline' : 'attachment'
expires_in 0.seconds, must_revalidate: true, private: true
binding.pry
send_file uploader.file.path, disposition: disposition
end
private
def uploader_class
uploader.class
raise NotImplementedError
end
def upload_mount
......@@ -44,24 +43,32 @@ module UploadsActions
mounted_as if upload_mounts.include? mounted_as
end
def uploader_mounted?
upload_model_class < CarrierWave::Mount::Extension && !upload_mount.nil?
end
# TODO: this method is too complex
#
def uploader
@uploader ||= if upload_model_class < CarrierWave::Mount::Extension && upload_mount
model.public_send(upload_mount)
elsif upload_model_class == PersonalSnippet
find_upload(PersonalFileUploader)&.build_uploader || PersonalFileUploader.new(model)
@uploader ||= if uploader_mounted?
model.public_send(upload_mount) # rubocop:disable GitlabSecurity/PublicSend
else
find_upload(FileUploader)&.build_uploader || FileUploader.new(model)
build_uploader_from_upload || build_uploader_from_params
end
end
def find_upload(uploader_class)
def build_uploader_from_upload
return nil unless params[:secret] && params[:filename]
upload_path = uploader_class.upload_path(params[:secret], params[:filename])
Upload.where(uploader: uploader_class.to_s, path: upload_path)&.last
upload = Upload.where(uploader: uploader_class.to_s, path: upload_path)&.last
upload&.build_uploader
end
def build_uploader_from_params
uploader = uploader_class.new(model, params[:secret])
uploader.retrieve_from_store!(params[:filename])
uploader
end
def image_or_video?
......
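Note: the refactored `uploader` method above is now a two-step dispatch: use the CarrierWave-mounted uploader when the model mounts one, otherwise rebuild one from an existing `Upload` record or, failing that, build one fresh from the `:secret`/`:filename` params. An illustrative plain-Ruby sketch of that order (stand-in objects only, not GitLab's real classes):

# Illustrative sketch of the dispatch order; these are stand-ins,
# not GitLab's real classes.
Stored = Struct.new(:uploader)

def resolve_uploader(mounted_uploader, stored_upload, params)
  return mounted_uploader if mounted_uploader          # model mounts it
  return stored_upload.uploader if stored_upload       # rebuilt from an Upload row
  "uploader(#{params[:secret]}/#{params[:filename]})"  # built from params
end

resolve_uploader(nil, Stored.new('from-upload-row'), {})      # => "from-upload-row"
resolve_uploader(nil, nil, secret: 'abc', filename: 'a.png')  # => "uploader(abc/a.png)"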
......@@ -7,29 +7,27 @@ class Groups::UploadsController < Groups::ApplicationController
private
def show_model
strong_memoize(:show_model) do
group_id = params[:group_id]
Group.find_by_full_path(group_id)
end
def upload_model_class
Group
end
def authorize_upload_file!
render_404 unless can?(current_user, :upload_file, group)
def uploader_class
NamespaceFileUploader
end
def uploader
strong_memoize(:uploader) do
file_uploader = uploader_class.new(show_model, params[:secret])
file_uploader.retrieve_from_store!(params[:filename])
file_uploader
end
def find_model
return @group if @group
group_id = params[:group_id]
Group.find_by_full_path(group_id)
end
def uploader_class
NamespaceFileUploader
def authorize_upload_file!
render_404 unless can?(current_user, :upload_file, group)
end
alias_method :model, :group
def model
@model ||= find_model
end
end
......@@ -61,7 +61,7 @@ class Projects::LfsStorageController < Projects::GitHttpClientController
def store_file(oid, size, tmp_file)
# Define tmp_file_path early because we use it in "ensure"
tmp_file_path = File.join("#{Gitlab.config.lfs.storage_path}/tmp/upload", tmp_file)
tmp_file_path = File.join(LfsObjectUploader.workhorse_upload_path, tmp_file)
object = LfsObject.find_or_create_by(oid: oid, size: size)
file_exists = object.file.exists? || move_tmp_file_to_storage(object, tmp_file_path)
......
......@@ -27,6 +27,6 @@ class Projects::UploadsController < Projects::ApplicationController
end
def model
@model ||= @project || find_model
@model || find_model
end
end
......@@ -6,6 +6,16 @@ class UploadsController < ApplicationController
UnknownUploadModelError = Class.new(StandardError)
MODEL_CLASSES = {
"user" => User,
"project" => Project,
"note" => Note,
"group" => Group,
"appearance" => Appearance,
"personal_snippet" => PersonalSnippet,
nil => PersonalSnippet
}.freeze
rescue_from UnknownUploadModelError, with: :render_404
skip_before_action :authenticate_user!
......@@ -14,8 +24,13 @@ class UploadsController < ApplicationController
before_action :authorize_access!, only: [:show]
before_action :authorize_create_access!, only: [:create]
def uploader_class
PersonalFileUploader
end
def find_model
return nil unless params[:id]
@model = upload_model_class.find(params[:id])
end
......@@ -57,16 +72,8 @@ class UploadsController < ApplicationController
end
def upload_model_class
model_classes = {
"user" => User,
"project" => Project,
"note" => Note,
"group" => Group,
"appearance" => Appearance,
"personal_snippet" => PersonalSnippet
}
raise UnknownUploadModelError unless cls = model_classes[params[:model]]
raise UnknownUploadModelError unless cls = MODEL_CLASSES[params[:model]]
cls
end
......@@ -76,6 +83,7 @@ class UploadsController < ApplicationController
def upload_mount_satisfied?
return true unless upload_model_class_has_mounts?
upload_model_class.uploader_options.has_key?(upload_mount)
end
......
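Note: hoisting the per-request hash literal into the frozen `MODEL_CLASSES` constant (see the two hunks above) avoids rebuilding it on every call, and the `nil` key gives requests without a `:model` param a `PersonalSnippet` fallback. A minimal sketch of the lookup behaviour, with class names stubbed as strings:

# Minimal sketch mirroring the constant above; values stubbed as strings.
MODEL_CLASSES = {
  "user" => "User",
  nil    => "PersonalSnippet"
}.freeze

def upload_model_class(model_param)
  MODEL_CLASSES[model_param] or raise "UnknownUploadModelError"
end

upload_model_class("user") # => "User"
upload_model_class(nil)    # => "PersonalSnippet" (no :model param given)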
......@@ -29,6 +29,10 @@ class Upload < ActiveRecord::Base
upload
end
def self.hexdigest(path)
Digest::SHA256.file(path).hexdigest
end
def absolute_path
raise ObjectStorage::RemoteStoreError, "Remote object has no absolute path." unless local?
return path unless relative_path?
......@@ -40,7 +44,7 @@ class Upload < ActiveRecord::Base
self.checksum = nil
return unless checksumable?
self.checksum = Digest::SHA256.file(absolute_path).hexdigest
self.checksum = self.class.hexdigest(absolute_path)
end
def build_uploader(from = nil)
......
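Note: extracting the digest into `Upload.hexdigest` (above) lets callers checksum a file without an `Upload` instance. A stand-alone equivalent:

require 'digest'

# Stand-alone equivalent of the extracted class method: SHA256 of a
# file's contents, returned as a hex string.
def hexdigest(path)
  Digest::SHA256.file(path).hexdigest
end

hexdigest(__FILE__) # => 64-character hex string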
......@@ -21,8 +21,8 @@ module Geo
end
def execute
origin = File.join(CarrierWave.root, FileUploader.base_dir, old_attachments_path)
target = File.join(CarrierWave.root, FileUploader.base_dir, new_attachments_path)
origin = File.join(FileUploader.root, old_attachments_path)
target = File.join(FileUploader.root, new_attachments_path)
move_folder!(origin, target)
end
......
......@@ -25,7 +25,7 @@ module Geo
end
def local_store_path
Pathname.new(LfsObjectUploader.workhorse_upload_path)
Pathname.new(LfsObjectUploader.root)
end
def relative_file_path
......
......@@ -16,9 +16,9 @@ module Projects
@old_path = project.full_path
@new_path = project.disk_path
origin = FileUploader.model_path_segment(project)
origin = FileUploader.absolute_base_dir(project)
project.storage_version = ::Project::HASHED_STORAGE_FEATURES[:attachments]
target = FileUploader.model_path_segment(project)
target = FileUploader.absolute_base_dir(project)
result = move_folder!(origin, target)
project.save!
......
......@@ -26,13 +26,12 @@ class FileUploader < GitlabUploader
storage_options Gitlab.config.uploads
def self.root
storage_options&.storage_path
File.join(storage_options&.storage_path, 'uploads')
end
def self.absolute_path(upload)
File.join(
root,
base_dir(upload.model),
absolute_base_dir(upload.model),
upload.path # this already contains the dynamic_segment, see #upload_path
)
end
......@@ -41,6 +40,11 @@ class FileUploader < GitlabUploader
model_path_segment(model)
end
# this is used in migrations and import/exports
def self.absolute_base_dir(model)
File.join(root, base_dir(model))
end
# Returns the part of `store_dir` that can change based on the model's current
# path
#
......@@ -121,8 +125,8 @@ class FileUploader < GitlabUploader
self.file.filename
end
# This is weird: the upload do not hold the secret, but holds the path
# so we need to extract the secret from the path
# the upload does not hold the secret, but holds the path
# which contains the secret: extract it
def upload=(value)
if matches = DYNAMIC_PATH_PATTERN.match(value.path)
@secret = matches[:secret]
......
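Note: with this change `FileUploader.root` carries the `uploads` suffix itself, and `absolute_base_dir` joins it with the model's path segment, so external callers (migrations, import/export) no longer need to reach for `CarrierWave.root`. A rough sketch of the composition; the concrete values are assumptions for illustration, not GitLab defaults:

# Rough sketch of the path composition after this commit; values assumed.
storage_path = 'public'                           # uploads.storage_path (assumed)
root         = File.join(storage_path, 'uploads') # FileUploader.root
base_dir     = 'awesome/project'                  # model_path_segment(model) (assumed)

File.join(root, base_dir)
# => "public/uploads/awesome/project" (absolute_base_dir)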
......@@ -20,7 +20,7 @@ class GitlabUploader < CarrierWave::Uploader::Base
end
def absolute_path(upload_record)
File.join(CarrierWave.root, upload_record.path)
File.join(root, upload_record.path)
end
end
......
......@@ -2,16 +2,21 @@ class LfsObjectUploader < GitlabUploader
extend Workhorse::UploadPath
include ObjectStorage::Concern
storage_options Gitlab.config.lfs
def store_dir
dynamic_segment
# LfsObjects are in `tmp/upload` instead of `tmp/uploads`
def self.workhorse_upload_path
File.join(root, 'tmp/upload')
end
storage_options Gitlab.config.lfs
def filename
model.oid[4..-1]
end
def store_dir
dynamic_segment
end
private
def dynamic_segment
......
class NamespaceFileUploader < FileUploader
storage_options Gitlab.config.uploads
# Re-Override
def self.root
storage_options&.storage_path
end
def self.base_dir(model)
File.join(storage_options&.base_dir, 'namespace', model_path_segment(model))
end
......
class PersonalFileUploader < FileUploader
storage_options Gitlab.config.uploads
# Re-Override
def self.root
storage_options&.storage_path
end
def self.base_dir(model)
File.join(storage_options&.base_dir, model_path_segment(model))
end
......
module Workhorse
module UploadPath
def workhorse_upload_path
File.join(root, base_dir, 'tmp/uploads/')
File.join(root, base_dir, 'tmp/uploads')
end
end
end
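Note: `Workhorse::UploadPath` derives an uploader's temporary upload directory from its `root` and `base_dir` (the trailing slash is dropped above), while `LfsObjectUploader` overrides the method because LFS objects historically live under `tmp/upload`, singular. A sketch of the default behaviour with placeholder values:

# Sketch of the default behaviour, with placeholder root/base_dir values.
module Workhorse
  module UploadPath
    def workhorse_upload_path
      File.join(root, base_dir, 'tmp/uploads')
    end
  end
end

class ExampleUploader
  extend Workhorse::UploadPath

  def self.root
    'shared/artifacts' # placeholder
  end

  def self.base_dir
    '' # placeholder
  end
end

ExampleUploader.workhorse_upload_path
# => "shared/artifacts/tmp/uploads"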
......@@ -797,7 +797,7 @@ test:
aws_secret_access_key: AWS_SECRET_ACCESS_KEY
region: eu-central-1
uploads:
storage_path: tmp/tests/public/
storage_path: tmp/tests/public
enabled: true
object_store:
enabled: false
......
......@@ -334,6 +334,21 @@ Settings.gitlab_ci['url'] ||= Settings.__send__(:build_gitlab_ci
Settings['incoming_email'] ||= Settingslogic.new({})
Settings.incoming_email['enabled'] = false if Settings.incoming_email['enabled'].nil?
#
# Build Artifacts
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
# DEPRECATED use `storage_path`
Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] ||= false
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] ||= true
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
#
# Registry
......@@ -368,22 +383,6 @@ Settings.pages['artifacts_server'] ||= Settings.pages['enabled'] if Settings.pa
#
Settings.gitlab['geo_status_timeout'] ||= 10
#
# Build Artifacts
#
Settings['artifacts'] ||= Settingslogic.new({})
Settings.artifacts['enabled'] = true if Settings.artifacts['enabled'].nil?
# DEPRECATED use `storage_path`
Settings.artifacts['storage_path'] = Settings.absolute(Settings.artifacts.values_at('path', 'storage_path').compact.first || File.join(Settings.shared['path'], "artifacts"))
Settings.artifacts['max_size'] ||= 100 # in megabytes
Settings.artifacts['object_store'] ||= Settingslogic.new({})
Settings.artifacts['object_store']['enabled'] ||= false
Settings.artifacts['object_store']['remote_directory'] ||= nil
Settings.artifacts['object_store']['background_upload'] ||= true
# Convert upload connection settings to use string keys, to make Fog happy
Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
#
# Git LFS
#
......
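Note: the Build Artifacts settings block moves earlier in the initializer (the old copy further down is removed). Its `storage_path` line resolves the deprecated `path` key before `storage_path`; a sketch of that precedence, with made-up values:

# Sketch of the deprecated-key precedence; all paths here are made up.
artifacts   = { 'path' => nil, 'storage_path' => nil }
shared_path = '/var/opt/gitlab/gitlab-rails/shared' # Settings.shared['path'] (assumed)

artifacts.values_at('path', 'storage_path').compact.first ||
  File.join(shared_path, 'artifacts')
# => "/var/opt/gitlab/gitlab-rails/shared/artifacts" (fallback when neither key is set)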
......@@ -173,11 +173,11 @@ ActiveRecord::Schema.define(version: 20171214144320) do
t.boolean "throttle_authenticated_web_enabled", default: false, null: false
t.integer "throttle_authenticated_web_requests_per_period", default: 7200, null: false
t.integer "throttle_authenticated_web_period_in_seconds", default: 3600, null: false
t.boolean "password_authentication_enabled_for_web"
t.boolean "password_authentication_enabled_for_git", default: true
t.integer "gitaly_timeout_default", default: 55, null: false
t.integer "gitaly_timeout_medium", default: 30, null: false
t.integer "gitaly_timeout_fast", default: 10, null: false
t.boolean "password_authentication_enabled_for_web"
t.boolean "password_authentication_enabled_for_git", default: true, null: false
t.boolean "mirror_available", default: true, null: false
end
......@@ -403,9 +403,9 @@ ActiveRecord::Schema.define(version: 20171214144320) do
t.integer "job_id", null: false
t.integer "file_type", null: false
t.integer "size", limit: 8
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.datetime "expire_at"
t.datetime_with_timezone "created_at", null: false
t.datetime_with_timezone "updated_at", null: false
t.datetime_with_timezone "expire_at"
t.string "file"
t.integer "file_store"
end
......
# File Storage in GitLab

We use the [CarrierWave](https://github.com/carrierwaveuploader/carrierwave) gem to handle file upload, storage, and retrieval.

There are many places where file uploading is used, depending on the context:

* System
  - Instance Logo (logo visible in sign in/sign up pages)
  - Header Logo (one displayed in the navigation bar)
* Group
  - Group avatars
* User
  - User avatars
  - User snippet attachments
* Project
  - Project avatars
  - Issues/MR/Notes Markdown attachments
  - Issues/MR/Notes Legacy Markdown attachments
  - CI Build Artifacts
  - LFS Objects

## Disk storage

GitLab started out saving everything on local disk. While directory locations have changed between versions, they are still not 100% standardized. You can see them below:

| Description | In DB? | Relative path | Uploader class | model_type |
| ----------- | ------ | ------------- | -------------- | ---------- |
| Instance logo | yes | uploads/-/system/appearance/logo/:id/:filename | `AttachmentUploader` | Appearance |
| Header logo | yes | uploads/-/system/appearance/header_logo/:id/:filename | `AttachmentUploader` | Appearance |
| Group avatars | yes | uploads/-/system/group/avatar/:id/:filename | `AvatarUploader` | Group |
| User avatars | yes | uploads/-/system/user/avatar/:id/:filename | `AvatarUploader` | User |
| User snippet attachments | yes | uploads/-/system/personal_snippet/:id/:random_hex/:filename | `PersonalFileUploader` | Snippet |
| Project avatars | yes | uploads/-/system/project/avatar/:id/:filename | `AvatarUploader` | Project |
| Issues/MR/Notes Markdown attachments | yes | uploads/:project_path_with_namespace/:random_hex/:filename | `FileUploader` | Project |
| Issues/MR/Notes Legacy Markdown attachments | no | uploads/-/system/note/attachment/:id/:filename | `AttachmentUploader` | Note |
| CI Artifacts (CE) | yes | shared/artifacts/:year_:month/:project_id/:id | `ArtifactUploader` | Ci::Build |
| LFS Objects (CE) | yes | shared/lfs-objects/:hex/:hex/:object_hash | `LfsObjectUploader` | LfsObject |

CI Artifacts and LFS Objects behave differently in CE and EE. In CE they inherit the `GitlabUploader`, while in EE they inherit the `ObjectStoreUploader` and store files in an S3 API compatible object store.

In the case of Issues/MR/Notes Markdown attachments, there is a different approach using the [Hashed Storage](../administration/repository_storage_types.md) layout: instead of basing the path on the mutable `:project_path_with_namespace`, it's possible to use a hash of the project ID instead, once a project migrates to the new approach (introduced in 10.2).
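For example, a Markdown attachment path under hashed storage decomposes roughly as follows; every concrete value below is fabricated for illustration:

# Illustrative only: all concrete values below are fabricated.
root           = 'public/uploads'        # FileUploader.root after this commit
hashed_segment = 'ca/fe/fe/ed/cafefeed'  # derived from a hash of the project ID
secret         = 'a1b2c3d4e5f6'          # per-upload random secret

File.join(root, hashed_segment, secret, 'screenshot.png')
# => "public/uploads/ca/fe/fe/ed/cafefeed/a1b2c3d4e5f6/screenshot.png"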
......@@ -22,6 +22,7 @@ module ObjectStorage
prepended do |base|
raise ObjectStoreUnavailable, "#{base} must include ObjectStorage::Concern to use extensions." unless base < Concern
base.include(::RecordsUploads::Concern)
end
......@@ -35,6 +36,11 @@ module ObjectStorage
self.upload = super
end
def destroy_upload(_tempfile = nil)
super
self.upload = nil
end
def upload=(upload)
return unless upload
......
......@@ -16,7 +16,7 @@ class ObjectStorageUploadWorker
uploader = subject.public_send(file_field) # rubocop:disable GitlabSecurity/PublicSend
uploader.migrate!(ObjectStorage::Store::REMOTE)
rescue RecordNotFound
# do not retry when the record do not exists
# does not retry when the record does not exist
Rails.logger.warn("Cannot find subject #{subject_class} with id=#{subject_id}.")
end
end
......@@ -215,9 +215,9 @@ module API
job = authenticate_job!
forbidden!('Job is not running!') unless job.running?
artifacts_upload_path = JobArtifactUploader.workhorse_upload_path
artifacts = uploaded_file(:file, artifacts_upload_path)
metadata = uploaded_file(:metadata, artifacts_upload_path)
workhorse_upload_path = JobArtifactUploader.workhorse_upload_path
artifacts = uploaded_file(:file, workhorse_upload_path)
metadata = uploaded_file(:metadata, workhorse_upload_path)
bad_request!('Missing artifacts file!') unless artifacts
file_to_large! unless artifacts.size < max_artifacts_size
......
......@@ -3,7 +3,7 @@ require 'backup/files'
module Backup
class Artifacts < Files
def initialize
super('artifacts', LegacyArtifactUploader.workhorse_upload_path)
super('artifacts', JobArtifactUploader.root)
end
def create_files_dir
......
......@@ -10,9 +10,12 @@ module Gitlab
FIND_BATCH_SIZE = 500
RELATIVE_UPLOAD_DIR = "uploads".freeze
ABSOLUTE_UPLOAD_DIR = "#{CarrierWave.root}/#{RELATIVE_UPLOAD_DIR}".freeze
ABSOLUTE_UPLOAD_DIR = File.join(
Gitlab.config.uploads.storage_path,
RELATIVE_UPLOAD_DIR
)
FOLLOW_UP_MIGRATION = 'PopulateUntrackedUploads'.freeze
START_WITH_CARRIERWAVE_ROOT_REGEX = %r{\A#{CarrierWave.root}/}
START_WITH_ROOT_REGEX = %r{\A#{Gitlab.config.uploads.storage_path}/}
EXCLUDED_HASHED_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/@hashed/*".freeze
EXCLUDED_TMP_UPLOADS_PATH = "#{ABSOLUTE_UPLOAD_DIR}/tmp/*".freeze
......@@ -80,7 +83,7 @@ module Gitlab
paths = []
stdout.each_line("\0") do |line|
paths << line.chomp("\0").sub(START_WITH_CARRIERWAVE_ROOT_REGEX, '')
paths << line.chomp("\0").sub(START_WITH_ROOT_REGEX, '')
if paths.size >= batch_size
yield(paths)
......
......@@ -201,7 +201,7 @@ module Gitlab
end
def handle_lfs_object_deleted_event(event, created_at)
file_path = File.join(LfsObjectUploader.workhorse_upload_path, event.file_path)
file_path = File.join(LfsObjectUploader.root, event.file_path)
job_id = ::Geo::FileRemovalWorker.perform_async(file_path)
......
......@@ -17,16 +17,13 @@ module Gitlab
false
end
private
def uploads_path
FileUploader.absolute_base_dir(@project)
end
def uploads_export_path
File.join(@shared.export_path, 'uploads')
end
# this is not all uploads
def uploads_path
FileUploader.new(@project).store_dir
end
end
end
end
module Gitlab
class UploadsTransfer < ProjectTransfer
def root_dir
File.join(*Gitlab.config.uploads.values_at('storage_path', 'base_dir'))
FileUploader.root
end
end
end
{"version":"1","format":"fs","fs":{"version":"1"}}
\ No newline at end of file
......@@ -6,5 +6,7 @@ describe Groups::UploadsController do
{ group_id: model }
end
it_behaves_like 'handle uploads'
it_behaves_like 'handle uploads' do
let(:uploader_class) { NamespaceFileUploader }
end
end
......@@ -146,7 +146,7 @@ describe Projects::ArtifactsController do
it_behaves_like 'a valid file' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
let(:store) { ObjectStorage::Store::LOCAL }
let(:archive_path) { JobArtifactUploader.workhorse_upload_path }
let(:archive_path) { JobArtifactUploader.root }
end
end
......
......@@ -47,7 +47,7 @@ describe Projects::RawController do
end
it 'serves the file' do
expect(controller).to receive(:send_file).with("#{Gitlab.config.shared.path}/lfs-objects/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
expect(controller).to receive(:send_file).with("#{LfsObjectUploader.root}/91/ef/f75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897", filename: 'lfs_object.iso', disposition: 'attachment')
get_show(public_project, id)
expect(response).to have_gitlab_http_status(200)
......
......@@ -34,6 +34,27 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
# E.g. The installation is in use at the time of migration, and someone has
# just uploaded a file
shared_examples 'does not add files in /uploads/tmp' do
let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
before do
FileUtils.mkdir(File.dirname(tmp_file))
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm(tmp_file)
end
it 'does not add files from /uploads/tmp' do
described_class.new.perform
expect(untracked_files_for_uploads.count).to eq(5)
end
end
it 'ensures the untracked_files_for_uploads table exists' do
expect do
described_class.new.perform
......@@ -120,24 +141,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
# E.g. The installation is in use at the time of migration, and someone has
# just uploaded a file
context 'when there are files in /uploads/tmp' do
let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
before do
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm(tmp_file)
end
it 'does not add files from /uploads/tmp' do
described_class.new.perform
expect(untracked_files_for_uploads.count).to eq(5)
end
it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
......@@ -208,24 +213,8 @@ describe Gitlab::BackgroundMigration::PrepareUntrackedUploads, :sidekiq do
end
end
# E.g. The installation is in use at the time of migration, and someone has
# just uploaded a file
context 'when there are files in /uploads/tmp' do
let(:tmp_file) { Rails.root.join(described_class::ABSOLUTE_UPLOAD_DIR, 'tmp', 'some_file.jpg') }
before do
FileUtils.touch(tmp_file)
end
after do
FileUtils.rm(tmp_file)
end
it 'does not add files from /uploads/tmp' do
described_class.new.perform
expect(untracked_files_for_uploads.count).to eq(5)
end
it_behaves_like 'does not add files in /uploads/tmp'
end
end
end
......
......@@ -285,8 +285,7 @@ describe Gitlab::Geo::LogCursor::Daemon, :postgresql, :clean_gitlab_redis_shared
end
it 'schedules a Geo::FileRemovalWorker' do
file_path = File.join(LfsObjectUploader.workhorse_upload_path,
lfs_object_deleted_event.file_path)
file_path = File.join(LfsObjectUploader.root, lfs_object_deleted_event.file_path)
expect(::Geo::FileRemovalWorker).to receive(:perform_async)
.with(file_path)
......
......@@ -4,7 +4,6 @@ describe Gitlab::ImportExport::UploadsRestorer do
describe 'bundle a project Git repo' do
let(:export_path) { "#{Dir.tmpdir}/uploads_saver_spec" }
let(:shared) { Gitlab::ImportExport::Shared.new(relative_path: project.full_path) }
let(:uploads_path) { FileUploader.model_path_segment(project) }
before do
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
......@@ -26,9 +25,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
restorer.restore
subject.restore
uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
......@@ -44,9 +43,9 @@ describe Gitlab::ImportExport::UploadsRestorer do
end
it 'copies the uploads to the project path' do
restorer.restore
subject.restore
uploads = Dir.glob(File.join(uploads_path, '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(subject.uploads_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('dummy.txt')
end
......
......@@ -30,7 +30,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
......@@ -52,7 +52,7 @@ describe Gitlab::ImportExport::UploadsSaver do
it 'copies the uploads to the export path' do
saver.save
uploads = Dir.glob(File.join(shared.export_path, 'uploads', '**/*')).map { |file| File.basename(file) }
uploads = Dir.glob(File.join(saver.uploads_export_path, '**/*')).map { |file| File.basename(file) }
expect(uploads).to include('banana_sample.gif')
end
......
......@@ -204,7 +204,7 @@ describe Namespace do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child) }
let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
let(:uploads_dir) { FileUploader.root }
let(:pages_dir) { File.join(TestEnv.pages_path) }
before do
......
......@@ -21,7 +21,7 @@ describe Upload do
path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD + 1.kilobyte,
model: build_stubbed(:user),
uploader: double('ExampleUploader'),
uploader: double('ExampleUploader')
)
expect(UploadChecksumWorker)
......@@ -35,7 +35,7 @@ describe Upload do
path: __FILE__,
size: described_class::CHECKSUM_THRESHOLD,
model: build_stubbed(:user),
uploader: double('ExampleUploader'),
uploader: double('ExampleUploader')
)
expect { upload.save }
......@@ -51,7 +51,7 @@ describe Upload do
size: File.size(__FILE__),
path: __FILE__,
model: build_stubbed(:user),
uploader: 'AvatarUploader',
uploader: 'AvatarUploader'
)
expect { described_class.remove_path(__FILE__) }
......
......@@ -948,7 +948,7 @@ describe API::Runner do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow a temp file to be passed from any path
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return('/')
end
context 'when job has been erased' do
......@@ -1125,7 +1125,7 @@ describe API::Runner do
# by configuring this path we only allow files to be passed from @tmpdir
# but all temporary files are stored in the system tmp directory
@tmpdir = Dir.mktmpdir
allow(JobArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
allow(JobArtifactUploader).to receive(:workhorse_upload_path).and_return(@tmpdir)
end
after do
......
......@@ -1003,7 +1003,7 @@ describe 'Git LFS API and storage' do
end
it 'responds with status 200, location of lfs store and object details' do
expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
......@@ -1160,7 +1160,7 @@ describe 'Git LFS API and storage' do
end
it 'with location of lfs store and object details' do
expect(json_response['StoreLFSPath']).to eq("#{Gitlab.config.shared.path}/lfs-objects/tmp/upload")
expect(json_response['StoreLFSPath']).to eq(LfsObjectUploader.workhorse_upload_path)
expect(json_response['LfsOid']).to eq(sample_oid)
expect(json_response['LfsSize']).to eq(sample_size)
end
......@@ -1274,7 +1274,7 @@ describe 'Git LFS API and storage' do
end
def setup_tempfile(lfs_tmp)
upload_path = "#{Gitlab.config.lfs.storage_path}/tmp/upload"
upload_path = LfsObjectUploader.workhorse_upload_path
FileUtils.mkdir_p(upload_path)
FileUtils.touch(File.join(upload_path, lfs_tmp))
......
require 'spec_helper'
def base_path(storage)
File.join(FileUploader.root, storage.disk_path)
end
describe Geo::HashedStorageAttachmentsMigrationService do
let!(:project) { create(:project) }
......@@ -11,7 +15,11 @@ describe Geo::HashedStorageAttachmentsMigrationService do
let(:old_path) { File.join(base_path(legacy_storage), upload.path) }
let(:new_path) { File.join(base_path(hashed_storage), upload.path) }
subject(:service) { described_class.new(project.id, old_attachments_path: legacy_storage.disk_path, new_attachments_path: hashed_storage.disk_path) }
subject(:service) do
described_class.new(project.id,
old_attachments_path: legacy_storage.disk_path,
new_attachments_path: hashed_storage.disk_path)
end
describe '#execute' do
context 'when succeeds' do
......@@ -72,8 +80,4 @@ describe Geo::HashedStorageAttachmentsMigrationService do
expect(service.async_execute).to eq('foo')
end
end
def base_path(storage)
File.join(CarrierWave.root, FileUploader.base_dir, storage.disk_path)
end
end
......@@ -3,6 +3,7 @@ shared_examples 'handle uploads' do
let(:jpg) { fixture_file_upload(Rails.root + 'spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload(Rails.root + 'spec/fixtures/doc_sample.txt', 'text/plain') }
let(:secret) { FileUploader.generate_secret }
let(:uploader_class) { FileUploader }
describe "POST #create" do
context 'when a user is not authorized to upload a file' do
......@@ -71,7 +72,7 @@ shared_examples 'handle uploads' do
before do
expect(FileUploader).to receive(:generate_secret).and_return(secret)
UploadService.new(model, jpg).execute
UploadService.new(model, jpg, uploader_class).execute
end
context "when the model is public" do
......
shared_context 'with storage' do |store, **stub_params|
before do
subject.object_store = store
end
end
shared_examples "builds correct paths" do |**patterns|
before do
allow(subject).to receive(:filename).and_return('<filename>')
end
describe "#store_dir" do
it "matches the pattern" do
expect(subject.store_dir).to match(patterns[:store_dir])
end
end if patterns.has_key?(:store_dir)
describe "#cache_dir" do
it "matches the pattern" do
expect(subject.cache_dir).to match(patterns[:cache_dir])
end
end if patterns.has_key?(:cache_dir)
describe "#work_dir" do
it "matches the pattern" do
expect(subject.work_dir).to match(patterns[:work_dir])
end
end if patterns.has_key?(:work_dir)
describe "#upload_path" do
it "matches the pattern" do
expect(subject.upload_path).to match(patterns[:upload_path])
end
end if patterns.has_key?(:upload_path)
describe ".absolute_path" do
it "matches the pattern" do
expect(subject.class.absolute_path(upload)).to match(patterns[:absolute_path])
end
end if patterns.has_key?(:absolute_path)
describe ".base_dir" do
it "matches the pattern" do
expect(subject.class.base_dir).to match(patterns[:base_dir])
end
end if patterns.has_key?(:base_dir)
end
module TrackUntrackedUploadsHelpers
def uploaded_file
fixture_path = Rails.root.join('spec', 'fixtures', 'rails_sample.jpg')
fixture_path = Rails.root.join('spec/fixtures/rails_sample.jpg')
fixture_file_upload(fixture_path)
end
......
......@@ -11,7 +11,7 @@ describe FileUploader do
shared_examples 'builds correct legacy storage paths' do
include_examples 'builds correct paths',
store_dir: %r{awesome/project/\h+},
absolute_path: %r{#{CarrierWave.root}/awesome/project/secret/foo.jpg}
absolute_path: %r{#{described_class.root}/awesome/project/secret/foo.jpg}
end
shared_examples 'uses hashed storage' do
......@@ -24,7 +24,7 @@ describe FileUploader do
it_behaves_like 'builds correct paths',
store_dir: %r{ca/fe/fe/ed/\h+},
absolute_path: %r{#{CarrierWave.root}/ca/fe/fe/ed/secret/foo.jpg}
absolute_path: %r{#{described_class.root}/ca/fe/fe/ed/secret/foo.jpg}
end
context 'when only repositories are rolled out' do
......
......@@ -35,7 +35,7 @@ describe JobArtifactUploader do
subject { uploader.file.path }
it { is_expected.to start_with("#{uploader.root}") }
it { is_expected.to start_with("#{uploader.root}/#{uploader.class.base_dir}") }
it { is_expected.to include("/#{job_artifact.created_at.utc.strftime('%Y_%m_%d')}/") }
it { is_expected.to include("/#{job_artifact.project_id}/") }
it { is_expected.to end_with("ci_build_artifacts.zip") }
......
......@@ -9,11 +9,11 @@ describe LegacyArtifactUploader do
subject { uploader }
# TODO: move to Workhorse::UploadPath
describe '.artifacts_upload_path' do
describe '.workhorse_upload_path' do
subject { described_class.workhorse_upload_path }
it { is_expected.to start_with(local_path) }
it { is_expected.to end_with('tmp/uploads/') }
it { is_expected.to end_with('tmp/uploads') }
end
it_behaves_like "builds correct paths",
......
......@@ -5,6 +5,24 @@ describe LfsObjectUploader do
let(:uploader) { described_class.new(lfs_object, :file) }
let(:path) { Gitlab.config.lfs.storage_path }
subject { uploader }
it_behaves_like "builds correct paths",
store_dir: %r[\h{2}/\h{2}],
cache_dir: %r[/lfs-objects/tmp/cache],
work_dir: %r[/lfs-objects/tmp/work]
context "object store is REMOTE" do
before do
stub_lfs_object_storage
end
include_context 'with storage', described_class::Store::REMOTE
it_behaves_like "builds correct paths",
store_dir: %r[\h{2}/\h{2}]
end
describe '#move_to_cache' do
it 'is true' do
expect(uploader.move_to_cache).to eq(true)
......@@ -17,26 +35,6 @@ describe LfsObjectUploader do
end
end
describe '#store_dir' do
subject { uploader.store_dir }
it { is_expected.to end_with("#{lfs_object.oid[0, 2]}/#{lfs_object.oid[2, 2]}") }
end
describe '#cache_dir' do
subject { uploader.cache_dir }
it { is_expected.to start_with(path) }
it { is_expected.to end_with('/tmp/cache') }
end
describe '#work_dir' do
subject { uploader.work_dir }
it { is_expected.to start_with(path) }
it { is_expected.to end_with('/tmp/work') }
end
describe 'migration to object storage' do
context 'with object storage disabled' do
it "is skipped" do
......@@ -102,7 +100,7 @@ describe LfsObjectUploader do
end
def store_file(lfs_object)
lfs_object.file = fixture_file_upload(Rails.root + "spec/fixtures/dk.png", "`/png")
lfs_object.file = fixture_file_upload(Rails.root.join("spec/fixtures/dk.png"), "`/png")
lfs_object.save!
end
end
......@@ -5,6 +5,7 @@ describe RecordsUploads do
class RecordsUploadsExampleUploader < GitlabUploader
include RecordsUploads::Concern
storage_options Gitlab.config.uploads
storage :file
def model
......@@ -20,6 +21,12 @@ describe RecordsUploads do
end
describe 'callbacks' do
let(:upload) { create(:upload) }
before do
uploader.upload = upload
end
it '#record_upload after `store`' do
expect(uploader).to receive(:record_upload).once
......