Commit fc6587f1 authored by Micaël Bergeron

Add proxy_download to perform proxied sending of all files

parent 6466739e
 module SendFileUpload
-  def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil)
+  def send_upload(file_upload, send_params: {}, redirect_params: {}, attachment: nil, disposition: 'attachment')
     if attachment
-      redirect_params[:query] = { "response-content-disposition" => "attachment;filename=#{attachment.inspect}" }
-      send_params.merge!(filename: attachment, disposition: 'attachment')
+      redirect_params[:query] = { "response-content-disposition" => "#{disposition};filename=#{attachment.inspect}" }
+      send_params.merge!(filename: attachment, disposition: disposition)
     end

     if file_upload.file_storage?
       send_file file_upload.path, send_params
+    elsif file_upload.class.proxy_download_enabled?
+      Gitlab::Workhorse.send_url(file_upload.url(**redirect_params))
     else
       redirect_to file_upload.url(**redirect_params)
     end
...
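With this change a single `send_upload` call covers three serving strategies. A minimal sketch of an intended call site, assuming an illustrative controller and a hypothetical `find_artifact` helper (neither is part of this commit):

```ruby
# Hypothetical call site for the updated concern; the controller name and the
# find_artifact helper are illustrative only.
class ArtifactsController < ApplicationController
  include SendFileUpload

  def download
    artifact_uploader = find_artifact # assumed: a CarrierWave uploader using ObjectStorage::Concern

    # Depending on where the file lives and on the new proxy_download setting:
    #   * local file storage            -> send_file from disk
    #   * remote + proxy_download true  -> Gitlab::Workhorse.send_url (GitLab proxies the bytes)
    #   * remote + proxy_download false -> redirect_to the object storage URL
    send_upload(artifact_uploader, attachment: artifact_uploader.filename)
  end
end
```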
 module UploadsActions
   include Gitlab::Utils::StrongMemoize
+  include SendFileUpload

   UPLOAD_MOUNTS = %w(avatar attachment file logo header_logo).freeze

@@ -26,14 +27,11 @@ module UploadsActions
   def show
     return render_404 unless uploader&.exists?

-    if uploader.file_storage?
-      disposition = uploader.image_or_video? ? 'inline' : 'attachment'
-
-      expires_in 0.seconds, must_revalidate: true, private: true
-
-      send_file uploader.file.path, disposition: disposition
-    else
-      redirect_to uploader.url
-    end
+    expires_in 0.seconds, must_revalidate: true, private: true
+
+    disposition = uploader.image_or_video? ? 'inline' : 'attachment'
+
+    send_upload(uploader, attachment: uploader.filename, disposition: disposition)
   end

   private
...
@@ -127,6 +127,14 @@ module ObjectStorage
       object_store_options.background_upload
     end

+    def proxy_download_enabled?
+      object_store_options.proxy_download
+    end
+
+    def direct_download_enabled?
+      !proxy_download_enabled?
+    end
+
     def object_store_credentials
       object_store_options.connection.to_hash.deep_symbolize_keys
     end
...
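The two new class-level predicates are mirror images of each other, so callers only ever need to read one configuration key. A sketch of how a caller might branch on them, with `JobArtifactUploader` standing in for any uploader class that includes `ObjectStorage::Concern`:

```ruby
# Illustrative branching on the new predicates.
if JobArtifactUploader.proxy_download_enabled?
  # object_store.proxy_download is true: GitLab (via Workhorse) streams the file itself
else
  # direct_download_enabled? is simply the negation: clients get a redirect to object storage
end
```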
@@ -149,6 +149,7 @@ production: &base
     #   enabled: false
     #   remote_directory: artifacts # The bucket name
     #   background_upload: false # Temporary option to limit automatic upload (Default: true)
+    #   proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
     #   connection:
     #     provider: AWS # Only AWS supported at the moment
     #     aws_access_key_id: AWS_ACCESS_KEY_ID
@@ -164,6 +165,7 @@ production: &base
       enabled: false
       remote_directory: lfs-objects # Bucket name
       # background_upload: false # Temporary option to limit automatic upload (Default: true)
+      # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
       connection:
         provider: AWS
         aws_access_key_id: AWS_ACCESS_KEY_ID
@@ -183,6 +185,7 @@ production: &base
       enabled: true
       remote_directory: uploads # Bucket name
       # background_upload: false # Temporary option to limit automatic upload (Default: true)
+      # proxy_download: false # Passthrough all downloads via GitLab instead of using Redirects to Object Storage
       connection:
         provider: AWS
         aws_access_key_id: AWS_ACCESS_KEY_ID
...
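Each storage type (artifacts, LFS objects, uploads) carries its own copy of the flag, so they can be toggled independently. A quick way to confirm what a running instance picked up, from a Rails console (values shown assume the defaults above):

```ruby
# Rails console sanity check; false is the default applied by the initializer below.
Gitlab.config.artifacts.object_store.proxy_download # => false
Gitlab.config.lfs.object_store.proxy_download       # => false
Gitlab.config.uploads.object_store.proxy_download   # => false
```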
@@ -309,6 +309,7 @@ Settings.artifacts['object_store'] ||= Settingslogic.new({})
 Settings.artifacts['object_store']['enabled'] = false if Settings.artifacts['object_store']['enabled'].nil?
 Settings.artifacts['object_store']['remote_directory'] ||= nil
 Settings.artifacts['object_store']['background_upload'] = true if Settings.artifacts['object_store']['background_upload'].nil?
+Settings.artifacts['object_store']['proxy_download'] = false if Settings.artifacts['object_store']['proxy_download'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.artifacts['object_store']['connection']&.deep_stringify_keys!
@@ -357,6 +358,7 @@ Settings.lfs['object_store'] ||= Settingslogic.new({})
 Settings.lfs['object_store']['enabled'] = false if Settings.lfs['object_store']['enabled'].nil?
 Settings.lfs['object_store']['remote_directory'] ||= nil
 Settings.lfs['object_store']['background_upload'] = true if Settings.lfs['object_store']['background_upload'].nil?
+Settings.lfs['object_store']['proxy_download'] = false if Settings.lfs['object_store']['proxy_download'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.lfs['object_store']['connection']&.deep_stringify_keys!
@@ -370,6 +372,7 @@ Settings.uploads['object_store'] ||= Settingslogic.new({})
 Settings.uploads['object_store']['enabled'] = false if Settings.uploads['object_store']['enabled'].nil?
 Settings.uploads['object_store']['remote_directory'] ||= 'uploads'
 Settings.uploads['object_store']['background_upload'] = true if Settings.uploads['object_store']['background_upload'].nil?
+Settings.uploads['object_store']['proxy_download'] = false if Settings.uploads['object_store']['proxy_download'].nil?
 # Convert upload connection settings to use string keys, to make Fog happy
 Settings.uploads['object_store']['connection']&.deep_stringify_keys!
...
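The initializer defaults these booleans with an explicit `.nil?` check rather than `||=`, because `||=` would overwrite a deliberate `false` set by the operator. A minimal standalone illustration of the difference, using `background_upload` (whose default is `true`):

```ruby
# Why booleans are defaulted with `= default if value.nil?` instead of `||=`:
settings = { 'background_upload' => false } # operator explicitly opted out

settings['background_upload'] = true if settings['background_upload'].nil?
settings['background_upload'] # => false, the explicit choice survives

settings['background_upload'] ||= true
settings['background_upload'] # => true, the explicit false was silently overridden
```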
@@ -100,6 +100,32 @@ artifacts, you can use an object storage like AWS S3 instead.
 This configuration relies on valid AWS credentials to be configured already.
 Use an [Object storage option][os] like AWS S3 to store job artifacts.

+### Object Storage Settings
+
+For source installations the following settings are nested under `artifacts:` and then `object_store:`. On Omnibus installs they are prefixed by `artifacts_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where artifacts will be stored | |
+| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
+| `proxy_download` | Set to true to proxy all files served through GitLab. Leaving it disabled reduces egress traffic, since clients download directly from remote storage instead of having GitLab proxy the data | `false` |
+| `connection` | Various connection options described below | |
+
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3-compatible host when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3-compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
 **In Omnibus installations:**

 _The artifacts are stored by default in
...
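Following the `artifacts_object_store_` prefix described in the table above, an Omnibus configuration that enables proxied artifact downloads might look like the sketch below; the exact `gitlab_rails` key names are an assumption derived from that prefix, not something this commit adds.

```ruby
# /etc/gitlab/gitlab.rb (illustrative; key names assumed from the documented prefix)
gitlab_rails['artifacts_object_store_enabled'] = true
gitlab_rails['artifacts_object_store_remote_directory'] = 'artifacts'
gitlab_rails['artifacts_object_store_proxy_download'] = true # serve artifacts through GitLab
```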
@@ -57,6 +57,32 @@ If you don't want to use the local disk where GitLab is installed to store the
 uploads, you can use an object storage provider like AWS S3 instead.
 This configuration relies on valid AWS credentials to be configured already.

+### Object Storage Settings
+
+For source installations the following settings are nested under `uploads:` and then `object_store:`. On Omnibus installs they are prefixed by `uploads_object_store_`.
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `enabled` | Enable/disable object storage | `false` |
+| `remote_directory` | The bucket name where uploads will be stored | |
+| `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
+| `proxy_download` | Set to true to proxy all files served through GitLab. Leaving it disabled reduces egress traffic, since clients download directly from remote storage instead of having GitLab proxy the data | `false` |
+| `connection` | Various connection options described below | |
+
+#### S3 compatible connection settings
+
+The connection settings match those provided by [Fog](https://github.com/fog), and are as follows:
+
+| Setting | Description | Default |
+|---------|-------------|---------|
+| `provider` | Always `AWS` for compatible hosts | AWS |
+| `aws_access_key_id` | AWS credentials, or compatible | |
+| `aws_secret_access_key` | AWS credentials, or compatible | |
+| `region` | AWS region | us-east-1 |
+| `host` | S3-compatible host when not using AWS, e.g. `localhost` or `storage.example.com` | s3.amazonaws.com |
+| `endpoint` | Can be used when configuring an S3-compatible service such as [Minio](https://www.minio.io), by entering a URL such as `http://127.0.0.1:9000` | (optional) |
+| `path_style` | Set to true to use `host/bucket_name/object` style paths instead of `bucket_name.host/object`. Leave as false for AWS S3 | false |
+
 **In Omnibus installations:**

 _The uploads are stored by default in
...
@@ -64,6 +64,7 @@ For source installations the following settings are nested under `lfs:` and then
 | `enabled` | Enable/disable object storage | `false` |
 | `remote_directory` | The bucket name where LFS objects will be stored | |
 | `background_upload` | Set to false to disable automatic upload. Option may be removed once upload is direct to S3 | `true` |
+| `proxy_download` | Set to true to proxy all files served through GitLab. Leaving it disabled reduces egress traffic, since clients download directly from remote storage instead of having GitLab proxy the data | `false` |
 | `connection` | Various connection options described below | |

 #### S3 compatible connection settings
...
@@ -410,7 +410,7 @@ module API
       )
     end

-    def present_file!(path, filename, content_type = 'application/octet-stream')
+    def present_disk_file!(path, filename, content_type = 'application/octet-stream')
       filename ||= File.basename(path)
       header['Content-Disposition'] = "attachment; filename=#{filename}"
       header['Content-Transfer-Encoding'] = 'binary'
@@ -426,15 +426,15 @@ module API
       end
     end

-    def present_artifacts!(artifacts_file, direct_download: true)
-      return not_found! unless artifacts_file.exists?
+    def present_carrierwave_file!(file, supports_direct_download: true)
+      return not_found! unless file.exists?

-      if artifacts_file.file_storage?
-        present_file!(artifacts_file.path, artifacts_file.filename)
-      elsif direct_download
-        redirect(artifacts_file.url)
+      if file.file_storage?
+        present_disk_file!(file.path, file.filename)
+      elsif supports_direct_download && file.class.direct_download_enabled?
+        redirect(file.url)
       else
-        header(*Gitlab::Workhorse.send_url(artifacts_file.url))
+        header(*Gitlab::Workhorse.send_url(file.url))
         status :ok
         body
       end
...
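In the proxied branch, Rails does not stream the file itself; it only tells Workhorse where to fetch it. Roughly, and as the spec at the end of this commit also asserts, the helper returns a header pair whose value begins with `send-url:`:

```ruby
# Rough shape of the proxied branch (illustrative): Workhorse receives a
# Gitlab-Workhorse-Send-Data style header instructing it to fetch the remote URL
# and stream the response body to the client, so Rails never buffers the file.
header_name, header_value = Gitlab::Workhorse.send_url(file.url)
header_name  # => Gitlab::Workhorse::SEND_DATA_HEADER
header_value # => starts with "send-url:", followed by an encoded description of the request
```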
@@ -28,7 +28,7 @@ module API
         builds = user_project.latest_successful_builds_for(params[:ref_name])
         latest_build = builds.find_by!(name: params[:job])

-        present_artifacts!(latest_build.artifacts_file)
+        present_carrierwave_file!(latest_build.artifacts_file)
       end

       desc 'Download the artifacts archive from a job' do
@@ -43,7 +43,7 @@ module API
         build = find_build!(params[:job_id])

-        present_artifacts!(build.artifacts_file)
+        present_carrierwave_file!(build.artifacts_file)
       end

       desc 'Download a specific file from artifacts archive' do
...
@@ -72,7 +72,7 @@ module API
         present build, with: Entities::Job
       end

-      # TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
+      # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
       # is saved in the DB instead of file). But before that, we need to consider how to replace the value of
       # `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
       desc 'Get a trace of a specific job of a project'
...
@@ -25,7 +25,7 @@ module API
         render_api_error!('404 Not found or has expired', 404) unless path

-        present_file!(path, File.basename(path), 'application/gzip')
+        present_disk_file!(path, File.basename(path), 'application/gzip')
       end

       desc 'Start export' do
...
@@ -249,7 +249,7 @@ module API
         get '/:id/artifacts' do
           job = authenticate_job!

-          present_artifacts!(job.artifacts_file, direct_download: params[:direct_download])
+          present_carrierwave_file!(job.artifacts_file, supports_direct_download: params[:direct_download])
         end
       end
     end
...
@@ -85,7 +85,7 @@ module API
         build = get_build!(params[:build_id])

-        present_artifacts!(build.artifacts_file)
+        present_carrierwave_file!(build.artifacts_file)
       end

       desc 'Download the artifacts file from build' do
@@ -102,10 +102,10 @@ module API
         builds = user_project.latest_successful_builds_for(params[:ref_name])
         latest_build = builds.find_by!(name: params[:job])

-        present_artifacts!(latest_build.artifacts_file)
+        present_carrierwave_file!(latest_build.artifacts_file)
       end

-      # TODO: We should use `present_file!` and leave this implementation for backward compatibility (when build trace
+      # TODO: We should use `present_disk_file!` and leave this implementation for backward compatibility (when build trace
       # is saved in the DB instead of file). But before that, we need to consider how to replace the value of
       # `runners_token` with some mask (like `xxxxxx`) when sending trace file directly by workhorse.
       desc 'Get a trace of a specific build of a project'
...
require 'spec_helper'

describe SendFileUpload do
  let(:uploader_class) do
    Class.new(GitlabUploader) do
      include ObjectStorage::Concern

      storage_options Gitlab.config.uploads

      private

      # user/:id
      def dynamic_segment
        File.join(model.class.to_s.underscore, model.id.to_s)
      end
    end
  end

  let(:controller_class) do
    Class.new do
      include SendFileUpload
    end
  end

  let(:object) { build_stubbed(:user) }
  let(:uploader) { uploader_class.new(object, :file) }

  describe '#send_upload' do
    let(:controller) { controller_class.new }
    let(:temp_file) { Tempfile.new('test') }

    subject { controller.send_upload(uploader) }

    before do
      FileUtils.touch(temp_file)
    end

    after do
      FileUtils.rm_f(temp_file)
    end

    context 'when local file is used' do
      before do
        uploader.store!(temp_file)
      end

      it 'sends a file' do
        expect(controller).to receive(:send_file).with(uploader.path, anything)

        subject
      end
    end

    context 'when remote file is used' do
      before do
        stub_uploads_object_storage(uploader: uploader_class)
        uploader.object_store = ObjectStorage::Store::REMOTE
        uploader.store!(temp_file)
      end

      context 'and proxying is enabled' do
        before do
          allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { true }
        end

        it 'sends a file' do
          subject

          is_expected.to start_with(Gitlab::Workhorse::SEND_DATA_HEADER)
          is_expected.to end_with(/^send-url:/)
        end
      end

      context 'and proxying is disabled' do
        before do
          allow(Gitlab.config.uploads.object_store).to receive(:proxy_download) { false }
        end

        it 'sends a file' do
          expect(controller).to receive(:redirect_to).with(/#{uploader.path}/)

          subject
        end
      end
    end
  end
end