Commit 3a4f1f24 authored by Bob Van Landuyt's avatar Bob Van Landuyt

Merge branch 'ab-205166-send-images-to-s3-bucket' into 'master'

Send Incident images to Status Page s3 Bucket on publish

See merge request gitlab-org/gitlab!31269
parents 81164bf0 a66cfc47
# frozen_string_literal: true
# Finds an uploaded file for a project given its secret and file path.
#
# Wraps FileUploader so callers can fetch a file-like object
# (responds to +open+) agnostic of the underlying storage type.
class UploaderFinder
  # Arguments correspond to Upload.model_type, Upload.secret and Upload.file_path
  def initialize(project, secret, file_path)
    @project = project
    @secret = secret
    @file_path = file_path
  end

  # Returns a FileUploader with the uploaded file retrieved into the
  # object state, or nil when the path looks like a traversal attempt.
  def execute
    prevent_path_traversal_attack!
    retrieve_file_state!

    uploader
  rescue ::Gitlab::Utils::PathTraversalAttackError
    nil # no-op for incorrect file paths
  end

  def prevent_path_traversal_attack!
    Gitlab::Utils.check_path_traversal!(@file_path)
  end

  def retrieve_file_state!
    uploader.retrieve_from_store!(@file_path)
  end

  # Memoized uploader instance; can be opened via +open+ regardless of storage type
  def uploader
    @uploader ||= FileUploader.new(@project, secret: @secret)
  end
end
......@@ -71,7 +71,8 @@ The incident detail page shows detailed information about a particular incident.
- Status on the incident, including when the incident was last updated.
- The incident title, including any emojis.
- The description of the incident, including emojis and static images.
- The description of the incident, including emojis.
- Any file attachments provided in the incident description or comments with a valid image extension. [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/205166) in GitLab 13.1.
- A chronological ordered list of updates to the incident.
![Status Page detail](../img/status_page_detail_v12_10.png)
......@@ -108,3 +109,15 @@ Anyone with access to view the Issue can add an Emoji Award to a comment, so you
### Changing the Incident status
To change the incident status from `open` to `closed`, close the incident issue within GitLab. This will then be updated shortly on the Status Page website.
## Attachment storage
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/205166) in GitLab 13.1.
Beginning with GitLab 13.1, files attached to incident issue descriptions or
comments are published to and unpublished from the status page storage as part of
the [publication flow](#how-it-works).
### Limit
Only 5000 attachments per issue will be transferred to the status page.
# frozen_string_literal: true
module StatusPage
  # Shared response builders for Status Page publication services.
  #
  # Mixes in +success+/+error+ helpers wrapping ServiceResponse so all
  # publication services return responses of a consistent shape.
  module PublicationServiceResponses
    extend ActiveSupport::Concern

    # Builds a successful ServiceResponse carrying +payload+.
    def success(payload = {})
      ServiceResponse.success(payload: payload)
    end

    # Builds an error ServiceResponse with +message+ and optional +payload+.
    def error(message, payload = {})
      ServiceResponse.error(message: message, payload: payload)
    end
  end
end
# frozen_string_literal: true
module StatusPage
  # Publishes attachments from incident comments and descriptions to S3.
  #
  # Should only be called from publish details or a service that inherits
  # from the publish_base_service.
  class PublishAttachmentsService
    include ::Gitlab::Utils::StrongMemoize
    include ::StatusPage::PublicationServiceResponses

    def initialize(project:, issue:, user_notes:, storage_client:)
      @project = project
      @issue = issue
      @user_notes = user_notes
      @storage_client = storage_client
      # Objects already in the bucket count toward the upload limit
      @total_uploads = existing_keys.size
      @has_errors = false
    end

    # Uploads every new attachment referenced in the issue description and
    # user notes. Returns an error response if any upload failed, a success
    # response otherwise.
    def execute
      publish_description_attachments
      publish_user_note_attachments

      return file_upload_error if @has_errors

      success
    end

    private

    attr_reader :project, :issue, :user_notes, :storage_client

    def publish_description_attachments
      publish_markdown_uploads(
        markdown_field: issue.description
      )
    end

    def publish_user_note_attachments
      user_notes.each do |user_note|
        publish_markdown_uploads(
          markdown_field: user_note.note
        )
      end
    end

    # Scans +markdown_field+ for upload references and uploads any files not
    # already present in storage, stopping once MAX_UPLOADS is reached.
    def publish_markdown_uploads(markdown_field:)
      # `each`, not `map`: we iterate purely for the upload side effect
      markdown_field.scan(FileUploader::MARKDOWN_PATTERN).each do |secret, file_name|
        break if @total_uploads >= StatusPage::Storage::MAX_UPLOADS

        key = upload_path(secret, file_name)

        next if existing_keys.include? key

        file = find_file(secret, file_name)
        # Skip files that could not be found (e.g. path traversal attempts)
        next if file.nil?

        upload_file(key, file)
      end
    end

    def upload_file(key, file)
      file.open do |open_file|
        # Send files to s3 storage in parts (handles large files)
        storage_client.multipart_upload(key, open_file)
        @total_uploads += 1
      end
    rescue StatusPage::Storage::Error => e
      # In production, continue uploading other files if one fails, but
      # report the failure to Sentry; raise errors in development and test.
      @has_errors = true
      Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
    end

    def existing_keys
      strong_memoize(:existing_keys) do
        storage_client.list_object_keys(uploads_path)
      end
    end

    def upload_path(secret, file_name)
      StatusPage::Storage.upload_path(issue.iid, secret, file_name)
    end

    def uploads_path
      StatusPage::Storage.uploads_path(issue.iid)
    end

    def find_file(secret, file_name)
      # Uploader object behaves like a file with an 'open' method
      UploaderFinder.new(project, secret, file_name).execute
    end

    def file_upload_error
      error('One or more files did not upload properly')
    end
  end
end
......@@ -3,6 +3,7 @@
module StatusPage
class PublishBaseService
include Gitlab::Utils::StrongMemoize
include StatusPage::PublicationServiceResponses
def initialize(project:)
@project = project
......@@ -44,7 +45,7 @@ module StatusPage
project.status_page_setting&.enabled?
end
def upload(key, json)
def upload_json(key, json)
return error_limit_exceeded(key) if limit_exceeded?(json)
content = json.to_json
......@@ -53,6 +54,10 @@ module StatusPage
success(object_key: key)
end
def multipart_upload(key, uploader)
storage_client.multipart_upload(key, uploader)
end
def delete_object(key)
storage_client.delete_object(key)
end
......@@ -67,10 +72,6 @@ module StatusPage
.valid?
end
def error(message, payload = {})
ServiceResponse.error(message: message, payload: payload)
end
def error_limit_exceeded(key)
error("Failed to upload #{key}: Limit exceeded")
end
......@@ -82,9 +83,5 @@ module StatusPage
def error_no_storage_client
error('No storage client available. Is the status page setting activated?')
end
def success(payload = {})
ServiceResponse.success(payload: payload)
end
end
end
......@@ -11,22 +11,47 @@ module StatusPage
private
def process(issue, user_notes)
response = publish_json(issue, user_notes)
return response if response.error?
response = publish_attachments(issue, user_notes)
return response if response.error?
success
end
def publish_json(issue, user_notes)
json = serialize(issue, user_notes)
key = object_key(json)
key = json_object_key(json)
return error('Missing object key') unless key
upload(key, json)
upload_json(key, json)
end
def serialize(issue, user_notes)
serializer.represent_details(issue, user_notes)
end
def object_key(json)
def json_object_key(json)
id = json[:id]
return unless id
StatusPage::Storage.details_path(id)
end
def publish_attachments(issue, user_notes)
return success unless attachements_enabled?
StatusPage::PublishAttachmentsService.new(
project: @project,
issue: issue,
user_notes: user_notes,
storage_client: storage_client
).execute
end
def attachements_enabled?
Feature.enabled?(:status_page_attachments, @project, default_enabled: true)
end
end
end
......@@ -13,7 +13,7 @@ module StatusPage
def process(issues)
json = serialize(issues)
upload(object_key, json)
upload_json(object_key, json)
end
def serialize(issues)
......
......@@ -6,6 +6,7 @@ module StatusPage
#
# Use this service for publishing an incident to CDN synchronously.
# To publish asynchronously use +StatusPage::TriggerPublishService+ instead.
# Called within an async job so as to run out of band from web requests
#
# This services calls:
# * StatusPage::PublishDetailsService
......
---
title: Support transferring and displaying image uploads on Status Page
merge_request: 31269
author:
type: added
......@@ -11,7 +11,7 @@ module StatusPage
# Limit on paginated responses
MAX_KEYS_PER_PAGE = 1_000
MAX_PAGES = 5
MAX_IMAGE_UPLOADS = MAX_KEYS_PER_PAGE * MAX_PAGES
MAX_UPLOADS = MAX_KEYS_PER_PAGE * MAX_PAGES
def self.details_path(id)
"data/incident/#{id}.json"
......
......@@ -4,6 +4,8 @@ module StatusPage
module Storage
# Implements a minimal AWS S3 client.
class S3Client
include StatusPage::Storage::WrapsStorageErrors
def initialize(region:, bucket_name:, access_key_id:, secret_access_key:)
@bucket_name = bucket_name
@client = Aws::S3::Client.new(
......@@ -56,13 +58,23 @@ module StatusPage
def list_object_keys(prefix)
wrap_errors(prefix: prefix) do
list_objects(prefix).reduce(Set.new) do |objects, (response, index)|
break objects if objects.size >= StatusPage::Storage::MAX_IMAGE_UPLOADS
break objects if objects.size >= StatusPage::Storage::MAX_UPLOADS
objects | response.contents.map(&:key)
end
end
end
# Stores +file+ as +key+ in storage using multipart upload
#
# key: s3 key at which file is stored
# file: An open file or file-like io object
def multipart_upload(key, file)
StatusPage::Storage::S3MultipartUpload.new(
client: client, bucket_name: bucket_name, key: key, open_file: file
).call
end
private
attr_reader :client, :bucket_name
......@@ -70,12 +82,6 @@ module StatusPage
def list_objects(prefix)
client.list_objects_v2(bucket: bucket_name, prefix: prefix, max_keys: StatusPage::Storage::MAX_KEYS_PER_PAGE)
end
def wrap_errors(**args)
yield
rescue Aws::Errors::ServiceError => e
raise Error, bucket: bucket_name, error: e, **args
end
end
end
end
# frozen_string_literal: true
module StatusPage
  module Storage
    # Implements multipart upload to S3.
    class S3MultipartUpload
      include StatusPage::Storage::WrapsStorageErrors

      # 5 megabytes is the minimum part size specified in the amazon SDK
      MULTIPART_UPLOAD_PART_SIZE = 5.megabytes

      def initialize(client:, bucket_name:, key:, open_file:)
        @client = client
        @bucket_name = bucket_name
        @key = key
        @file = open_file
      end

      # Stores +file+ as +key+ in storage using multipart upload
      #
      # key: s3 key at which file is stored
      # file: An open file or file-like io object
      def call
        # AWS sdk v2 has upload_file which supports multipart.
        # However Gitlab::HttpIO, used when object storage is enabled,
        # cannot be used with upload_file.
        wrap_errors(key: key) do
          upload_id = create_upload.to_h[:upload_id]

          begin
            complete_upload(upload_id, upload_part(upload_id))
          # Rescue on Exception since even on keyboard interrupt we want to
          # abort the upload and re-raise. Aborting clears the already
          # uploaded parts so that they do not cost the bucket owner.
          # The status page bucket lifecycle policy will clear out unaborted
          # parts if this fails without an exception (power failures etc.)
          rescue Exception => e # rubocop:disable Lint/RescueException
            abort_upload(upload_id)
            raise e
          end
        end
      end

      private

      attr_reader :key, :file, :client, :bucket_name

      def create_upload
        client.create_multipart_upload({ bucket: bucket_name, key: key })
      end

      # Uploads the file in MULTIPART_UPLOAD_PART_SIZE chunks and returns
      # the list of part descriptors needed to complete the upload.
      def upload_part(upload_id)
        file.seek(0)

        uploaded_parts = []
        next_part_number = 1

        until file.eof?
          response = client.upload_part(
            body: file.read(MULTIPART_UPLOAD_PART_SIZE),
            bucket: bucket_name,
            key: key,
            part_number: next_part_number, # required by the S3 API
            upload_id: upload_id
          )

          uploaded_parts << response.to_h.merge(part_number: next_part_number)
          next_part_number += 1
        end

        uploaded_parts
      end

      def complete_upload(upload_id, parts)
        client.complete_multipart_upload(
          bucket: bucket_name,
          key: key,
          multipart_upload: { parts: parts },
          upload_id: upload_id
        )
      end

      def abort_upload(upload_id)
        client.abort_multipart_upload(
          bucket: bucket_name,
          key: key,
          upload_id: upload_id
        )
      end
    end
  end
end
# frozen_string_literal: true
module StatusPage
  module Storage
    # Shared helper that translates AWS service errors into
    # StatusPage::Storage::Error, tagging the failing bucket and the
    # call arguments for easier debugging.
    module WrapsStorageErrors
      # Yields the given block, re-raising any Aws::Errors::ServiceError
      # as a StatusPage::Storage::Error carrying +args+ as context.
      # Relies on the including class exposing +bucket_name+.
      def wrap_errors(**args)
        yield
      rescue Aws::Errors::ServiceError => e
        raise Error, bucket: bucket_name, error: e, **args
      end
    end
  end
end
......@@ -135,7 +135,7 @@ describe StatusPage::Storage::S3Client, :aws_s3 do
include_context 'oversized list_objects_v2 result'
it 'returns result at max size' do
expect(result.count).to eq(StatusPage::Storage::MAX_IMAGE_UPLOADS)
expect(result.count).to eq(StatusPage::Storage::MAX_UPLOADS)
end
end
......
# frozen_string_literal: true

require 'spec_helper'

describe StatusPage::Storage::S3MultipartUpload, :aws_s3 do
  let(:region) { 'eu-west-1' }
  let(:bucket_name) { 'bucket_name' }
  let(:access_key_id) { 'key_id' }
  let(:secret_access_key) { 'secret' }

  let(:s3_client) do
    Aws::S3::Client.new(
      region: region,
      credentials: Aws::Credentials.new(access_key_id, secret_access_key)
    )
  end

  describe '#call' do
    let(:key) { '123' }
    let(:file) do
      Tempfile.new('foo').tap do |file|
        file.open
        file.write('hello world')
        file.rewind
      end
    end
    let(:upload_id) { '123456789' }

    subject(:result) { described_class.new(client: s3_client, bucket_name: bucket_name, key: key, open_file: file).call }

    before do
      stub_responses(
        :create_multipart_upload,
        instance_double(Aws::S3::Types::CreateMultipartUploadOutput, { to_h: { upload_id: upload_id } })
      )
    end

    after do
      file.close
    end

    context 'when successful' do
      before do
        stub_responses(
          :upload_part,
          instance_double(Aws::S3::Types::UploadPartOutput, to_h: {})
        )
      end

      it 'completes' do
        expect(s3_client).to receive(:complete_multipart_upload)

        result
      end

      context 'with more than one part' do
        before do
          stub_const("#{described_class}::MULTIPART_UPLOAD_PART_SIZE", 1.byte)
        end

        it 'completes' do
          # Ensure size limit triggers more than one part upload
          expect(s3_client).to receive(:upload_part).at_least(:twice)
          expect(s3_client).to receive(:complete_multipart_upload)

          result
        end
      end
    end

    context 'when fails' do
      let(:aws_error) { 'SomeError' }

      context 'on upload part' do
        before do
          stub_responses(:upload_part, aws_error)
        end

        it 'aborts the upload and raises an error' do
          msg = error_message(aws_error, key: key)

          expect(s3_client).to receive(:abort_multipart_upload)
          expect { result }.to raise_error(StatusPage::Storage::Error, msg)
        end
      end

      context 'on complete_multipart_upload' do
        before do
          stub_responses(:upload_part, {})
          stub_responses(:complete_multipart_upload, aws_error)
        end

        it 'aborts the upload and raises an error' do
          msg = error_message(aws_error, key: key)

          expect(s3_client).to receive(:abort_multipart_upload)
          expect { result }.to raise_error(StatusPage::Storage::Error, msg)
        end
      end
    end
  end

  private

  def stub_responses(*args)
    s3_client.stub_responses(*args)
  end

  def error_message(error_class, **args)
    # NOTE: "occured" intentionally mirrors the spelling used in the
    # production StatusPage::Storage::Error message; do not "fix" it here
    # without changing the error class as well.
    %{Error occured "Aws::S3::Errors::#{error_class}" } \
      "for bucket #{bucket_name.inspect}. Arguments: #{args.inspect}"
  end
end
......@@ -29,7 +29,7 @@ describe StatusPage::Storage do
it 'MAX_KEYS_PER_PAGE times MAX_PAGES establishes upload limit' do
# spec intended to fail if page related MAX constants change
# In order to ensure change to documented MAX_IMAGE_UPLOADS is considered
# In order to ensure change to documented MAX_UPLOADS is considered
expect(StatusPage::Storage::MAX_KEYS_PER_PAGE * StatusPage::Storage::MAX_PAGES).to eq(5000)
end
end
# frozen_string_literal: true

require 'spec_helper'

describe StatusPage::PublishAttachmentsService do
  # Adds a second markdown upload reference so specs can exercise
  # multi-file behavior (upload limits, partially-published sets).
  RSpec.shared_context 'second file' do
    # Setup second file
    let(:upload_secret_2) { '9cb61a79ce884d5b681dd42728d3c159' }
    let(:image_file_name_2) { 'tanuki_2.png' }
    let(:upload_path_2) { "/uploads/#{upload_secret_2}/#{image_file_name_2}" }
    let(:markdown_field) { "![tanuki](#{upload_path}) and ![tanuki_2](#{upload_path_2})" }
    let(:status_page_upload_path_2) { StatusPage::Storage.upload_path(issue.iid, upload_secret_2, image_file_name_2) }
  end

  describe '#execute' do
    let_it_be(:project, refind: true) { create(:project) }
    let(:markdown_field) { 'Hello World' }
    let(:user_notes) { [] }
    let(:incident_id) { 1 }
    let(:issue) { instance_double(Issue, notes: user_notes, description: markdown_field, iid: incident_id) }
    let(:key) { StatusPage::Storage.details_path(incident_id) }
    let(:content) { { id: incident_id } }
    let(:storage_client) { instance_double(StatusPage::Storage::S3Client) }

    let(:service) { described_class.new(project: project, issue: issue, user_notes: user_notes, storage_client: storage_client) }

    subject { service.execute }

    include_context 'stub status page enabled'

    context 'publishes file attachments' do
      before do
        allow(storage_client).to receive(:upload_object).with("data/incident/1.json", "{\"id\":1}")
        allow(storage_client).to receive(:list_object_keys).and_return(Set.new)
      end

      context 'when not in markdown' do
        it 'publishes no images' do
          expect(storage_client).not_to receive(:multipart_upload)

          expect(subject.payload).to eq({})
          expect(subject).to be_success
        end
      end

      context 'when in markdown' do
        let(:upload_secret) { '734b8524a16d44eb0ff28a2c2e4ff3c0' }
        let(:image_file_name) { 'tanuki.png'}
        let(:upload_path) { "/uploads/#{upload_secret}/#{image_file_name}" }
        let(:markdown_field) { "![tanuki](#{upload_path})" }
        let(:status_page_upload_path) { StatusPage::Storage.upload_path(issue.iid, upload_secret, image_file_name) }
        let(:user_notes) { [] }

        let(:open_file) { instance_double(File, read: 'stubbed read') }
        let(:uploader) { instance_double(FileUploader) }

        before do
          allow(uploader).to receive(:open).and_yield(open_file).twice

          allow_next_instance_of(UploaderFinder) do |finder|
            allow(finder).to receive(:execute).and_return(uploader)
          end

          allow(storage_client).to receive(:list_object_keys).and_return(Set[])
          allow(storage_client).to receive(:upload_object)
        end

        it 'publishes description images' do
          expect(storage_client).to receive(:multipart_upload).with(status_page_upload_path, open_file).once

          expect(subject).to be_success
          expect(subject.payload).to eq({})
        end

        context 'when upload to storage throws an error' do
          it 'returns an error response' do
            storage_error = StatusPage::Storage::Error.new(bucket: '', error: StandardError.new)
            # no raise to mimic prod behavior
            allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
            allow(storage_client).to receive(:multipart_upload).and_raise(storage_error)

            expect(subject.error?).to be true
          end
        end

        context 'user notes uploads' do
          let(:user_note) { instance_double(Note, note: markdown_field) }
          let(:user_notes) { [user_note] }
          let(:issue) { instance_double(Issue, notes: user_notes, description: '', iid: incident_id) }

          it 'publishes images' do
            expect(storage_client).to receive(:multipart_upload).with(status_page_upload_path, open_file).once

            expect(subject).to be_success
            expect(subject.payload).to eq({})
          end
        end

        context 'when exceeds upload limit' do
          include_context 'second file'

          before do
            stub_const("StatusPage::Storage::MAX_UPLOADS", 2)
            # One pre-existing object already counts toward the limit of 2
            allow(storage_client).to receive(:list_object_keys).and_return(Set['existing_key'])
          end

          it 'publishes only uploads up to the limit' do
            # Only one of the two new files fits under the limit
            expect(storage_client).to receive(:multipart_upload).once

            expect(subject).to be_success
            expect(subject.payload).to eq({})
          end
        end

        context 'when all images are in s3' do
          before do
            allow(storage_client).to receive(:list_object_keys).and_return(Set[status_page_upload_path])
          end

          it 'publishes no images' do
            expect(storage_client).not_to receive(:multipart_upload)

            expect(subject).to be_success
            expect(subject.payload).to eq({})
          end
        end

        context 'when images are already in s3' do
          include_context 'second file'

          before do
            allow(storage_client).to receive(:list_object_keys).and_return(Set[status_page_upload_path])
          end

          it 'publishes only new images' do
            expect(storage_client).to receive(:multipart_upload).with(status_page_upload_path_2, open_file).once
            expect(storage_client).not_to receive(:multipart_upload).with(status_page_upload_path, open_file)

            expect(subject).to be_success
            expect(subject.payload).to eq({})
          end
        end
      end
    end
  end
end
......@@ -3,10 +3,12 @@
require 'spec_helper'
describe StatusPage::PublishDetailsService do
include ::StatusPage::PublicationServiceResponses
let_it_be(:project, refind: true) { create(:project) }
let(:issue) { instance_double(Issue) }
let(:user_notes) { double(:user_notes) }
let(:user_notes) { [] }
let(:incident_id) { 1 }
let(:issue) { instance_double(Issue, notes: user_notes, description: 'Incident Occuring', iid: incident_id) }
let(:key) { StatusPage::Storage.details_path(incident_id) }
let(:content) { { id: incident_id } }
......@@ -30,5 +32,47 @@ describe StatusPage::PublishDetailsService do
expect(result.message).to eq('Missing object key')
end
end
context 'publishing attachments' do
before do
allow(storage_client).to receive(:upload_object).and_return(success)
allow(storage_client).to receive(:list_object_keys).and_return([])
end
context 'when feature flag disabled' do
before do
stub_feature_flags(status_page_attachments: false)
end
it 'does not publish attachments and returns success' do
expect(StatusPage::PublishAttachmentsService).not_to receive(:new)
expect(subject.success?).to be true
end
end
context 'when successful' do
let(:success_response) { double(error?: false, success?: true) }
it 'sends attachments to storage and returns success' do
expect_next_instance_of(StatusPage::PublishAttachmentsService) do |service|
expect(service).to receive(:execute).and_return(success_response)
end
expect(subject.success?).to be true
end
end
context 'when error returned from PublishAttachmentsService' do
let(:error_response) { double(error?: true, success?: false) }
it 'returns an error' do
expect_next_instance_of(StatusPage::PublishAttachmentsService) do |service|
expect(service).to receive(:execute).and_return(error_response)
end
expect(subject.success?).to be false
end
end
end
end
end
......@@ -26,8 +26,8 @@ describe StatusPage::PublishService do
describe 'publish details' do
context 'when upload succeeds' do
it 'uploads incident details and list' do
expect_to_upload_details(issue)
expect_to_upload_list
expect_to_publish_details(error?: false, success?: true)
expect_to_publish_list(error?: false, success?: true)
expect(result).to be_success
end
......@@ -47,7 +47,7 @@ describe StatusPage::PublishService do
context 'when unpublish succeeds' do
it 'unpublishes incident details and uploads incident list' do
expect_to_unpublish(error?: false)
expect_to_unpublish(error?: false, success?: true)
expect_to_upload_list
expect(result).to be_success
......@@ -65,11 +65,11 @@ describe StatusPage::PublishService do
describe 'publish list' do
context 'when upload fails' do
it 'returns error and skip list upload' do
expect_to_upload_details(issue)
expect_to_upload_list(status: 404)
it 'returns error response' do
expect_to_publish_details(error?: false, success?: true)
expect_to_publish_list(error?: true, success?: false)
expect { result }.to raise_error(StatusPage::Storage::Error)
expect(result.error?).to be(true)
end
end
end
......@@ -96,8 +96,20 @@ describe StatusPage::PublishService do
private
def expect_to_unpublish(**response_kwargs)
stub_service_called(StatusPage::UnpublishDetailsService, **response_kwargs)
end
def expect_to_publish_details(**response_kwargs)
stub_service_called(StatusPage::PublishDetailsService, **response_kwargs)
end
def expect_to_publish_list(**response_kwargs)
stub_service_called(StatusPage::PublishListService, **response_kwargs)
end
def stub_service_called(klass, **response_kwargs)
service_response = double(**response_kwargs)
expect_next_instance_of(StatusPage::UnpublishDetailsService) do |service|
expect_next_instance_of(klass) do |service|
expect(service).to receive(:execute).and_return(service_response)
end
......@@ -108,10 +120,6 @@ describe StatusPage::PublishService do
stub_aws_request(:put, StatusPage::Storage.details_path(issue.iid), **kwargs)
end
def expect_to_delete_details(issue, **kwargs)
stub_aws_request(:delete, StatusPage::Storage.details_path(issue.iid), **kwargs)
end
def expect_to_upload_list(**kwargs)
stub_aws_request(:put, StatusPage::Storage.list_path, **kwargs)
end
......
......@@ -12,3 +12,20 @@ RSpec.shared_context 'status page enabled' do
end
end
end
RSpec.shared_context 'stub status page enabled' do
let(:status_page_setting_enabled) { true }
let(:status_page_setting) do
instance_double(
StatusPage::ProjectSetting,
enabled?: status_page_setting_enabled,
storage_client: storage_client
)
end
before do
stub_licensed_features(status_page: true)
allow(project).to receive(:status_page_setting)
.and_return(status_page_setting)
end
end
......@@ -22,7 +22,7 @@ RSpec.shared_context 'oversized list_objects_v2 result' do
before do
stub_const("StatusPage::Storage::MAX_KEYS_PER_PAGE", 2)
stub_const("StatusPage::Storage::MAX_PAGES", 1)
stub_const("StatusPage::Storage::MAX_IMAGE_UPLOADS", StatusPage::Storage::MAX_PAGES * StatusPage::Storage::MAX_KEYS_PER_PAGE)
stub_const("StatusPage::Storage::MAX_UPLOADS", StatusPage::Storage::MAX_PAGES * StatusPage::Storage::MAX_KEYS_PER_PAGE)
# AWS s3 client responses for list_objects is paginated
# stub_responses allows multiple responses as arguments and they will be returned in sequence
stub_responses(
......
......@@ -20,17 +20,15 @@ RSpec.shared_examples 'publish incidents' do
.and_return(serializer)
end
shared_examples 'feature is not available' do
end
context 'when upload succeeds' do
context 'when json upload succeeds' do
before do
allow(storage_client).to receive(:upload_object).with(key, content_json)
allow(storage_client).to receive(:list_object_keys).and_return(Set.new)
end
it 'publishes details as JSON' do
expect(storage_client).to receive(:upload_object).with(key, content_json)
expect(result).to be_success
expect(result.payload).to eq(object_key: key)
end
end
......
......@@ -8,7 +8,7 @@ module API
path = params[attr_name]
Gitlab::Utils.check_path_traversal!(path)
rescue StandardError
rescue ::Gitlab::Utils::PathTraversalAttackError
raise Grape::Exceptions::Validation, params: [@scope.full_name(attr_name)],
message: "should be a valid file path"
end
......
......@@ -22,9 +22,10 @@ module Gitlab
return @text unless needs_rewrite?
@text.gsub(@pattern) do |markdown|
Gitlab::Utils.check_path_traversal!($~[:file])
file = find_file($~[:secret], $~[:file])
# No file will be returned for a path traversal
next if file.nil?
file = find_file(@source_project, $~[:secret], $~[:file])
break markdown unless file.try(:exists?)
klass = target_parent.is_a?(Namespace) ? NamespaceFileUploader : FileUploader
......@@ -47,7 +48,7 @@ module Gitlab
def files
referenced_files = @text.scan(@pattern).map do
find_file(@source_project, $~[:secret], $~[:file])
find_file($~[:secret], $~[:file])
end
referenced_files.compact.select(&:exists?)
......@@ -57,12 +58,8 @@ module Gitlab
markdown.starts_with?("!")
end
private
def find_file(project, secret, file)
uploader = FileUploader.new(project, secret: secret)
uploader.retrieve_from_store!(file)
uploader
def find_file(secret, file_name)
UploaderFinder.new(@source_project, secret, file_name).execute
end
end
end
......
......@@ -3,6 +3,7 @@
module Gitlab
module Utils
extend self
PathTraversalAttackError ||= Class.new(StandardError)
# Ensure that the relative path will not traverse outside the base directory
# We url decode the path to avoid passing invalid paths forward in url encoded format.
......@@ -17,7 +18,7 @@ module Gitlab
path.end_with?("#{File::SEPARATOR}..") ||
(!allowed_absolute && Pathname.new(path).absolute?)
raise StandardError.new("Invalid path")
raise PathTraversalAttackError.new('Invalid path')
end
path
......
# frozen_string_literal: true
require 'spec_helper'
describe UploaderFinder do
describe '#execute' do
let(:project) { build(:project) }
let(:upload) { create(:upload, :issuable_upload, :with_file) }
let(:secret) { upload.secret }
let(:file_name) { upload.path }
subject { described_class.new(project, secret, file_name).execute }
before do
upload.save
end
context 'when successful' do
before do
allow_next_instance_of(FileUploader) do |uploader|
allow(uploader).to receive(:retrieve_from_store!).with(upload.path).and_return(uploader)
end
end
it 'gets the file-like uploader' do
expect(subject).to be_an_instance_of(FileUploader)
expect(subject.model).to eq(project)
expect(subject.secret).to eq(secret)
end
end
context 'when path traversal in file name' do
before do
upload.path = '/uploads/11111111111111111111111111111111/../../../../../../../../../../../../../../etc/passwd)'
upload.save
end
it 'returns nil' do
expect(subject).to be(nil)
end
end
context 'when unexpected failure' do
before do
allow_next_instance_of(FileUploader) do |uploader|
allow(uploader).to receive(:retrieve_from_store!).and_raise(StandardError)
end
end
it 'returns nil when unexpected error is raised' do
expect { subject }.to raise_error(StandardError)
end
end
end
end
......@@ -54,6 +54,14 @@ describe Gitlab::Gfm::UploadsRewriter do
expect(new_paths).not_to include image_uploader.secret
expect(new_paths).not_to include zip_uploader.secret
end
it 'skips nil files do' do
allow_next_instance_of(UploaderFinder) do |finder|
allow(finder).to receive(:execute).and_return(nil)
end
expect(new_files).to be_empty
end
end
end
......@@ -68,16 +76,6 @@ describe Gitlab::Gfm::UploadsRewriter do
expect(moved_text.scan(/\A\[.*?\]/).count).to eq(1)
end
context 'path traversal in file name' do
let(:text) do
"![a](/uploads/11111111111111111111111111111111/../../../../../../../../../../../../../../etc/passwd)"
end
it 'throw an error' do
expect { rewriter.rewrite(new_project) }.to raise_error(an_instance_of(StandardError).and(having_attributes(message: "Invalid path")))
end
end
context "file are stored locally" do
include_examples "files are accessible"
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment