Commit eb3a3cb0 authored by Douglas Barbosa Alexandre

Merge branch '218550-change-status-page-namespace' into 'master'

Move StatusPage under Gitlab namespace

Closes #218550

See merge request gitlab-org/gitlab!39436
parents 07c16be9 009d0f92
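
The change is mechanical: library code previously referenced through the top-level StatusPage constant now resolves under Gitlab::StatusPage, while the Rails application code (finders, services, serializers) keeps its own StatusPage namespace and reaches the library through the new constant. A minimal before/after sketch of the call sites touched below (project, user, and issue stand in for existing records):

# Before this MR the library was referenced at the top level:
StatusPage.trigger_publish(project, user, issue)
StatusPage::Storage.details_path(issue.iid)

# After this MR the same calls go through the Gitlab namespace:
Gitlab::StatusPage.trigger_publish(project, user, issue)
Gitlab::StatusPage::Storage.details_path(issue.iid)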
# frozen_string_literal: true
-require_dependency 'status_page'
+require_dependency 'gitlab/status_page'
# Retrieves Notes specifically for the Status Page
# which are rendered as comments.
@@ -17,8 +17,8 @@ require_dependency 'status_page'
#
module StatusPage
class IncidentCommentsFinder
-AWARD_EMOJI = StatusPage::AWARD_EMOJI
+AWARD_EMOJI = Gitlab::StatusPage::AWARD_EMOJI
-MAX_LIMIT = StatusPage::Storage::MAX_COMMENTS
+MAX_LIMIT = Gitlab::StatusPage::Storage::MAX_COMMENTS
def initialize(issue:)
@issue = issue
...
@@ -20,7 +20,7 @@
#
module StatusPage
class IncidentsFinder
-MAX_LIMIT = StatusPage::Storage::MAX_RECENT_INCIDENTS
+MAX_LIMIT = Gitlab::StatusPage::Storage::MAX_RECENT_INCIDENTS
def initialize(project_id:)
@project_id = project_id
...
@@ -34,7 +34,7 @@ module EE
return {} unless issuable.is_a?(Issue)
super.merge(
-publishedIncidentUrl: StatusPage::Storage.details_url(issuable)
+publishedIncidentUrl: ::Gitlab::StatusPage::Storage.details_url(issuable)
)
end
...
@@ -62,7 +62,7 @@ module StatusPage
def storage_client
return unless enabled?
-StatusPage::Storage::S3Client.new(
+Gitlab::StatusPage::Storage::S3Client.new(
region: aws_region,
bucket_name: aws_s3_bucket_name,
access_key_id: aws_access_key,
...
@@ -16,7 +16,7 @@ module StatusPage
private
def links
-{ details: StatusPage::Storage.details_path(object.iid) }
+{ details: Gitlab::StatusPage::Storage.details_path(object.iid) }
end
def user_notes
...
@@ -4,7 +4,7 @@ module StatusPage
module Renderer
def self.markdown(object, field, issue_iid:)
context = {
-post_process_pipeline: StatusPage::Pipeline::PostProcessPipeline,
+post_process_pipeline: Gitlab::StatusPage::Pipeline::PostProcessPipeline,
issue_iid: issue_iid
}
MarkupHelper.markdown_field(object, field, context)
...
@@ -11,7 +11,7 @@ module EE
def after_create(award)
super
-StatusPage.trigger_publish(project, current_user, award)
+::Gitlab::StatusPage.trigger_publish(project, current_user, award)
end
end
end
...
@@ -11,7 +11,7 @@ module EE
def after_destroy(award)
super
-StatusPage.trigger_publish(project, current_user, award)
+::Gitlab::StatusPage.trigger_publish(project, current_user, award)
end
end
end
...
@@ -16,7 +16,7 @@ module EE
Epics::UpdateDatesService.new([issue.epic]).execute
end
-StatusPage.trigger_publish(project, current_user, issue) if issue.valid?
+::Gitlab::StatusPage.trigger_publish(project, current_user, issue) if issue.valid?
result
end
...
@@ -10,7 +10,7 @@ module EE
super
::Analytics::RefreshCommentsData.for_note(note)&.execute(force: true)
-StatusPage.trigger_publish(project, current_user, note)
+::Gitlab::StatusPage.trigger_publish(project, current_user, note)
end
end
end
...
@@ -10,7 +10,7 @@ module EE
updated_note = super
if updated_note&.errors&.empty?
-StatusPage.trigger_publish(project, current_user, updated_note)
+::Gitlab::StatusPage.trigger_publish(project, current_user, updated_note)
end
updated_note
...
@@ -44,7 +44,7 @@ module StatusPage
end
def track_incident
-::StatusPage::UsageDataCounters::IncidentCounter.count(:publishes)
+::Gitlab::StatusPage::UsageDataCounters::IncidentCounter.count(:publishes)
PublishedIncident.track(issue)
end
...
@@ -45,7 +45,7 @@ module StatusPage
def publish_markdown_uploads(markdown_field:)
markdown_field.scan(FileUploader::MARKDOWN_PATTERN).map do |secret, file_name|
-break if @total_uploads >= StatusPage::Storage::MAX_UPLOADS
+break if @total_uploads >= Gitlab::StatusPage::Storage::MAX_UPLOADS
key = upload_path(secret, file_name)
next if existing_keys.include? key
@@ -63,7 +63,7 @@ module StatusPage
storage_client.multipart_upload(key, open_file)
@total_uploads += 1
end
-rescue StatusPage::Storage::Error => e
+rescue Gitlab::StatusPage::Storage::Error => e
# In production continue uploading other files if one fails But report the failure to Sentry
# raise errors in development and test
@has_errors = true
@@ -77,11 +77,11 @@ module StatusPage
end
def upload_path(secret, file_name)
-StatusPage::Storage.upload_path(issue.iid, secret, file_name)
+Gitlab::StatusPage::Storage.upload_path(issue.iid, secret, file_name)
end
def uploads_path
-StatusPage::Storage.uploads_path(issue.iid)
+Gitlab::StatusPage::Storage.uploads_path(issue.iid)
end
def find_file(secret, file_name)
...
@@ -68,7 +68,7 @@ module StatusPage
def limit_exceeded?(json)
!Gitlab::Utils::DeepSize
-.new(json, max_size: Storage::JSON_MAX_SIZE)
+.new(json, max_size: ::Gitlab::StatusPage::Storage::JSON_MAX_SIZE)
.valid?
end
...
@@ -36,7 +36,7 @@ module StatusPage
id = json[:id]
return unless id
-StatusPage::Storage.details_path(id)
+Gitlab::StatusPage::Storage.details_path(id)
end
def publish_attachments(issue, user_notes)
...
@@ -21,7 +21,7 @@ module StatusPage
end
def object_key
-StatusPage::Storage.list_path
+Gitlab::StatusPage::Storage.list_path
end
end
end
@@ -105,7 +105,7 @@ module StatusPage
return note.noteable_id if note.destroyed?
return if note.previous_changes.none?
-return if note.award_emoji.named(StatusPage::AWARD_EMOJI).none?
+return if note.award_emoji.named(Gitlab::StatusPage::AWARD_EMOJI).none?
note.noteable_id
end
@@ -113,7 +113,7 @@ module StatusPage
def eligable_issue_id_from_award_emoji
award_emoji = triggered_by
-return unless award_emoji.name == StatusPage::AWARD_EMOJI
+return unless award_emoji.name == Gitlab::StatusPage::AWARD_EMOJI
return unless award_emoji.awardable.is_a?(Note)
return unless award_emoji.awardable.for_issue?
...
@@ -14,7 +14,7 @@ module StatusPage
def process(issue)
PublishedIncident.untrack(issue)
-::StatusPage::UsageDataCounters::IncidentCounter.count(:unpublishes)
+::Gitlab::StatusPage::UsageDataCounters::IncidentCounter.count(:unpublishes)
# Delete the incident prior to deleting images to avoid broken links
json_key = json_object_key(issue)
@@ -27,11 +27,11 @@ module StatusPage
end
def uploads_path(issue)
-StatusPage::Storage.uploads_path(issue.iid)
+Gitlab::StatusPage::Storage.uploads_path(issue.iid)
end
def json_object_key(issue)
-StatusPage::Storage.details_path(issue.iid)
+Gitlab::StatusPage::Storage.details_path(issue.iid)
end
end
end
@@ -142,8 +142,8 @@ module EE
StatusPage::MarkForPublicationService.publishable?(project, current_user, quick_action_target)
end
command :publish do
-if StatusPage.mark_for_publication(project, current_user, quick_action_target).success?
+if ::Gitlab::StatusPage.mark_for_publication(project, current_user, quick_action_target).success?
-StatusPage.trigger_publish(project, current_user, quick_action_target, action: :init)
+::Gitlab::StatusPage.trigger_publish(project, current_user, quick_action_target, action: :init)
@execution_message[:publish] = _('Issue published on status page.')
else
@execution_message[:publish] = _('Failed to publish issue on status page.')
...
@@ -40,7 +40,7 @@ module EE
super + [
::Gitlab::UsageDataCounters::LicensesList,
::Gitlab::UsageDataCounters::IngressModsecurityCounter,
-StatusPage::UsageDataCounters::IncidentCounter,
+::Gitlab::StatusPage::UsageDataCounters::IncidentCounter,
::Gitlab::UsageDataCounters::NetworkPolicyCounter
]
end
...
# frozen_string_literal: true
module Gitlab
module StatusPage
# Only comments with this emoji are visible.
# This filter will change once we have confidential notes.
# See https://gitlab.com/gitlab-org/gitlab/issues/207468
AWARD_EMOJI = 'microphone'
# Convenient method to trigger a status page update.
def self.trigger_publish(project, user, triggered_by, action: :update)
::StatusPage::TriggerPublishService.new(project, user, triggered_by, action: action).execute
end
# Method to mark an issue as published and trigger update
def self.mark_for_publication(project, user, issue)
::StatusPage::MarkForPublicationService.new(project, user, issue).execute
end
end
end
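
For illustration, a minimal usage sketch of the new entry points (assuming project, user, and issue are existing records; the underlying services are unchanged by this MR):

# Queue an asynchronous status page update for a changed record
# (an issue, a note, or an award emoji); action: defaults to :update.
Gitlab::StatusPage.trigger_publish(project, user, issue)

# Mark an issue for publication; the service result responds to #success?,
# as used by the /publish quick action elsewhere in this MR.
if Gitlab::StatusPage.mark_for_publication(project, user, issue).success?
  Gitlab::StatusPage.trigger_publish(project, user, issue, action: :init)
end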
# frozen_string_literal: true
module Gitlab
module StatusPage
# HTML filter that converts lazy loaded img nodes to standard HTML spec img nodes
# We do not need to lazy load images on the Status Page
module Filter
class ImageFilter < HTML::Pipeline::Filter
# Part of FileUploader::MARKDOWN_PATTERN but with a non-greedy file name matcher (?<file>.*) vs (?<file>.*?)
NON_GREEDY_UPLOAD_FILE_PATH_PATTERN = %r{/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*)}.freeze
def call
doc.css('img').each do |image_node|
image_node['class'] = 'gl-image'
original_src = image_node.delete('data-src').value
matches = NON_GREEDY_UPLOAD_FILE_PATH_PATTERN.match(original_src)
next unless matches && matches[:secret] && matches[:file]
change_image_path!(image_node, matches)
end
doc.to_html
end
def change_image_path!(image_node, matches)
new_src = ::Gitlab::StatusPage::Storage.upload_path(
context[:issue_iid],
matches[:secret],
matches[:file]
)
image_node['src'] = new_src
image_node.parent['href'] = new_src
end
def validate
raise ArgumentError unless context[:issue_iid]
end
end
end
end
end
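
For illustration, the filter takes rendered incident HTML and rewrites lazy-loaded upload images to point at the status page bucket. A sketch (not part of the change) using html-pipeline's class-level Filter.call, with a placeholder secret and issue iid:

html = <<~HTML
  <a href="/uploads/50b7a196557cf72a98e86a7ab4b1ac3b/tanuki.png">
    <img class="lazy" data-src="/uploads/50b7a196557cf72a98e86a7ab4b1ac3b/tanuki.png">
  </a>
HTML

# Returns the transformed HTML string; both the img src and the parent link
# now point at "data/incident/1/50b7a196557cf72a98e86a7ab4b1ac3b/tanuki.png".
Gitlab::StatusPage::Filter::ImageFilter.call(html, { issue_iid: 1 })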
# frozen_string_literal: true
module Gitlab
module StatusPage
module Filter
# HTML filter that replaces mention links with an anonymized plain version.
#
# This filter should be run before any references are redacted, before
# +Banzai::Filter::ReferenceRedactorFilter+, so it's easier to find and
# anonymize `user` references.
class MentionAnonymizationFilter < HTML::Pipeline::Filter
LINK_CSS_SELECTOR = "a.gfm[data-reference-type='user']"
# Static for now. In https://gitlab.com/gitlab-org/gitlab/-/issues/209114
# we'll map names with a more sophisticated approach.
ANONYMIZED_NAME = 'Incident Responder'
def call
doc.css(LINK_CSS_SELECTOR).each do |link_node|
link_node.replace(ANONYMIZED_NAME)
end
doc.to_html
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module StatusPage
module Pipeline
class PostProcessPipeline < ::Banzai::Pipeline::PostProcessPipeline
def self.filters
@filters ||= super
.dup
.insert_before(::Banzai::Filter::ReferenceRedactorFilter,
Gitlab::StatusPage::Filter::MentionAnonymizationFilter)
.concat(::Banzai::FilterArray[StatusPage::Filter::ImageFilter])
.freeze
end
end
end
end
end
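
The net effect, as asserted by the updated pipeline spec further down, is that mention anonymization runs before Banzai's reference redaction and the image filter runs last:

Gitlab::StatusPage::Pipeline::PostProcessPipeline.filters
# => [Gitlab::StatusPage::Filter::MentionAnonymizationFilter] +
#    ::Banzai::Pipeline::PostProcessPipeline.filters +
#    [Gitlab::StatusPage::Filter::ImageFilter]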
# frozen_string_literal: true
module Gitlab
module StatusPage
module Storage
# Size limit of the generated JSON uploaded to CDN.
JSON_MAX_SIZE = 1.megabyte
# Limit the amount of the recent incidents in the JSON list
MAX_RECENT_INCIDENTS = 20
# Limit the amount of comments per incident
MAX_COMMENTS = 100
# Limit on paginated responses
MAX_KEYS_PER_PAGE = 1_000
MAX_PAGES = 5
MAX_UPLOADS = MAX_KEYS_PER_PAGE * MAX_PAGES
class << self
def details_path(id)
"data/incident/#{id}.json"
end
def details_url(issue)
return unless published_issue_available?(issue, issue.project.status_page_setting)
issue.project.status_page_setting.normalized_status_page_url +
CGI.escape(details_path(issue.iid))
end
def upload_path(issue_iid, secret, file_name)
uploads_path = uploads_path(issue_iid)
File.join(uploads_path, secret, file_name)
end
def uploads_path(issue_iid)
File.join('data', 'incident', issue_iid.to_s, '/')
end
def list_path
'data/list.json'
end
private
def published_issue_available?(issue, setting)
issue.status_page_published_incident &&
setting&.enabled? &&
setting&.status_page_url
end
end
class Error < StandardError
def initialize(bucket:, error:, **args)
super(
"Error occured #{error.class.name.inspect} " \
"for bucket #{bucket.inspect}. " \
"Arguments: #{args.inspect}"
)
end
end
end
end
end
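
For illustration, the CDN object layout produced by these helpers (the secret and file name are placeholders borrowed from the specs below):

Gitlab::StatusPage::Storage.list_path
# => "data/list.json"
Gitlab::StatusPage::Storage.details_path(42)
# => "data/incident/42.json"
Gitlab::StatusPage::Storage.uploads_path(42)
# => "data/incident/42/"
Gitlab::StatusPage::Storage.upload_path(42, '50b7a196557cf72a98e86a7ab4b1ac3b', 'tanuki.png')
# => "data/incident/42/50b7a196557cf72a98e86a7ab4b1ac3b/tanuki.png"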
# frozen_string_literal: true
module Gitlab
module StatusPage
module Storage
# Represents a platform-agnostic object class.
Object = Struct.new(:key, :content, :modified_at, keyword_init: true)
end
end
end
# frozen_string_literal: true
module Gitlab
module StatusPage
module Storage
# Implements a minimal AWS S3 client.
class S3Client
include Gitlab::StatusPage::Storage::WrapsStorageErrors
def initialize(region:, bucket_name:, access_key_id:, secret_access_key:)
@bucket_name = bucket_name
@client = Aws::S3::Client.new(
region: region,
credentials: Aws::Credentials.new(access_key_id, secret_access_key)
)
end
# Stores +content+ as +key+ in storage
#
# Note: We are making sure that
# * we control +content+ (not the user)
# * this upload is done as a background job (not in a web request)
def upload_object(key, content)
wrap_errors(key: key) do
client.put_object(bucket: bucket_name, key: key, body: content)
end
true
end
# Deletes object at +key+ from storage
#
# Note, this operation succeeds even if +key+ does not exist in storage.
def delete_object(key)
wrap_errors(key: key) do
client.delete_object(bucket: bucket_name, key: key)
end
true
end
# Delete all objects whose key has a given +prefix+
def recursive_delete(prefix)
wrap_errors(prefix: prefix) do
# Aws::S3::Types::ListObjectsV2Output is paginated and Enumerable
list_objects(prefix).each.with_index do |response, index|
break if index >= Gitlab::StatusPage::Storage::MAX_PAGES
objects = response.contents.map { |obj| { key: obj.key } }
# Batch delete in sets determined by default max_key argument that can be passed to list_objects_v2
client.delete_objects({ bucket: bucket_name, delete: { objects: objects } }) unless objects.empty?
end
end
true
end
# Return a Set of all keys with a given prefix
def list_object_keys(prefix)
wrap_errors(prefix: prefix) do
list_objects(prefix).reduce(Set.new) do |objects, (response, _index)|
break objects if objects.size >= Gitlab::StatusPage::Storage::MAX_UPLOADS
objects | response.contents.map(&:key)
end
end
end
# Stores +file+ as +key+ in storage using multipart upload
#
# key: s3 key at which file is stored
# file: An open file or file-like io object
def multipart_upload(key, file)
Gitlab::StatusPage::Storage::S3MultipartUpload.new(
client: client, bucket_name: bucket_name, key: key, open_file: file
).call
end
private
attr_reader :client, :bucket_name
def list_objects(prefix)
client.list_objects_v2(bucket: bucket_name, prefix: prefix, max_keys: Gitlab::StatusPage::Storage::MAX_KEYS_PER_PAGE)
end
end
end
end
end
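
For illustration, a minimal sketch of how the client is wired up and used; the keyword arguments mirror the StatusPage::ProjectSetting#storage_client hunk earlier in the diff, and the literal values are placeholders:

client = Gitlab::StatusPage::Storage::S3Client.new(
  region: 'eu-west-1',               # placeholder values
  bucket_name: 'status-page-bucket',
  access_key_id: 'AKIA...',
  secret_access_key: 'secret'
)

incidents_json = [{ id: 42 }].to_json

client.upload_object(Gitlab::StatusPage::Storage.list_path, incidents_json)   # store a JSON string
client.list_object_keys(Gitlab::StatusPage::Storage.uploads_path(42))         # => Set of keys under the prefix
client.recursive_delete(Gitlab::StatusPage::Storage.uploads_path(42))         # batch-delete everything under it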
# frozen_string_literal: true
module Gitlab
module StatusPage
module Storage
# Implements multipart upload in s3
class S3MultipartUpload
include Gitlab::StatusPage::Storage::WrapsStorageErrors
# 5 megabytes is the minimum part size specified in the amazon SDK
MULTIPART_UPLOAD_PART_SIZE = 5.megabytes
def initialize(client:, bucket_name:, key:, open_file:)
@client = client
@bucket_name = bucket_name
@key = key
@file = open_file
end
# Stores +file+ as +key+ in storage using multipart upload
#
# key: s3 key at which file is stored
# file: An open file or file-like io object
def call
# AWS sdk v2 has upload_file which supports multipart
# However Gitlab::HttpIO used when object storage is enabled
# cannot be used with upload_file
wrap_errors(key: key) do
upload_id = create_upload.to_h[:upload_id]
begin
parts = upload_part(upload_id)
complete_upload(upload_id, parts)
# Rescue on Exception since even on keyboard interrupt we want to abort the upload and re-raise
# abort clears the already uploaded parts so that they do not cost the bucket owner
# The status page bucket lifecycle policy will clear out unaborted parts if
# this fails without an exception (power failures etc.)
rescue Exception => e # rubocop:disable Lint/RescueException
abort_upload(upload_id)
raise e
end
end
end
private
attr_reader :key, :file, :client, :bucket_name
def create_upload
client.create_multipart_upload({ bucket: bucket_name, key: key })
end
def upload_part(upload_id)
parts = []
part_number = 1
file.seek(0)
until file.eof?
part = client.upload_part({
body: file.read(MULTIPART_UPLOAD_PART_SIZE),
bucket: bucket_name,
key: key,
part_number: part_number, # required
upload_id: upload_id
})
parts << part.to_h.merge(part_number: part_number)
part_number += 1
end
parts
end
def complete_upload(upload_id, parts)
client.complete_multipart_upload({
bucket: bucket_name,
key: key,
multipart_upload: {
parts: parts
},
upload_id: upload_id
})
end
def abort_upload(upload_id)
client.abort_multipart_upload(
bucket: bucket_name,
key: key,
upload_id: upload_id
)
end
end
end
end
end
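
Larger attachments go through S3Client#multipart_upload, which delegates here. A sketch, assuming client is an Aws::S3::Client and the attachment is readable from a local placeholder path:

key = Gitlab::StatusPage::Storage.upload_path(42, '50b7a196557cf72a98e86a7ab4b1ac3b', 'tanuki.png')

File.open('tanuki.png', 'rb') do |file|
  Gitlab::StatusPage::Storage::S3MultipartUpload.new(
    client: client, bucket_name: 'status-page-bucket', key: key, open_file: file
  ).call
end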
# frozen_string_literal: true
module Gitlab
module StatusPage
module Storage
module WrapsStorageErrors
def wrap_errors(**args)
yield
rescue Aws::Errors::ServiceError => e
raise Error, bucket: bucket_name, error: e, **args
end
end
end
end
end
# frozen_string_literal: true
module Gitlab
module StatusPage
module UsageDataCounters
class IncidentCounter < ::Gitlab::UsageDataCounters::BaseCounter
KNOWN_EVENTS = %w[publishes unpublishes].freeze
PREFIX = 'status_page_incident'
end
end
end
end
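
This is the Redis-backed counter incremented by the publish and unpublish services above; usage is simply:

Gitlab::StatusPage::UsageDataCounters::IncidentCounter.count(:publishes)
Gitlab::StatusPage::UsageDataCounters::IncidentCounter.count(:unpublishes)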
# frozen_string_literal: true
module StatusPage
# Only comments with this emoji are visible.
# This filter will change once we have confidential notes.
# See https://gitlab.com/gitlab-org/gitlab/issues/207468
AWARD_EMOJI = 'microphone'
# Convenient method to trigger a status page update.
def self.trigger_publish(project, user, triggered_by, action: :update)
TriggerPublishService.new(project, user, triggered_by, action: action).execute
end
# Method to mark an issue as published and trigger update
def self.mark_for_publication(project, user, issue)
MarkForPublicationService.new(project, user, issue).execute
end
end
# frozen_string_literal: true
module StatusPage
# HTML filter that converts lazy loaded img nodes to standard HTML spec img nodes
# We do not need to lazy load images on the Status Page
module Filter
class ImageFilter < HTML::Pipeline::Filter
# Part of FileUploader::MARKDOWN_PATTERN but with a non-greedy file name matcher (?<file>.*) vs (?<file>.*?)
NON_GREEDY_UPLOAD_FILE_PATH_PATTERN = %r{/uploads/(?<secret>[0-9a-f]{32})/(?<file>.*)}.freeze
def call
doc.css('img').each do |image_node|
image_node['class'] = 'gl-image'
original_src = image_node.delete('data-src').value
matches = NON_GREEDY_UPLOAD_FILE_PATH_PATTERN.match(original_src)
next unless matches && matches[:secret] && matches[:file]
change_image_path!(image_node, matches)
end
doc.to_html
end
def change_image_path!(image_node, matches)
new_src = ::StatusPage::Storage.upload_path(
context[:issue_iid],
matches[:secret],
matches[:file]
)
image_node['src'] = new_src
image_node.parent['href'] = new_src
end
def validate
raise ArgumentError unless context[:issue_iid]
end
end
end
end
# frozen_string_literal: true
module StatusPage
module Filter
# HTML filter that replaces mention links with an anonymized plain version.
#
# This filter should be run before any references are redacted, before
# +Banzai::Filter::ReferenceRedactorFilter+, so it's easier to find and
# anonymize `user` references.
class MentionAnonymizationFilter < HTML::Pipeline::Filter
LINK_CSS_SELECTOR = "a.gfm[data-reference-type='user']"
# Static for now. In https://gitlab.com/gitlab-org/gitlab/-/issues/209114
# we'll map names with a more sophisticated approach.
ANONYMIZED_NAME = 'Incident Responder'
def call
doc.css(LINK_CSS_SELECTOR).each do |link_node|
link_node.replace(ANONYMIZED_NAME)
end
doc.to_html
end
end
end
end
# frozen_string_literal: true
module StatusPage
module Pipeline
class PostProcessPipeline < ::Banzai::Pipeline::PostProcessPipeline
def self.filters
@filters ||= super
.dup
.insert_before(::Banzai::Filter::ReferenceRedactorFilter,
StatusPage::Filter::MentionAnonymizationFilter)
.concat(::Banzai::FilterArray[StatusPage::Filter::ImageFilter])
.freeze
end
end
end
end
# frozen_string_literal: true
module StatusPage
module Storage
# Size limit of the generated JSON uploaded to CDN.
JSON_MAX_SIZE = 1.megabyte
# Limit the amount of the recent incidents in the JSON list
MAX_RECENT_INCIDENTS = 20
# Limit the amount of comments per incident
MAX_COMMENTS = 100
# Limit on paginated responses
MAX_KEYS_PER_PAGE = 1_000
MAX_PAGES = 5
MAX_UPLOADS = MAX_KEYS_PER_PAGE * MAX_PAGES
class << self
def details_path(id)
"data/incident/#{id}.json"
end
def details_url(issue)
return unless published_issue_available?(issue, issue.project.status_page_setting)
issue.project.status_page_setting.normalized_status_page_url +
CGI.escape(details_path(issue.iid))
end
def upload_path(issue_iid, secret, file_name)
uploads_path = uploads_path(issue_iid)
File.join(uploads_path, secret, file_name)
end
def uploads_path(issue_iid)
File.join('data', 'incident', issue_iid.to_s, '/')
end
def list_path
'data/list.json'
end
private
def published_issue_available?(issue, setting)
issue.status_page_published_incident &&
setting&.enabled? &&
setting&.status_page_url
end
end
class Error < StandardError
def initialize(bucket:, error:, **args)
super(
"Error occured #{error.class.name.inspect} " \
"for bucket #{bucket.inspect}. " \
"Arguments: #{args.inspect}"
)
end
end
end
end
# frozen_string_literal: true
module StatusPage
module Storage
# Represents a platform-agnostic object class.
Object = Struct.new(:key, :content, :modified_at, keyword_init: true)
end
end
# frozen_string_literal: true
module StatusPage
module Storage
# Implements a minimal AWS S3 client.
class S3Client
include StatusPage::Storage::WrapsStorageErrors
def initialize(region:, bucket_name:, access_key_id:, secret_access_key:)
@bucket_name = bucket_name
@client = Aws::S3::Client.new(
region: region,
credentials: Aws::Credentials.new(access_key_id, secret_access_key)
)
end
# Stores +content+ as +key+ in storage
#
# Note: We are making sure that
# * we control +content+ (not the user)
# * this upload is done as a background job (not in a web request)
def upload_object(key, content)
wrap_errors(key: key) do
client.put_object(bucket: bucket_name, key: key, body: content)
end
true
end
# Deletes object at +key+ from storage
#
# Note, this operation succeeds even if +key+ does not exist in storage.
def delete_object(key)
wrap_errors(key: key) do
client.delete_object(bucket: bucket_name, key: key)
end
true
end
# Delete all objects whose key has a given +prefix+
def recursive_delete(prefix)
wrap_errors(prefix: prefix) do
# Aws::S3::Types::ListObjectsV2Output is paginated and Enumerable
list_objects(prefix).each.with_index do |response, index|
break if index >= StatusPage::Storage::MAX_PAGES
objects = response.contents.map { |obj| { key: obj.key } }
# Batch delete in sets determined by default max_key argument that can be passed to list_objects_v2
client.delete_objects({ bucket: bucket_name, delete: { objects: objects } }) unless objects.empty?
end
end
true
end
# Return a Set of all keys with a given prefix
def list_object_keys(prefix)
wrap_errors(prefix: prefix) do
list_objects(prefix).reduce(Set.new) do |objects, (response, _index)|
break objects if objects.size >= StatusPage::Storage::MAX_UPLOADS
objects | response.contents.map(&:key)
end
end
end
# Stores +file+ as +key+ in storage using multipart upload
#
# key: s3 key at which file is stored
# file: An open file or file-like io object
def multipart_upload(key, file)
StatusPage::Storage::S3MultipartUpload.new(
client: client, bucket_name: bucket_name, key: key, open_file: file
).call
end
private
attr_reader :client, :bucket_name
def list_objects(prefix)
client.list_objects_v2(bucket: bucket_name, prefix: prefix, max_keys: StatusPage::Storage::MAX_KEYS_PER_PAGE)
end
end
end
end
# frozen_string_literal: true
module StatusPage
module Storage
# Implements multipart upload in s3
class S3MultipartUpload
include StatusPage::Storage::WrapsStorageErrors
# 5 megabytes is the minimum part size specified in the amazon SDK
MULTIPART_UPLOAD_PART_SIZE = 5.megabytes
def initialize(client:, bucket_name:, key:, open_file:)
@client = client
@bucket_name = bucket_name
@key = key
@file = open_file
end
# Stores +file+ as +key+ in storage using multipart upload
#
# key: s3 key at which file is stored
# file: An open file or file-like io object
def call
# AWS sdk v2 has upload_file which supports multipart
# However Gitlab::HttpIO used when object storage is enabled
# cannot be used with upload_file
wrap_errors(key: key) do
upload_id = create_upload.to_h[:upload_id]
begin
parts = upload_part(upload_id)
complete_upload(upload_id, parts)
# Rescue on Exception since even on keyboard interrupt we want to abort the upload and re-raise
# abort clears the already uploaded parts so that they do not cost the bucket owner
# The status page bucket lifecycle policy will clear out unaborted parts if
# this fails without an exception (power failures etc.)
rescue Exception => e # rubocop:disable Lint/RescueException
abort_upload(upload_id)
raise e
end
end
end
private
attr_reader :key, :file, :client, :bucket_name
def create_upload
client.create_multipart_upload({ bucket: bucket_name, key: key })
end
def upload_part(upload_id)
parts = []
part_number = 1
file.seek(0)
until file.eof?
part = client.upload_part({
body: file.read(MULTIPART_UPLOAD_PART_SIZE),
bucket: bucket_name,
key: key,
part_number: part_number, # required
upload_id: upload_id
})
parts << part.to_h.merge(part_number: part_number)
part_number += 1
end
parts
end
def complete_upload(upload_id, parts)
client.complete_multipart_upload({
bucket: bucket_name,
key: key,
multipart_upload: {
parts: parts
},
upload_id: upload_id
})
end
def abort_upload(upload_id)
client.abort_multipart_upload(
bucket: bucket_name,
key: key,
upload_id: upload_id
)
end
end
end
end
# frozen_string_literal: true
module StatusPage
module Storage
module WrapsStorageErrors
def wrap_errors(**args)
yield
rescue Aws::Errors::ServiceError => e
raise Error, bucket: bucket_name, error: e, **args
end
end
end
end
# frozen_string_literal: true
module StatusPage
module UsageDataCounters
class IncidentCounter < ::Gitlab::UsageDataCounters::BaseCounter
KNOWN_EVENTS = %w[publishes unpublishes].freeze
PREFIX = 'status_page_incident'
end
end
end
@@ -67,7 +67,7 @@ RSpec.describe IssuablesHelper do
it 'returns the correct data that includes publishedIncidentUrl' do
@project = issue.project
-expect(StatusPage::Storage).to receive(:details_url).with(issue).and_return('http://status.com')
+expect(Gitlab::StatusPage::Storage).to receive(:details_url).with(issue).and_return('http://status.com')
expect(helper.issuable_initial_data(issue)).to include(
publishedIncidentUrl: 'http://status.com'
)
...
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe StatusPage::Filter::ImageFilter do
+RSpec.describe Gitlab::StatusPage::Filter::ImageFilter do
include FilterSpecHelper
describe '.call' do
@@ -12,9 +12,9 @@ RSpec.describe StatusPage::Filter::ImageFilter do
let(:secret) { '50b7a196557cf72a98e86a7ab4b1ac3b' }
let(:filename) { 'tanuki.png'}
let(:original_source_path) { "/uploads/#{secret}/#{filename}" }
-let(:expected_source_path) { StatusPage::Storage.upload_path(issue_iid, secret, filename) }
+let(:expected_source_path) { Gitlab::StatusPage::Storage.upload_path(issue_iid, secret, filename) }
let(:original_html) { %Q{<a class="no-attachment-icon gfm" href="#{original_source_path}" target="_blank" rel="noopener noreferrer"><img class="lazy" data-src="#{original_source_path}"></a>} }
-let(:context_options) { { post_process_pipeline: StatusPage::Pipeline::PostProcessPipeline, issue_iid: issue_iid } }
+let(:context_options) { { post_process_pipeline: Gitlab::StatusPage::Pipeline::PostProcessPipeline, issue_iid: issue_iid } }
let(:img_tag) { Nokogiri::HTML(subject).css('img')[0] }
let(:link_tag) { img_tag.parent }
@@ -23,7 +23,7 @@ RSpec.describe StatusPage::Filter::ImageFilter do
it { expect(link_tag['href']).to eq(expected_source_path) }
context 'when no issue_iid key' do
-let(:context_options) { { post_process_pipeline: StatusPage::Pipeline::PostProcessPipeline } }
+let(:context_options) { { post_process_pipeline: Gitlab::StatusPage::Pipeline::PostProcessPipeline } }
it 'raises error' do
expect { subject }.to raise_error(ArgumentError)
...
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe StatusPage::Filter::MentionAnonymizationFilter do
+RSpec.describe Gitlab::StatusPage::Filter::MentionAnonymizationFilter do
include FilterSpecHelper
it 'replaces user link with anonymized text' do
...
@@ -2,12 +2,12 @@
require 'spec_helper'
-RSpec.describe StatusPage::Pipeline::PostProcessPipeline do
+RSpec.describe Gitlab::StatusPage::Pipeline::PostProcessPipeline do
describe '.filters' do
let(:expected_filters) do
-[StatusPage::Filter::MentionAnonymizationFilter] +
+[Gitlab::StatusPage::Filter::MentionAnonymizationFilter] +
::Banzai::Pipeline::PostProcessPipeline.filters +
-[StatusPage::Filter::ImageFilter]
+[Gitlab::StatusPage::Filter::ImageFilter]
end
subject { described_class.filters }
...
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe StatusPage::Storage::S3Client, :aws_s3 do
+RSpec.describe Gitlab::StatusPage::Storage::S3Client, :aws_s3 do
let(:region) { 'eu-west-1' }
let(:bucket_name) { 'bucket_name' }
let(:access_key_id) { 'key_id' }
@@ -36,7 +36,7 @@ RSpec.describe StatusPage::Storage::S3Client, :aws_s3 do
stub_responses(:put_object, aws_error)
msg = error_message(aws_error, key: key)
-expect { result }.to raise_error(StatusPage::Storage::Error, msg)
+expect { result }.to raise_error(Gitlab::StatusPage::Storage::Error, msg)
end
end
end
@@ -59,7 +59,7 @@ RSpec.describe StatusPage::Storage::S3Client, :aws_s3 do
stub_responses(:delete_object, aws_error)
msg = error_message(aws_error, key: key)
-expect { result }.to raise_error(StatusPage::Storage::Error, msg)
+expect { result }.to raise_error(Gitlab::StatusPage::Storage::Error, msg)
end
end
end
@@ -113,7 +113,7 @@ RSpec.describe StatusPage::Storage::S3Client, :aws_s3 do
stub_responses(:list_objects_v2, aws_error)
msg = error_message(aws_error, prefix: key_prefix)
-expect { result }.to raise_error(StatusPage::Storage::Error, msg)
+expect { result }.to raise_error(Gitlab::StatusPage::Storage::Error, msg)
end
end
end
@@ -135,7 +135,7 @@ RSpec.describe StatusPage::Storage::S3Client, :aws_s3 do
include_context 'oversized list_objects_v2 result'
it 'returns result at max size' do
-expect(result.count).to eq(StatusPage::Storage::MAX_UPLOADS)
+expect(result.count).to eq(Gitlab::StatusPage::Storage::MAX_UPLOADS)
end
end
@@ -155,7 +155,7 @@ RSpec.describe StatusPage::Storage::S3Client, :aws_s3 do
stub_responses(:list_objects_v2, aws_error)
msg = error_message(aws_error, prefix: key_prefix)
-expect { result }.to raise_error(StatusPage::Storage::Error, msg)
+expect { result }.to raise_error(Gitlab::StatusPage::Storage::Error, msg)
end
end
end
...
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe StatusPage::Storage::S3MultipartUpload, :aws_s3 do
+RSpec.describe Gitlab::StatusPage::Storage::S3MultipartUpload, :aws_s3 do
let(:region) { 'eu-west-1' }
let(:bucket_name) { 'bucket_name' }
let(:access_key_id) { 'key_id' }
@@ -81,7 +81,7 @@ RSpec.describe StatusPage::Storage::S3MultipartUpload, :aws_s3 do
msg = error_message(aws_error, key: key)
expect(s3_client).to receive(:abort_multipart_upload)
-expect { result }.to raise_error(StatusPage::Storage::Error, msg)
+expect { result }.to raise_error(Gitlab::StatusPage::Storage::Error, msg)
end
end
@@ -95,7 +95,7 @@ RSpec.describe StatusPage::Storage::S3MultipartUpload, :aws_s3 do
msg = error_message(aws_error, key: key)
expect(s3_client).to receive(:abort_multipart_upload)
-expect { result }.to raise_error(StatusPage::Storage::Error, msg)
+expect { result }.to raise_error(Gitlab::StatusPage::Storage::Error, msg)
end
end
end
...
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe StatusPage::Storage do
+RSpec.describe Gitlab::StatusPage::Storage do
describe '.details_path' do
subject { described_class.details_path(123) }
@@ -72,6 +72,6 @@ RSpec.describe StatusPage::Storage do
it 'MAX_KEYS_PER_PAGE times MAX_PAGES establishes upload limit' do
# spec intended to fail if page related MAX constants change
# In order to ensure change to documented MAX_UPLOADS is considered
-expect(StatusPage::Storage::MAX_KEYS_PER_PAGE * StatusPage::Storage::MAX_PAGES).to eq(5000)
+expect(Gitlab::StatusPage::Storage::MAX_KEYS_PER_PAGE * Gitlab::StatusPage::Storage::MAX_PAGES).to eq(5000)
end
end
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe StatusPage::UsageDataCounters::IncidentCounter do
+RSpec.describe Gitlab::StatusPage::UsageDataCounters::IncidentCounter do
it_behaves_like 'a redis usage counter', 'StatusPage::IncidentCounter', :publishes
it_behaves_like 'a redis usage counter', 'StatusPage::IncidentCounter', :unpublishes
...
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe StatusPage do
+RSpec.describe Gitlab::StatusPage do
let(:project) { instance_double(Project) }
let(:user) { instance_double(User) }
let(:triggered_by) { instance_double(Issue) }
...
@@ -198,7 +198,7 @@ RSpec.describe StatusPage::ProjectSetting do
context 'when status page settings is enabled' do
let(:status_page_setting_enabled) { true }
-it { is_expected.to be_instance_of(StatusPage::Storage::S3Client) }
+it { is_expected.to be_instance_of(Gitlab::StatusPage::Storage::S3Client) }
end
context 'when not enabled' do
...
@@ -11,7 +11,7 @@ RSpec.describe StatusPage::Renderer do
expect(MarkupHelper)
.to receive(:markdown_field)
-.with(object, field, issue_iid: issue_iid, post_process_pipeline: ::StatusPage::Pipeline::PostProcessPipeline)
+.with(object, field, issue_iid: issue_iid, post_process_pipeline: ::Gitlab::StatusPage::Pipeline::PostProcessPipeline)
described_class.markdown(object, field, issue_iid: issue_iid)
end
...
@@ -17,7 +17,7 @@ RSpec.describe AwardEmojis::AddService do
context 'when adding succeeds' do
context 'with recognized emoji' do
-let(:name) { StatusPage::AWARD_EMOJI }
+let(:name) { Gitlab::StatusPage::AWARD_EMOJI }
include_examples 'trigger status page publish'
end
...
@@ -20,7 +20,7 @@ RSpec.describe AwardEmojis::DestroyService do
end
context 'with recognized emoji' do
-let(:name) { StatusPage::AWARD_EMOJI }
+let(:name) { Gitlab::StatusPage::AWARD_EMOJI }
include_examples 'trigger status page publish'
end
...
@@ -18,7 +18,7 @@ RSpec.describe Notes::UpdateService do
describe 'publish to status page' do
let(:execute) { service.execute(note) }
let(:issue_id) { note.noteable_id }
-let(:emoji_name) { StatusPage::AWARD_EMOJI }
+let(:emoji_name) { Gitlab::StatusPage::AWARD_EMOJI }
before do
create(:award_emoji, user: user, name: emoji_name, awardable: note)
...
@@ -56,7 +56,7 @@ RSpec.describe StatusPage::MarkForPublicationService do
specify { expect(subject).to be_success }
it 'increments the publish counter' do
-expect(StatusPage::UsageDataCounters::IncidentCounter).to receive(:count).with(:publishes).once
+expect(Gitlab::StatusPage::UsageDataCounters::IncidentCounter).to receive(:count).with(:publishes).once
subject
end
...
@@ -9,7 +9,7 @@ RSpec.describe StatusPage::PublishAttachmentsService do
let(:image_file_name_2) { 'tanuki_2.png' }
let(:upload_path_2) { "/uploads/#{upload_secret_2}/#{image_file_name_2}" }
let(:markdown_field) { "![tanuki](#{upload_path}) and ![tanuki_2](#{upload_path_2})" }
-let(:status_page_upload_path_2) { StatusPage::Storage.upload_path(issue.iid, upload_secret_2, image_file_name_2) }
+let(:status_page_upload_path_2) { Gitlab::StatusPage::Storage.upload_path(issue.iid, upload_secret_2, image_file_name_2) }
end
describe '#execute' do
@@ -18,9 +18,9 @@ RSpec.describe StatusPage::PublishAttachmentsService do
let(:user_notes) { [] }
let(:incident_id) { 1 }
let(:issue) { instance_double(Issue, notes: user_notes, description: markdown_field, iid: incident_id) }
-let(:key) { StatusPage::Storage.details_path(incident_id) }
+let(:key) { Gitlab::StatusPage::Storage.details_path(incident_id) }
let(:content) { { id: incident_id } }
-let(:storage_client) { instance_double(StatusPage::Storage::S3Client) }
+let(:storage_client) { instance_double(Gitlab::StatusPage::Storage::S3Client) }
let(:service) { described_class.new(project: project, issue: issue, user_notes: user_notes, storage_client: storage_client) }
@@ -47,7 +47,7 @@ RSpec.describe StatusPage::PublishAttachmentsService do
let(:image_file_name) { 'tanuki.png'}
let(:upload_path) { "/uploads/#{upload_secret}/#{image_file_name}" }
let(:markdown_field) { "![tanuki](#{upload_path})" }
-let(:status_page_upload_path) { StatusPage::Storage.upload_path(issue.iid, upload_secret, image_file_name) }
+let(:status_page_upload_path) { Gitlab::StatusPage::Storage.upload_path(issue.iid, upload_secret, image_file_name) }
let(:user_notes) { [] }
let(:open_file) { instance_double(File, read: 'stubbed read') }
@@ -73,7 +73,7 @@ RSpec.describe StatusPage::PublishAttachmentsService do
context 'when upload to storage throws an error' do
it 'returns an error response' do
-storage_error = StatusPage::Storage::Error.new(bucket: '', error: StandardError.new)
+storage_error = Gitlab::StatusPage::Storage::Error.new(bucket: '', error: StandardError.new)
# no raise to mimic prod behavior
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
allow(storage_client).to receive(:multipart_upload).and_raise(storage_error)
@@ -99,7 +99,7 @@ RSpec.describe StatusPage::PublishAttachmentsService do
include_context 'second file'
before do
-stub_const("StatusPage::Storage::MAX_UPLOADS", 2)
+stub_const("Gitlab::StatusPage::Storage::MAX_UPLOADS", 2)
allow(storage_client).to receive(:list_object_keys).and_return(Set['existing_key'])
end
...
@@ -9,7 +9,7 @@ RSpec.describe StatusPage::PublishDetailsService do
let(:user_notes) { [] }
let(:incident_id) { 1 }
let(:issue) { instance_double(Issue, notes: user_notes, description: 'Incident Occuring', iid: incident_id) }
-let(:key) { StatusPage::Storage.details_path(incident_id) }
+let(:key) { Gitlab::StatusPage::Storage.details_path(incident_id) }
let(:content) { { id: incident_id } }
let(:service) { described_class.new(project: project) }
...
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe StatusPage::PublishListService do
let_it_be(:project, refind: true) { create(:project) }
let(:issues) { [instance_double(Issue)] }
-let(:key) { StatusPage::Storage.list_path }
+let(:key) { Gitlab::StatusPage::Storage.list_path }
let(:content) { [{ some: :content }] }
let(:service) { described_class.new(project: project) }
...
@@ -37,7 +37,7 @@ RSpec.describe StatusPage::PublishService do
it 'propagates the exception' do
expect_to_upload_details(issue, status: 404)
-expect { result }.to raise_error(StatusPage::Storage::Error)
+expect { result }.to raise_error(Gitlab::StatusPage::Storage::Error)
end
end
end
@@ -117,11 +117,11 @@ RSpec.describe StatusPage::PublishService do
end
def expect_to_upload_details(issue, **kwargs)
-stub_aws_request(:put, StatusPage::Storage.details_path(issue.iid), **kwargs)
+stub_aws_request(:put, Gitlab::StatusPage::Storage.details_path(issue.iid), **kwargs)
end
def expect_to_upload_list(**kwargs)
-stub_aws_request(:put, StatusPage::Storage.list_path, **kwargs)
+stub_aws_request(:put, Gitlab::StatusPage::Storage.list_path, **kwargs)
end
def stub_aws_request(method, path, status: 200)
...
@@ -102,7 +102,7 @@ RSpec.describe StatusPage::TriggerPublishService do
describe 'triggered by note' do
let(:issue_id) { triggered_by.noteable_id }
-let(:emoji_name) { StatusPage::AWARD_EMOJI }
+let(:emoji_name) { Gitlab::StatusPage::AWARD_EMOJI }
before do
create(:award_emoji, user: user, name: emoji_name,
@@ -183,7 +183,7 @@ RSpec.describe StatusPage::TriggerPublishService do
end
describe 'triggered by award emoji' do
-let(:emoji_name) { StatusPage::AWARD_EMOJI }
+let(:emoji_name) { Gitlab::StatusPage::AWARD_EMOJI }
let(:issue_id) { triggered_by.awardable.noteable_id }
let(:triggered_by) do
...
@@ -6,8 +6,8 @@ RSpec.describe StatusPage::UnpublishDetailsService do
let_it_be(:project, refind: true) { create(:project) }
let(:issue) { instance_double(Issue, iid: incident_id) }
let(:incident_id) { 1 }
-let(:key) { StatusPage::Storage.details_path(incident_id) }
+let(:key) { Gitlab::StatusPage::Storage.details_path(incident_id) }
-let(:image_uploads_path) { StatusPage::Storage.uploads_path(issue.iid) }
+let(:image_uploads_path) { Gitlab::StatusPage::Storage.uploads_path(issue.iid) }
let(:service) { described_class.new(project: project) }
@@ -15,7 +15,7 @@ RSpec.describe StatusPage::UnpublishDetailsService do
describe '#execute' do
let(:status_page_setting_enabled) { true }
-let(:storage_client) { instance_double(StatusPage::Storage::S3Client) }
+let(:storage_client) { instance_double(Gitlab::StatusPage::Storage::S3Client) }
let(:status_page_setting) do
instance_double(StatusPage::ProjectSetting, enabled?: status_page_setting_enabled,
@@ -53,7 +53,7 @@ RSpec.describe StatusPage::UnpublishDetailsService do
it 'untracks the issue' do
expect(StatusPage::PublishedIncident).to receive(:untrack).with(issue)
-expect(StatusPage::UsageDataCounters::IncidentCounter).to receive(:count).with(:unpublishes).once
+expect(Gitlab::StatusPage::UsageDataCounters::IncidentCounter).to receive(:count).with(:unpublishes).once
result
end
@@ -64,7 +64,7 @@ RSpec.describe StatusPage::UnpublishDetailsService do
let(:error) { StandardError.new }
let(:exception) do
-StatusPage::Storage::Error.new(bucket: bucket, error: error)
+Gitlab::StatusPage::Storage::Error.new(bucket: bucket, error: error)
end
context 'when json delete fails' do
...
@@ -16,13 +16,13 @@ RSpec.shared_context 'list_objects_v2 result' do
end
RSpec.shared_context 'oversized list_objects_v2 result' do
-let(:keys_page_1) { random_keys(desired_size: StatusPage::Storage::MAX_KEYS_PER_PAGE) }
+let(:keys_page_1) { random_keys(desired_size: Gitlab::StatusPage::Storage::MAX_KEYS_PER_PAGE) }
-let(:keys_page_2) { random_keys(desired_size: StatusPage::Storage::MAX_KEYS_PER_PAGE) }
+let(:keys_page_2) { random_keys(desired_size: Gitlab::StatusPage::Storage::MAX_KEYS_PER_PAGE) }
before do
-stub_const("StatusPage::Storage::MAX_KEYS_PER_PAGE", 2)
+stub_const("Gitlab::StatusPage::Storage::MAX_KEYS_PER_PAGE", 2)
-stub_const("StatusPage::Storage::MAX_PAGES", 1)
+stub_const("Gitlab::StatusPage::Storage::MAX_PAGES", 1)
-stub_const("StatusPage::Storage::MAX_UPLOADS", StatusPage::Storage::MAX_PAGES * StatusPage::Storage::MAX_KEYS_PER_PAGE)
+stub_const("Gitlab::StatusPage::Storage::MAX_UPLOADS", Gitlab::StatusPage::Storage::MAX_PAGES * Gitlab::StatusPage::Storage::MAX_KEYS_PER_PAGE)
# AWS s3 client responses for list_objects is paginated
# stub_responses allows multiple responses as arguments and they will be returned in sequence
stub_responses(
...
@@ -11,7 +11,7 @@ RSpec.shared_examples 'status page quick actions' do
shared_examples 'skip silently' do
it 'does not allow publishing' do
-expect(StatusPage).not_to receive(:mark_for_publication).with(project, user, issue)
+expect(Gitlab::StatusPage).not_to receive(:mark_for_publication).with(project, user, issue)
expect(StatusPage::PublishWorker).not_to receive(:perform_async).with(user.id, project.id, issue.id)
add_note('/publish')
...
@@ -14,7 +14,7 @@ RSpec.shared_examples "img upload tags for status page" do
result_img_tag = Nokogiri::HTML(json[field]).css('img')[0]
result_link_tag = result_img_tag.parent
-expected_source_path = StatusPage::Storage.upload_path(issue.iid, secret, filename)
+expected_source_path = Gitlab::StatusPage::Storage.upload_path(issue.iid, secret, filename)
expect(result_img_tag['class']).to eq 'gl-image'
expect(result_img_tag['src']).to eq expected_source_path
...
@@ -2,7 +2,7 @@
RSpec.shared_examples 'publish incidents' do
let(:status_page_setting_enabled) { true }
-let(:storage_client) { instance_double(StatusPage::Storage::S3Client) }
+let(:storage_client) { instance_double(Gitlab::StatusPage::Storage::S3Client) }
let(:serializer) { instance_double(StatusPage::IncidentSerializer) }
let(:content_json) { content.to_json }
@@ -37,7 +37,7 @@ RSpec.shared_examples 'publish incidents' do
let(:error) { StandardError.new }
let(:exception) do
-StatusPage::Storage::Error.new(bucket: bucket, error: error)
+Gitlab::StatusPage::Storage::Error.new(bucket: bucket, error: error)
end
before do
@@ -51,7 +51,7 @@ RSpec.shared_examples 'publish incidents' do
end
context 'when limits exceeded' do
-let(:too_big) { 'a' * StatusPage::Storage::JSON_MAX_SIZE }
+let(:too_big) { 'a' * Gitlab::StatusPage::Storage::JSON_MAX_SIZE }
before do
if content.is_a?(Array)
...