Commit d47100b5 authored by Vitali Tatarintev

Merge branch 'pl-rubocop-todo-raise-args' into 'master'

Resolves rubocop offense Style/RaiseArgs [RUN AS-IF-FOSS]

See merge request gitlab-org/gitlab!58009
parents 4ea9f1bb d926e54c
......@@ -820,14 +820,6 @@ Style/NumericLiteralPrefix:
Style/PercentLiteralDelimiters:
  Enabled: false
# Offense count: 247
# Cop supports --auto-correct.
# Configuration parameters: .
# SupportedStyles: compact, exploded
Style/RaiseArgs:
  Enabled: false
  EnforcedStyle: exploded
# Offense count: 26
# Cop supports --auto-correct.
# Configuration parameters: SafeForConstants.
......
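For context, this is the cop being removed from the TODO list above: Style/RaiseArgs with the exploded style prefers passing the exception class and message to raise rather than constructing the exception inline. A minimal sketch of the two styles (the class and message below are illustrative, not taken from this merge request):

# Compact style: flagged when EnforcedStyle is :exploded.
raise ArgumentError.new('value must be positive')

# Exploded style: raise forwards the message to ArgumentError.exception,
# which builds the same exception instance, so behaviour is unchanged.
raise ArgumentError, 'value must be positive'

# With no constructor arguments, the exploded form is just the bare class.
raise ArgumentError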
......@@ -46,7 +46,7 @@ module PageLimiter
if params[:page].present? && params[:page].to_i > max_page_number
record_page_limit_interception
raise PageOutOfBoundsError.new(max_page_number)
raise PageOutOfBoundsError, max_page_number
end
end
......
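One nuance worth noting for hunks like the one above, where the argument is not a String: the exploded form forwards whatever follows the class to that class's constructor (via .exception), so custom exceptions that take a page number, a record, or another object keep working. A small sketch with a hypothetical error class, not code from this merge request:

class PageLimitError < StandardError
  attr_reader :max_page

  def initialize(max_page)
    @max_page = max_page
    super("page limit of #{max_page} exceeded")
  end
end

begin
  raise PageLimitError, 500   # equivalent to: raise PageLimitError.new(500)
rescue PageLimitError => e
  puts e.max_page             # => 500
end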
......@@ -37,7 +37,7 @@ module Packages
@mod.version_by(commit: target)
else
raise ArgumentError.new 'not a valid target'
raise ArgumentError, 'not a valid target'
end
end
end
......
......@@ -18,7 +18,7 @@ module TimeZoneHelper
def timezone_data(format: :short)
attrs = TIME_ZONE_FORMAT_ATTRS.fetch(format) do
valid_formats = TIME_ZONE_FORMAT_ATTRS.keys.map { |k| ":#{k}"}.join(", ")
raise ArgumentError.new("Invalid format :#{format}. Valid formats are #{valid_formats}.")
raise ArgumentError, "Invalid format :#{format}. Valid formats are #{valid_formats}."
end
ActiveSupport::TimeZone.all.map do |timezone|
......
......@@ -35,7 +35,7 @@ class BulkImports::Tracker < ApplicationRecord
def pipeline_class
unless BulkImports::Stage.pipeline_exists?(pipeline_name)
raise NameError.new("'#{pipeline_name}' is not a valid BulkImport Pipeline")
raise NameError, "'#{pipeline_name}' is not a valid BulkImport Pipeline"
end
pipeline_name.constantize
......
......@@ -26,7 +26,7 @@ module CacheMarkdownField
# Returns the default Banzai render context for the cached markdown field.
def banzai_render_context(field)
raise ArgumentError.new("Unknown field: #{field.inspect}") unless
raise ArgumentError, "Unknown field: #{field.inspect}" unless
cached_markdown_fields.markdown_fields.include?(field)
# Always include a project key, or Banzai complains
......@@ -99,7 +99,7 @@ module CacheMarkdownField
end
def cached_html_for(markdown_field)
raise ArgumentError.new("Unknown field: #{markdown_field}") unless
raise ArgumentError, "Unknown field: #{markdown_field}" unless
cached_markdown_fields.markdown_fields.include?(markdown_field)
__send__(cached_markdown_fields.html_field(markdown_field)) # rubocop:disable GitlabSecurity/PublicSend
......
......@@ -22,7 +22,7 @@ module GroupDescendant
return [] if descendants.empty?
unless descendants.all? { |hierarchy| hierarchy.is_a?(GroupDescendant) }
raise ArgumentError.new(_('element is not a hierarchy'))
raise ArgumentError, _('element is not a hierarchy')
end
all_hierarchies = descendants.map do |descendant|
......@@ -56,7 +56,7 @@ module GroupDescendant
end
if parent.nil? && hierarchy_top.present?
raise ArgumentError.new(_('specified top is not part of the tree'))
raise ArgumentError, _('specified top is not part of the tree')
end
if parent && parent != hierarchy_top
......
......@@ -59,7 +59,7 @@ module HasWikiPageMetaAttributes
if conflict.present?
meta.errors.add(:canonical_slug, 'Duplicate value found')
raise CanonicalSlugConflictError.new(meta)
raise CanonicalSlugConflictError, meta
end
meta
......
......@@ -168,7 +168,7 @@ module ReactiveCaching
data_deep_size = Gitlab::Utils::DeepSize.new(data, max_size: self.class.reactive_cache_hard_limit)
raise ExceededReactiveCacheLimit.new unless data_deep_size.valid?
raise ExceededReactiveCacheLimit unless data_deep_size.valid?
end
end
end
......@@ -31,7 +31,7 @@ module Sha256Attribute
end
unless column.type == :binary
raise ArgumentError.new("sha256_attribute #{name.inspect} is invalid since the column type is not :binary")
raise ArgumentError, "sha256_attribute #{name.inspect} is invalid since the column type is not :binary"
end
rescue StandardError => error
Gitlab::AppLogger.error "Sha256Attribute initialization: #{error.message}"
......
......@@ -24,7 +24,7 @@ module ShaAttribute
return unless column
unless column.type == :binary
raise ArgumentError.new("sha_attribute #{name.inspect} is invalid since the column type is not :binary")
raise ArgumentError, "sha_attribute #{name.inspect} is invalid since the column type is not :binary"
end
rescue StandardError => error
Gitlab::AppLogger.error "ShaAttribute initialization: #{error.message}"
......
......@@ -10,7 +10,7 @@ module Storage
proj_with_tags = first_project_with_container_registry_tags
if proj_with_tags
raise Gitlab::UpdatePathError.new("Namespace #{name} (#{id}) cannot be moved because at least one project (e.g. #{proj_with_tags.name} (#{proj_with_tags.id})) has tags in container registry")
raise Gitlab::UpdatePathError, "Namespace #{name} (#{id}) cannot be moved because at least one project (e.g. #{proj_with_tags.name} (#{proj_with_tags.id})) has tags in container registry"
end
parent_was = if saved_change_to_parent? && parent_id_before_last_save.present?
......@@ -83,7 +83,7 @@ module Storage
# if we cannot move namespace directory we should rollback
# db changes in order to prevent out of sync between db and fs
raise Gitlab::UpdatePathError.new('namespace directory cannot be moved')
raise Gitlab::UpdatePathError, 'namespace directory cannot be moved'
end
end
end
......
......@@ -12,7 +12,7 @@ module TokenAuthenticatable
def add_authentication_token_field(token_field, options = {})
if token_authenticatable_fields.include?(token_field)
raise ArgumentError.new("#{token_field} already configured via add_authentication_token_field")
raise ArgumentError, "#{token_field} already configured via add_authentication_token_field"
end
token_authenticatable_fields.push(token_field)
......
......@@ -31,7 +31,7 @@ module X509SerialNumberAttribute
end
unless column.type == :binary
raise ArgumentError.new("x509_serial_number_attribute #{name.inspect} is invalid since the column type is not :binary")
raise ArgumentError, "x509_serial_number_attribute #{name.inspect} is invalid since the column type is not :binary"
end
rescue StandardError => error
Gitlab::AppLogger.error "X509SerialNumberAttribute initialization: #{error.message}"
......
......@@ -20,7 +20,7 @@ class Namespace
end
def initialize(root)
raise StandardError.new('Must specify a root node') if root.parent_id
raise StandardError, 'Must specify a root node' if root.parent_id
@root = root
end
......
......@@ -85,7 +85,7 @@ module Namespaces
# Search this namespace's lineage. Bound inclusively by top node.
def lineage(top)
raise UnboundedSearch.new('Must bound search by a top') unless top
raise UnboundedSearch, 'Must bound search by a top' unless top
without_sti_condition
.traversal_ids_contains("{#{top.id}}")
......
......@@ -18,8 +18,8 @@ module Packages
end
def version_by(ref: nil, commit: nil)
raise ArgumentError.new 'no filter specified' unless ref || commit
raise ArgumentError.new 'ref and commit are mutually exclusive' if ref && commit
raise ArgumentError, 'no filter specified' unless ref || commit
raise ArgumentError, 'ref and commit are mutually exclusive' if ref && commit
if commit
return version_by_sha(commit) if commit.is_a? String
......
......@@ -17,15 +17,15 @@ module Packages
delegate :build, to: :@semver, allow_nil: true
def initialize(mod, type, commit, name: nil, semver: nil, ref: nil)
raise ArgumentError.new("invalid type '#{type}'") unless VALID_TYPES.include? type
raise ArgumentError.new("mod is required") unless mod
raise ArgumentError.new("commit is required") unless commit
raise ArgumentError, "invalid type '#{type}'" unless VALID_TYPES.include? type
raise ArgumentError, "mod is required" unless mod
raise ArgumentError, "commit is required" unless commit
if type == :ref
raise ArgumentError.new("ref is required") unless ref
raise ArgumentError, "ref is required" unless ref
elsif type == :pseudo
raise ArgumentError.new("name is required") unless name
raise ArgumentError.new("semver is required") unless semver
raise ArgumentError, "name is required" unless name
raise ArgumentError, "semver is required" unless semver
end
@mod = mod
......
......@@ -1004,7 +1004,7 @@ class Project < ApplicationRecord
end
def latest_successful_build_for_ref!(job_name, ref = default_branch)
latest_successful_build_for_ref(job_name, ref) || raise(ActiveRecord::RecordNotFound.new("Couldn't find job #{job_name}"))
latest_successful_build_for_ref(job_name, ref) || raise(ActiveRecord::RecordNotFound, "Couldn't find job #{job_name}")
end
def latest_pipeline(ref = default_branch, sha = nil)
......
......@@ -128,10 +128,10 @@ class SshHostKey
def normalize_url(url)
full_url = ::Addressable::URI.parse(url)
raise ArgumentError.new("Invalid URL") unless full_url&.scheme == 'ssh'
raise ArgumentError, "Invalid URL" unless full_url&.scheme == 'ssh'
Addressable::URI.parse("ssh://#{full_url.host}:#{full_url.inferred_port}")
rescue Addressable::URI::InvalidURIError
raise ArgumentError.new("Invalid URL")
raise ArgumentError, "Invalid URL"
end
end
......@@ -14,7 +14,7 @@ module Clusters
end
def execute
raise MissingRoleError.new('AWS provisioning role not configured') unless provision_role.present?
raise MissingRoleError, 'AWS provisioning role not configured' unless provision_role.present?
::Aws::AssumeRoleCredentials.new(
client: client,
......
......@@ -96,7 +96,7 @@ module Groups
def notify_error!
notify_error
raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
raise Gitlab::ImportExport::Error, shared.errors.to_sentence
end
def notify_success
......
......@@ -114,7 +114,7 @@ module Groups
def notify_error!
notify_error
raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
raise Gitlab::ImportExport::Error, shared.errors.to_sentence
end
def remove_base_tmp_dir
......
......@@ -80,7 +80,7 @@ module Metrics
def fetch_dashboard
uid = GrafanaUidParser.new(grafana_url, project).parse
raise DashboardProcessingError.new(_('Dashboard uid not found')) unless uid
raise DashboardProcessingError, _('Dashboard uid not found') unless uid
response = client.get_dashboard(uid: uid)
......@@ -89,7 +89,7 @@ module Metrics
def fetch_datasource(dashboard)
name = DatasourceNameParser.new(grafana_url, dashboard).parse
raise DashboardProcessingError.new(_('Datasource name not found')) unless name
raise DashboardProcessingError, _('Datasource name not found') unless name
response = client.get_datasource(name: name)
......@@ -115,7 +115,7 @@ module Metrics
def parse_json(json)
Gitlab::Json.parse(json, symbolize_names: true)
rescue JSON::ParserError
raise DashboardProcessingError.new(_('Grafana response contains invalid json'))
raise DashboardProcessingError, _('Grafana response contains invalid json')
end
end
......
......@@ -39,7 +39,7 @@ module Metrics
end
def invalid_embed_json!(message)
raise DashboardProcessingError.new(_("Parsing error for param :embed_json. %{message}") % { message: message })
raise DashboardProcessingError, _("Parsing error for param :embed_json. %{message}") % { message: message }
end
end
end
......
......@@ -9,7 +9,7 @@ module Namespaces
root_storage_statistics.recalculate!
rescue ActiveRecord::ActiveRecordError => e
raise RefresherError.new(e.message)
raise RefresherError, e.message
end
private
......
......@@ -20,7 +20,7 @@ module Packages
files: files
}
rescue ActiveModel::ValidationError => e
raise ExtractionError.new(e.message)
raise ExtractionError, e.message
end
private
......@@ -41,10 +41,10 @@ module Packages
def files
strong_memoize(:files) do
raise ExtractionError.new("is not a changes file") unless file_type == :changes
raise ExtractionError.new("Files field is missing") if fields['Files'].blank?
raise ExtractionError.new("Checksums-Sha1 field is missing") if fields['Checksums-Sha1'].blank?
raise ExtractionError.new("Checksums-Sha256 field is missing") if fields['Checksums-Sha256'].blank?
raise ExtractionError, "is not a changes file" unless file_type == :changes
raise ExtractionError, "Files field is missing" if fields['Files'].blank?
raise ExtractionError, "Checksums-Sha1 field is missing" if fields['Checksums-Sha1'].blank?
raise ExtractionError, "Checksums-Sha256 field is missing" if fields['Checksums-Sha256'].blank?
init_entries_from_files
entries_from_checksums_sha1
......@@ -73,8 +73,8 @@ module Packages
each_lines_for('Checksums-Sha1') do |line|
sha1sum, size, filename = line.split
entry = @entries[filename]
raise ExtractionError.new("#{filename} is listed in Checksums-Sha1 but not in Files") unless entry
raise ExtractionError.new("Size for #{filename} in Files and Checksums-Sha1 differ") unless entry.size == size.to_i
raise ExtractionError, "#{filename} is listed in Checksums-Sha1 but not in Files" unless entry
raise ExtractionError, "Size for #{filename} in Files and Checksums-Sha1 differ" unless entry.size == size.to_i
entry.sha1sum = sha1sum
end
......@@ -84,8 +84,8 @@ module Packages
each_lines_for('Checksums-Sha256') do |line|
sha256sum, size, filename = line.split
entry = @entries[filename]
raise ExtractionError.new("#{filename} is listed in Checksums-Sha256 but not in Files") unless entry
raise ExtractionError.new("Size for #{filename} in Files and Checksums-Sha256 differ") unless entry.size == size.to_i
raise ExtractionError, "#{filename} is listed in Checksums-Sha256 but not in Files" unless entry
raise ExtractionError, "Size for #{filename} in Files and Checksums-Sha256 differ" unless entry.size == size.to_i
entry.sha256sum = sha256sum
end
......@@ -104,7 +104,7 @@ module Packages
entry.package_file = ::Packages::PackageFileFinder.new(@package_file.package, filename).execute!
entry.validate!
rescue ActiveRecord::RecordNotFound
raise ExtractionError.new("#{filename} is listed in Files but was not uploaded")
raise ExtractionError, "#{filename} is listed in Files but was not uploaded"
end
end
end
......
......@@ -12,7 +12,7 @@ module Packages
end
def execute
raise ExtractionError.new('invalid package file') unless valid_package_file?
raise ExtractionError, 'invalid package file' unless valid_package_file?
extract_metadata
end
......
......@@ -26,7 +26,7 @@ module Packages
end
def execute
raise ExtractionError.new('invalid package file') unless valid_package_file?
raise ExtractionError, 'invalid package file' unless valid_package_file?
extract_metadata(nuspec_file)
end
......@@ -94,8 +94,8 @@ module Packages
Zip::File.open(file_path) do |zip_file|
entry = zip_file.glob('*.nuspec').first
raise ExtractionError.new('nuspec file not found') unless entry
raise ExtractionError.new('nuspec file too big') if entry.size > MAX_FILE_SIZE
raise ExtractionError, 'nuspec file not found' unless entry
raise ExtractionError, 'nuspec file too big' if entry.size > MAX_FILE_SIZE
entry.get_input_stream.read
end
......
......@@ -16,7 +16,7 @@ module Packages
end
def execute
raise InvalidMetadataError.new('package name and/or package version not found in metadata') unless valid_metadata?
raise InvalidMetadataError, 'package name and/or package version not found in metadata' unless valid_metadata?
try_obtain_lease do
@package_file.transaction do
......@@ -33,7 +33,7 @@ module Packages
end
end
rescue ActiveRecord::RecordInvalid => e
raise InvalidMetadataError.new(e.message)
raise InvalidMetadataError, e.message
end
private
......
......@@ -13,7 +13,7 @@ module Packages
)
unless meta.valid?
raise ActiveRecord::RecordInvalid.new(meta)
raise ActiveRecord::RecordInvalid, meta
end
Packages::Pypi::Metadatum.upsert(meta.attributes)
......
......@@ -107,7 +107,7 @@ module Packages
Gem::Package.new(File.open(file_path))
end
rescue StandardError
raise ExtractionError.new('Unable to read gem file')
raise ExtractionError, 'Unable to read gem file'
end
# used by ExclusiveLeaseGuard
......
......@@ -49,10 +49,8 @@ module Projects
def first_ensure_no_registry_tags_are_present
return unless project.has_container_registry_tags?
raise RenameFailedError.new(
"Project #{full_path_before} cannot be renamed because images are " \
raise RenameFailedError, "Project #{full_path_before} cannot be renamed because images are " \
"present in its container registry"
)
end
def expire_caches_before_rename
......@@ -144,7 +142,7 @@ module Projects
Gitlab::AppLogger.error(error)
raise RenameFailedError.new(error)
raise RenameFailedError, error
end
end
end
......
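In the rename hunk above, the message built from adjacent string literals with a trailing backslash no longer needs the wrapping parentheses: the concatenated literals still parse as a single second argument to raise. A rough, self-contained illustration using stand-in names only:

project_path = 'group/project'

begin
  raise RuntimeError, "Project #{project_path} cannot be renamed because images are " \
    "present in its container registry"
rescue RuntimeError => e
  puts e.message
end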
......@@ -174,7 +174,7 @@ module Projects
end
def raise_error(message)
raise DestroyError.new(message)
raise DestroyError, message
end
def flush_caches(project)
......
......@@ -112,7 +112,7 @@ module Projects
def notify_error!
notify_error
raise Gitlab::ImportExport::Error.new(shared.errors.to_sentence)
raise Gitlab::ImportExport::Error, shared.errors.to_sentence
end
def notify_success
......
......@@ -47,16 +47,16 @@ module Projects
@old_namespace = project.namespace
if Project.where(namespace_id: @new_namespace.try(:id)).where('path = ? or name = ?', project.path, project.name).exists?
raise TransferError.new(s_("TransferProject|Project with same name or path in target namespace already exists"))
raise TransferError, s_("TransferProject|Project with same name or path in target namespace already exists")
end
if project.has_container_registry_tags?
# We currently don't support renaming repository if it contains tags in container registry
raise TransferError.new(s_('TransferProject|Project cannot be transferred, because tags are present in its container registry'))
raise TransferError, s_('TransferProject|Project cannot be transferred, because tags are present in its container registry')
end
if project.has_packages?(:npm) && !new_namespace_has_same_root?(project)
raise TransferError.new(s_("TransferProject|Root namespace can't be updated if project has NPM packages"))
raise TransferError, s_("TransferProject|Root namespace can't be updated if project has NPM packages")
end
proceed_to_transfer
......@@ -170,7 +170,7 @@ module Projects
# Move main repository
unless move_repo_folder(@old_path, @new_path)
raise TransferError.new(s_("TransferProject|Cannot move project"))
raise TransferError, s_("TransferProject|Cannot move project")
end
# Disk path is changed; we need to ensure we reload it
......
......@@ -49,11 +49,11 @@ module Projects
def validate!
unless valid_visibility_level_change?(project, params[:visibility_level])
raise ValidationError.new(s_('UpdateProject|New visibility level not allowed!'))
raise ValidationError, s_('UpdateProject|New visibility level not allowed!')
end
if renaming_project_with_container_registry_tags?
raise ValidationError.new(s_('UpdateProject|Cannot rename project because it contains container registry tags!'))
raise ValidationError, s_('UpdateProject|Cannot rename project because it contains container registry tags!')
end
validate_default_branch_change
......@@ -67,7 +67,7 @@ module Projects
if project.change_head(params[:default_branch])
after_default_branch_change(previous_default_branch)
else
raise ValidationError.new(s_("UpdateProject|Could not set the default branch"))
raise ValidationError, s_("UpdateProject|Could not set the default branch")
end
end
......
......@@ -67,7 +67,7 @@ module StaticSiteEditor
def check_for_duplicate_keys!(generated_data, file_data)
duplicate_keys = generated_data.keys & file_data.keys
raise ValidationError.new("Duplicate key(s) '#{duplicate_keys}' found.") if duplicate_keys.present?
raise ValidationError, "Duplicate key(s) '#{duplicate_keys}' found." if duplicate_keys.present?
end
def merged_data(generated_data, file_data)
......
......@@ -22,7 +22,7 @@ class SubmitUsagePingService
usage_data = Gitlab::UsageData.data(force_refresh: true)
raise SubmissionError.new('Usage data is blank') if usage_data.blank?
raise SubmissionError, 'Usage data is blank' if usage_data.blank?
raw_usage_data = save_raw_usage_data(usage_data)
......@@ -33,12 +33,12 @@ class SubmitUsagePingService
headers: { 'Content-type' => 'application/json' }
)
raise SubmissionError.new("Unsuccessful response code: #{response.code}") unless response.success?
raise SubmissionError, "Unsuccessful response code: #{response.code}" unless response.success?
version_usage_data_id = response.dig('conv_index', 'usage_data_id') || response.dig('dev_ops_score', 'usage_data_id')
unless version_usage_data_id.is_a?(Integer) && version_usage_data_id > 0
raise SubmissionError.new("Invalid usage_data_id in response: #{version_usage_data_id}")
raise SubmissionError, "Invalid usage_data_id in response: #{version_usage_data_id}"
end
raw_usage_data.update_version_metadata!(usage_data_id: version_usage_data_id)
......
......@@ -94,7 +94,7 @@ module Terraform
end
def find_state!(find_params)
find_state(find_params) || raise(ActiveRecord::RecordNotFound.new("Couldn't find state"))
find_state(find_params) || raise(ActiveRecord::RecordNotFound, "Couldn't find state")
end
end
end
......@@ -9,7 +9,7 @@ module Todos
def initialize(user_id, entity_id, entity_type)
unless %w(Group Project).include?(entity_type)
raise ArgumentError.new("#{entity_type} is not an entity user can leave")
raise ArgumentError, "#{entity_type} is not an entity user can leave"
end
@user = UserFinder.new(user_id).find_by_id
......
......@@ -8,7 +8,7 @@ module Users
def initialize(target_user:)
@target_user = target_user
raise ArgumentError.new("Please provide a target user") unless target_user.is_a?(User)
raise ArgumentError, "Please provide a target user" unless target_user.is_a?(User)
end
def execute
......
......@@ -7,7 +7,7 @@ module Users
INCLUDED_DOMAINS_PATTERN = [/gmail.com/].freeze
def initialize(user:)
raise ArgumentError.new("Please provide a user") unless user.is_a?(User)
raise ArgumentError, "Please provide a user" unless user.is_a?(User)
@user = user
end
......
......@@ -34,7 +34,7 @@ module Users
def execute!(*args, &block)
result = execute(*args, &block)
raise ActiveRecord::RecordInvalid.new(@user) unless result[:status] == :success
raise ActiveRecord::RecordInvalid, @user unless result[:status] == :success
true
end
......
......@@ -451,7 +451,7 @@ module ObjectStorage
def with_exclusive_lease
lease_key = exclusive_lease_key
uuid = Gitlab::ExclusiveLease.new(lease_key, timeout: 1.hour.to_i).try_obtain
raise ExclusiveLeaseTaken.new(lease_key) unless uuid
raise ExclusiveLeaseTaken, lease_key unless uuid
yield uuid
ensure
......
......@@ -10,7 +10,7 @@ class CronValidator < ActiveModel::EachValidator
cron_parser = Gitlab::Ci::CronParser.new(record.public_send(attribute), record.cron_timezone) # rubocop:disable GitlabSecurity/PublicSend
record.errors.add(attribute, " is invalid syntax") unless cron_parser.cron_valid?
else
raise NonWhitelistedAttributeError.new "Non-whitelisted attribute"
raise NonWhitelistedAttributeError, "Non-whitelisted attribute"
end
end
end
......@@ -97,10 +97,10 @@ module GitGarbageCollectMethods
end
rescue GRPC::NotFound => e
Gitlab::GitLogger.error("#{__method__} failed:\nRepository not found")
raise Gitlab::Git::Repository::NoRepository.new(e)
raise Gitlab::Git::Repository::NoRepository, e
rescue GRPC::BadStatus => e
Gitlab::GitLogger.error("#{__method__} failed:\n#{e}")
raise Gitlab::Git::CommandError.new(e)
raise Gitlab::Git::CommandError, e
end
def get_gitaly_client(task, repository)
......
......@@ -50,7 +50,7 @@ module ObjectStorage
Gitlab::AppLogger.info header(success, failures)
Gitlab::AppLogger.warn failures(failures)
raise MigrationFailures.new(failures.map(&:error)) if failures.any?
raise MigrationFailures, failures.map(&:error) if failures.any?
end
def header(success, failures)
......
......@@ -33,10 +33,10 @@ module Packages
if result.success?
log_extra_metadata_on_done(:message, result.message)
else
raise SyncError.new(result.message)
raise SyncError, result.message
end
raise SyncError.new(result.message) unless result.success?
raise SyncError, result.message unless result.success?
end
private
......
......@@ -11,10 +11,10 @@ module Users
def perform(target_user_ids)
target_user_ids = Array.wrap(target_user_ids)
raise ArgumentError.new('No target user ID provided') if target_user_ids.empty?
raise ArgumentError, 'No target user ID provided' if target_user_ids.empty?
target_users = User.id_in(target_user_ids)
raise ArgumentError.new('No valid target user ID provided') if target_users.empty?
raise ArgumentError, 'No valid target user ID provided' if target_users.empty?
target_users.each do |user|
Users::UpdateAssignedOpenIssueCountService.new(target_user: user).execute
......
---
title: Resolves offenses Style/RaiseArgs
merge_request: 58009
author: Shubham Kumar @imskr
type: fixed
......@@ -914,7 +914,7 @@ class BackportEnterpriseSchema < ActiveRecord::Migration[5.0]
MSG
end
raise StandardError.new(message)
raise StandardError, message
end
def create_missing_tables
......
......@@ -17,7 +17,7 @@ module EE
class_methods do
def priority(algorithm_type)
raise ArgumentError.new("No priority for #{algorithm_type.inspect}") unless PRIORITIES.key?(algorithm_type)
raise ArgumentError, "No priority for #{algorithm_type.inspect}" unless PRIORITIES.key?(algorithm_type)
PRIORITIES[algorithm_type]
end
......
......@@ -23,13 +23,13 @@ module Geo
unless ::Gitlab::Geo.geo_database_configured?
message = NOT_CONFIGURED_MSG
message = "#{message}\nIn the GDK root, try running `make geo-setup`" if Rails.env.development?
raise SecondaryNotConfigured.new(message)
raise SecondaryNotConfigured, message
end
# Don't call super because LoadBalancing::ActiveRecordProxy will intercept it
retrieve_connection
rescue ActiveRecord::NoDatabaseError
raise SecondaryNotConfigured.new(NOT_CONFIGURED_MSG)
raise SecondaryNotConfigured, NOT_CONFIGURED_MSG
end
end
end
......@@ -58,11 +58,11 @@ module Vulnerabilities
def self.validate_enums(feedback_params)
unless feedback_types.include?(feedback_params[:feedback_type])
raise ArgumentError.new("'#{feedback_params[:feedback_type]}' is not a valid feedback_type")
raise ArgumentError, "'#{feedback_params[:feedback_type]}' is not a valid feedback_type"
end
unless categories.include?(feedback_params[:category])
raise ArgumentError.new("'#{feedback_params[:category]}' is not a valid category")
raise ArgumentError, "'#{feedback_params[:category]}' is not a valid category"
end
end
......
......@@ -32,7 +32,7 @@ module Ci
reset_ci_minutes!(namespaces)
end
raise BatchNotResetError.new(@failed_batches) if @failed_batches.any?
raise BatchNotResetError, @failed_batches if @failed_batches.any?
end
private
......
......@@ -6,7 +6,7 @@ module DastSiteValidations
TokenNotFound = Class.new(StandardError)
def execute!
raise PermissionsError.new('Insufficient permissions') unless allowed?
raise PermissionsError, 'Insufficient permissions' unless allowed?
return if dast_site_validation.passed?
......@@ -49,7 +49,7 @@ module DastSiteValidations
end
def validate!(response)
raise TokenNotFound.new('Could not find token') unless token_found?(response)
raise TokenNotFound, 'Could not find token' unless token_found?(response)
dast_site_validation.pass
end
......
......@@ -5,7 +5,7 @@ module DastSites
PermissionsError = Class.new(StandardError)
def execute!(url:)
raise PermissionsError.new('Insufficient permissions') unless allowed?
raise PermissionsError, 'Insufficient permissions' unless allowed?
find_or_create_by!(url)
end
......
......@@ -10,7 +10,7 @@ module Licenses
def execute
raise ActiveRecord::RecordNotFound unless license
raise Gitlab::Access::AccessDeniedError unless can?(user, :destroy_licenses)
raise DestroyCloudLicenseError.new(_('Cloud licenses can not be removed.')) if license.cloud_license?
raise DestroyCloudLicenseError, _('Cloud licenses can not be removed.') if license.cloud_license?
license.destroy
end
......
......@@ -67,7 +67,7 @@ class Gitlab::Seeder::ComplianceDashboardMergeRequests
merge_request
end
rescue ::Gitlab::Access::AccessDeniedError
raise ::Gitlab::Access::AccessDeniedError.new("If you are re-creating your GitLab database, you should also delete your old repositories located at $GDK/repositories/@hashed")
raise ::Gitlab::Access::AccessDeniedError, "If you are re-creating your GitLab database, you should also delete your old repositories located at $GDK/repositories/@hashed"
end
def create_pipeline!(project, ref, commit, status)
......
......@@ -16,7 +16,7 @@ module EE
response = faraday.put(url, payload, headers)
raise Error.new("Push Blob error: #{response.body}") unless response.success?
raise Error, "Push Blob error: #{response.body}" unless response.success?
true
end
......@@ -24,7 +24,7 @@ module EE
def push_manifest(name, tag, manifest, manifest_type)
response = faraday.put("v2/#{name}/manifests/#{tag}", manifest, { 'Content-Type' => manifest_type })
raise Error.new("Push manifest error: #{response.body}") unless response.success?
raise Error, "Push manifest error: #{response.body}" unless response.success?
true
end
......@@ -60,7 +60,7 @@ module EE
file.write(chunk)
end
raise Error.new("Could not download the blob: #{digest}") unless response.status.success?
raise Error, "Could not download the blob: #{digest}" unless response.status.success?
file
ensure
......@@ -76,7 +76,7 @@ module EE
def get_upload_url(name, digest)
response = faraday.post("/v2/#{name}/blobs/uploads/")
raise Error.new("Get upload URL error: #{response.body}") unless response.success?
raise Error, "Get upload URL error: #{response.body}" unless response.success?
upload_url = URI(response.headers['location'])
upload_url.query = "#{upload_url.query}&#{URI.encode_www_form(digest: digest)}"
......
......@@ -14,7 +14,7 @@ module EE
logger.log_timed(LOG_MESSAGE) do
unless branch_name_allowed_by_push_rule?
message = ERROR_MESSAGE % { branch_name_regex: push_rule.branch_name_regex }
raise ::Gitlab::GitAccess::ForbiddenError.new(message)
raise ::Gitlab::GitAccess::ForbiddenError, message
end
end
......
......@@ -14,11 +14,11 @@ module Gitlab
end
def title
raise NotImplementedError.new("Expected #{self.name} to implement title")
raise NotImplementedError, "Expected #{self.name} to implement title"
end
def value
raise NotImplementedError.new("Expected #{self.name} to implement value")
raise NotImplementedError, "Expected #{self.name} to implement value"
end
end
end
......
......@@ -21,11 +21,11 @@ module Gitlab
end
def start_event_identifier
raise NotImplementedError.new("Expected #{self.name} to implement start_event_identifier")
raise NotImplementedError, "Expected #{self.name} to implement start_event_identifier"
end
def end_event_identifier
raise NotImplementedError.new("Expected #{self.name} to implement end_event_identifier")
raise NotImplementedError, "Expected #{self.name} to implement end_event_identifier"
end
private
......
......@@ -58,7 +58,7 @@ module Gitlab
when Array
serialize_array(anything)
else
raise InvalidError.new("Don't know how to serialize #{anything.class}")
raise InvalidError, "Don't know how to serialize #{anything.class}"
end
end
......@@ -85,7 +85,7 @@ module Gitlab
private
def test_array!(array)
raise InvalidError.new("Bad array representation: #{array.inspect}") unless
raise InvalidError, "Bad array representation: #{array.inspect}" unless
(3..4).cover?(array.size)
end
end
......
......@@ -59,7 +59,7 @@ module Gitlab
rescue OpenSSL::Cipher::CipherError
message = 'Error decrypting the Geo secret from the database. Check that the primary and secondary have the same db_key_base.'
log_error(message)
raise InvalidDecryptionKeyError.new(message)
raise InvalidDecryptionKeyError, message
end
return unless data.present?
......@@ -81,7 +81,7 @@ module Gitlab
rescue JWT::ImmatureSignature, JWT::ExpiredSignature
message = "Signature not within leeway of #{IAT_LEEWAY} seconds. Check your system clocks!"
log_error(message)
raise InvalidSignatureTimeError.new(message)
raise InvalidSignatureTimeError, message
rescue JWT::DecodeError => e
log_error("Error decoding Geo request: #{e}")
nil
......
......@@ -19,7 +19,7 @@ module Gitlab
error = validate_facet(aggregate_facet)
if error
raise ArgumentError.new("#{error}. Please specify either #{COUNT} or #{WEIGHT_SUM}")
raise ArgumentError, "#{error}. Please specify either #{COUNT} or #{WEIGHT_SUM}"
end
@facet = aggregate_facet.to_sym
......
......@@ -26,14 +26,14 @@ module Gitlab
# limit (~200), then postgres uses a slow query plan and first does
# left join of epic_issues with issues which times out
epic_ids = ::Epic.ids_for_base_and_decendants(target_epic_ids)
raise ArgumentError.new("There are too many epics to load. Please select fewer epics or contact your administrator.") if epic_ids.count >= MAXIMUM_LOADABLE
raise ArgumentError, "There are too many epics to load. Please select fewer epics or contact your administrator." if epic_ids.count >= MAXIMUM_LOADABLE
# We do a left outer join in order to capture epics with no issues
# This is so we can aggregate the epic counts for every epic
raw_results = []
epic_ids.in_groups_of(EPIC_BATCH_SIZE).each do |epic_batch_ids|
raw_results += ::Epic.issue_metadata_for_epics(epic_ids: epic_ids, limit: MAXIMUM_LOADABLE)
raise ArgumentError.new("There are too many records to load. Please select fewer epics or contact your administrator.") if raw_results.count >= MAXIMUM_LOADABLE
raise ArgumentError, "There are too many records to load. Please select fewer epics or contact your administrator." if raw_results.count >= MAXIMUM_LOADABLE
end
@results = raw_results.group_by { |record| record[:id] }
......
......@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Host do
def raise_and_wrap(wrapper, original)
raise original
rescue original.class
raise wrapper.new('boom')
raise wrapper, 'boom'
end
def wrapped_exception(wrapper, original)
......
......@@ -21,7 +21,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
def raise_and_wrap(wrapper, original)
raise original
rescue original.class
raise wrapper.new('boop')
raise wrapper, 'boop'
end
def wrapped_exception(wrapper, original)
......@@ -94,7 +94,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
returned = lb.read do
unless raised
raised = true
raise conflict_error.new
raise conflict_error
end
10
......@@ -107,7 +107,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do
expect(lb).to receive(:release_host).exactly(6).times
expect(lb).to receive(:read_write)
lb.read { raise conflict_error.new }
lb.read { raise conflict_error }
end
it 'uses the primary if no secondaries are available' do
......
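The spec changes above rely on the same mechanism: raise also accepts a bare exception class held in a variable and instantiates it itself, so the explicit .new call was redundant. A minimal sketch under that assumption (the local variable name mirrors the spec, but the snippet is illustrative):

conflict_error = Class.new(StandardError)

begin
  raise conflict_error          # same as: raise conflict_error.new
rescue conflict_error => e
  puts e.is_a?(StandardError)   # => true
end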
......@@ -59,7 +59,7 @@ RSpec.describe AutoMerge::MergeTrainService do
context 'when failed to save the record' do
before do
allow(merge_request).to receive(:save!) { raise PG::QueryCanceled.new }
allow(merge_request).to receive(:save!) { raise PG::QueryCanceled }
end
it 'returns result code' do
......@@ -69,7 +69,7 @@ RSpec.describe AutoMerge::MergeTrainService do
context 'when statement timeout happened on system note creation' do
before do
allow(SystemNoteService).to receive(:merge_train) { raise PG::QueryCanceled.new }
allow(SystemNoteService).to receive(:merge_train) { raise PG::QueryCanceled }
end
it 'returns failed status' do
......@@ -218,7 +218,7 @@ RSpec.describe AutoMerge::MergeTrainService do
context 'when statement timeout happened on system note creation' do
before do
allow(SystemNoteService).to receive(:cancel_merge_train) { raise PG::QueryCanceled.new }
allow(SystemNoteService).to receive(:cancel_merge_train) { raise PG::QueryCanceled }
end
it 'returns error' do
......@@ -304,7 +304,7 @@ RSpec.describe AutoMerge::MergeTrainService do
context 'when statement timeout happened on system note creation' do
before do
allow(SystemNoteService).to receive(:abort_merge_train) { raise PG::QueryCanceled.new }
allow(SystemNoteService).to receive(:abort_merge_train) { raise PG::QueryCanceled }
end
it 'returns error' do
......
......@@ -141,7 +141,7 @@ RSpec.describe MergeTrains::CreatePipelineService do
context 'when there is a conflict on merge ref creation' do
before do
allow(project.repository).to receive(:merge_to_ref) do
raise Gitlab::Git::CommandError.new('Failed to create merge commit')
raise Gitlab::Git::CommandError, 'Failed to create merge commit'
end
end
......
......@@ -29,7 +29,7 @@ RSpec.describe Geo::RepositorySyncWorker, :geo, :clean_gitlab_redis_cache do
create(:geo_project_registry, :synced, :repository_dirty, project: unhealthy_dirty)
allow(Gitlab::GitalyClient).to receive(:call) do
raise GRPC::Unavailable.new('No Gitaly available')
raise GRPC::Unavailable, 'No Gitaly available'
end
expect(repository_worker).not_to receive(:perform_async).with('broken')
......
......@@ -26,7 +26,7 @@ RSpec.describe Geo::RepositoryVerification::Primary::BatchWorker, :clean_gitlab_
create(:repository_state, :repository_outdated, project: unhealthy_outdated)
allow(Gitlab::GitalyClient).to receive(:call) do
raise GRPC::Unavailable.new('No Gitaly available')
raise GRPC::Unavailable, 'No Gitaly available'
end
expect(Geo::RepositoryVerification::Primary::ShardWorker).not_to receive(:perform_async).with('broken')
......
......@@ -23,7 +23,7 @@ RSpec.describe Geo::RepositoryVerification::Secondary::SchedulerWorker, :clean_g
create(:project, :broken_storage)
allow(Gitlab::GitalyClient).to receive(:call) do
raise GRPC::Unavailable.new('No Gitaly available')
raise GRPC::Unavailable, 'No Gitaly available'
end
expect(Geo::RepositoryVerification::Secondary::ShardWorker).not_to receive(:perform_async).with('broken')
......
......@@ -8,7 +8,7 @@ module API
def set_http_headers(header_data)
header_data.each do |key, value|
if value.is_a?(Enumerable)
raise ArgumentError.new("Header value should be a string")
raise ArgumentError, "Header value should be a string"
end
header "X-Gitlab-#{key.to_s.split('_').collect(&:capitalize).join('-')}", value.to_s
......
......@@ -158,7 +158,7 @@ module API
status 200
unless actor.key_or_user
raise ActiveRecord::RecordNotFound.new('User not found!')
raise ActiveRecord::RecordNotFound, 'User not found!'
end
actor.update_last_used_at!
......
......@@ -63,7 +63,7 @@ module BulkImports
def with_error_handling
response = yield
raise ConnectionError.new("Error #{response.code}") unless response.success?
raise ConnectionError, "Error #{response.code}" unless response.success?
response
rescue *Gitlab::HTTP::HTTP_ERRORS => e
......
......@@ -64,7 +64,7 @@ module DeclarativeEnum
end
def define(&block)
raise LocalJumpError.new('No block given') unless block
raise LocalJumpError, 'No block given' unless block
@definition = Builder.new(definition, block).build
end
......
......@@ -17,7 +17,7 @@ module Flowdock
end
def initialize(ref, from, to, options = {})
raise TokenError.new("Flowdock API token not found") unless options[:token]
raise TokenError, "Flowdock API token not found" unless options[:token]
@ref = ref
@from = from
......
......@@ -160,7 +160,7 @@ module Gitlab
case AccessTokenValidationService.new(access_token, request: request).validate(scopes: scopes)
when AccessTokenValidationService::INSUFFICIENT_SCOPE
raise InsufficientScopeError.new(scopes)
raise InsufficientScopeError, scopes
when AccessTokenValidationService::EXPIRED
raise ExpiredError
when AccessTokenValidationService::REVOKED
......
......@@ -59,7 +59,7 @@ module Gitlab
end
def self.invalid_provider(provider)
raise InvalidProvider.new("Unknown provider (#{provider}). Available providers: #{providers}")
raise InvalidProvider, "Unknown provider (#{provider}). Available providers: #{providers}"
end
def self.encrypted_secrets
......
......@@ -13,7 +13,7 @@ module Gitlab
repos_to_import = Dir.glob(import_path + '**/*.git')
unless user = User.admins.order_id_asc.first
raise NoAdminError.new('No admin user found to import repositories')
raise NoAdminError, 'No admin user found to import repositories'
end
repos_to_import.each do |repo_path|
......
......@@ -55,7 +55,7 @@ module Gitlab
result = service.execute
raise Error.new(result[:message]) if result[:status] != :success
raise Error, result[:message] if result[:status] != :success
end
end
......
......@@ -169,7 +169,7 @@ module Gitlab
# We raise a custom error so it's easier to catch different changelog
# related errors. In addition, this ensures the caller of this method
# doesn't depend on a Parslet specific error class.
raise Error.new("Failed to parse the template: #{ex.message}")
raise Error, "Failed to parse the template: #{ex.message}"
end
end
end
......
......@@ -31,7 +31,7 @@ module Gitlab
def ensure_test_cases_limited!(total_parsed, limit)
return unless limit > 0 && total_parsed > limit
raise JunitParserError.new("number of test cases exceeded the limit of #{limit}")
raise JunitParserError, "number of test cases exceeded the limit of #{limit}"
end
def all_cases(root, parent = nil, &blk)
......
......@@ -141,7 +141,7 @@ module Gitlab
end
def error!(message)
raise ValidationError.new(message)
raise ValidationError, message
end
end
end
......
......@@ -11,11 +11,11 @@ module Gitlab
end
def title
raise NotImplementedError.new("Expected #{self.name} to implement title")
raise NotImplementedError, "Expected #{self.name} to implement title"
end
def value
raise NotImplementedError.new("Expected #{self.name} to implement value")
raise NotImplementedError, "Expected #{self.name} to implement value"
end
end
end
......
......@@ -94,7 +94,7 @@ module Gitlab
elsif column.is_a?(Arel::Attributes::Attribute)
column
else
raise ColumnConfigurationError.new("Cannot transform the column: #{column.inspect}, please provide the column name as string")
raise ColumnConfigurationError, "Cannot transform the column: #{column.inspect}, please provide the column name as string"
end
end
end
......
......@@ -65,7 +65,7 @@ module Gitlab
contents = deserialize(read)
raise InvalidConfigError.new unless contents.is_a?(Hash)
raise InvalidConfigError unless contents.is_a?(Hash)
@config = contents.deep_symbolize_keys
end
......@@ -115,7 +115,7 @@ module Gitlab
end
def handle_missing_key!
raise MissingKeyError.new if @key.nil?
raise MissingKeyError if @key.nil?
end
end
end
......@@ -150,7 +150,7 @@ module Gitlab
elsif subject.respond_to?(:to_s)
subject.to_s
else
raise ArgumentError.new('Subject must respond to `to_global_id` or `to_s`')
raise ArgumentError, 'Subject must respond to `to_global_id` or `to_s`'
end
end
end
......
......@@ -24,7 +24,7 @@ module Gitlab
)
::Gitlab::ExternalAuthorization::Response.new(response)
rescue *Gitlab::HTTP::HTTP_ERRORS => e
raise ::Gitlab::ExternalAuthorization::RequestFailed.new(e)
raise ::Gitlab::ExternalAuthorization::RequestFailed, e
end
private
......
......@@ -20,9 +20,9 @@ module Gitlab
gitaly_conflicts_client(@target_repository).list_conflict_files.to_a
end
rescue GRPC::FailedPrecondition => e
raise Gitlab::Git::Conflict::Resolver::ConflictSideMissing.new(e.message)
raise Gitlab::Git::Conflict::Resolver::ConflictSideMissing, e.message
rescue GRPC::BadStatus => e
raise Gitlab::Git::CommandError.new(e)
raise Gitlab::Git::CommandError, e
end
def resolve_conflicts(source_repository, resolution, source_branch:, target_branch:)
......
......@@ -89,9 +89,9 @@ module Gitlab
def root_ref
gitaly_ref_client.default_branch_name
rescue GRPC::NotFound => e
raise NoRepository.new(e.message)
raise NoRepository, e.message
rescue GRPC::Unknown => e
raise Gitlab::Git::CommandError.new(e.message)
raise Gitlab::Git::CommandError, e.message
end
def exists?
......@@ -348,7 +348,7 @@ module Gitlab
limit = options[:limit]
if limit == 0 || !limit.is_a?(Integer)
raise ArgumentError.new("invalid Repository#log limit: #{limit.inspect}")
raise ArgumentError, "invalid Repository#log limit: #{limit.inspect}"
end
wrapped_gitaly_errors do
......@@ -414,7 +414,7 @@ module Gitlab
end
end
rescue ArgumentError => e
raise Gitlab::Git::Repository::GitError.new(e)
raise Gitlab::Git::Repository::GitError, e
end
# Returns the SHA of the most recent common ancestor of +from+ and +to+
......@@ -836,7 +836,7 @@ module Gitlab
def fsck
msg, status = gitaly_repository_client.fsck
raise GitError.new("Could not fsck repository: #{msg}") unless status == 0
raise GitError, "Could not fsck repository: #{msg}" unless status == 0
end
def create_from_bundle(bundle_path)
......
......@@ -31,7 +31,7 @@ module Gitlab
def rugged
@rugged ||= ::Rugged::Repository.new(path, alternates: alternate_object_directories)
rescue ::Rugged::RepositoryError, ::Rugged::OSError
raise ::Gitlab::Git::Repository::NoRepository.new('no repository for such path')
raise ::Gitlab::Git::Repository::NoRepository, 'no repository for such path'
end
def cleanup
......
......@@ -6,13 +6,13 @@ module Gitlab
def wrapped_gitaly_errors(&block)
yield block
rescue GRPC::NotFound => e
raise Gitlab::Git::Repository::NoRepository.new(e)
raise Gitlab::Git::Repository::NoRepository, e
rescue GRPC::InvalidArgument => e
raise ArgumentError.new(e)
raise ArgumentError, e
rescue GRPC::DeadlineExceeded => e
raise Gitlab::Git::CommandTimedOut.new(e)
raise Gitlab::Git::CommandTimedOut, e
rescue GRPC::BadStatus => e
raise Gitlab::Git::CommandError.new(e)
raise Gitlab::Git::CommandError, e
end
end
end
......
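For error-wrapping hunks such as the one above, where the payload is another exception (for example a GRPC error), raise Wrapper, original passes the original to the wrapper's constructor exactly as Wrapper.new(original) did, so for plain StandardError subclasses the wrapper's message defaults to the original error's to_s. A sketch with stand-in classes rather than the Gitaly ones:

CommandError = Class.new(StandardError)

original = RuntimeError.new('gitaly unavailable')

begin
  raise CommandError, original   # equivalent to: raise CommandError.new(original)
rescue CommandError => e
  puts e.message                 # => "gitaly unavailable"
end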
......@@ -59,7 +59,7 @@ module Gitlab
:user_create_branch, request, timeout: GitalyClient.long_timeout)
if response.pre_receive_error.present?
raise Gitlab::Git::PreReceiveError.new(response.pre_receive_error)
raise Gitlab::Git::PreReceiveError, response.pre_receive_error
end
branch = response.branch
......@@ -159,7 +159,7 @@ module Gitlab
branch_update = second_response.branch_update
return if branch_update.nil?
raise Gitlab::Git::CommitError.new('failed to apply merge to branch') unless branch_update.commit_id.present?
raise Gitlab::Git::CommitError, 'failed to apply merge to branch' unless branch_update.commit_id.present?
Gitlab::Git::OperationService::BranchUpdate.from_gitaly(branch_update)
ensure
......
......@@ -292,7 +292,7 @@ module Gitlab
end
def invalid_ref!(message)
raise Gitlab::Git::Repository::InvalidRef.new(message)
raise Gitlab::Git::Repository::InvalidRef, message
end
end
end
......
......@@ -319,7 +319,7 @@ module Gitlab
response = GitalyClient.call(@storage, :repository_service, :calculate_checksum, request, timeout: GitalyClient.fast_timeout)
response.checksum.presence
rescue GRPC::DataLoss => e
raise Gitlab::Git::Repository::InvalidRepository.new(e)
raise Gitlab::Git::Repository::InvalidRepository, e
end
def raw_changes_between(from, to)
......
......@@ -69,13 +69,13 @@ module Gitlab
# Error messages are based on the responses of proxy.golang.org
# Verify that the SHA fragment references a commit
raise ArgumentError.new 'invalid pseudo-version: unknown commit' unless commit
raise ArgumentError, 'invalid pseudo-version: unknown commit' unless commit
# Require the SHA fragment to be 12 characters long
raise ArgumentError.new 'invalid pseudo-version: revision is shorter than canonical' unless version.commit_id.length == 12
raise ArgumentError, 'invalid pseudo-version: revision is shorter than canonical' unless version.commit_id.length == 12
# Require the timestamp to match that of the commit
raise ArgumentError.new 'invalid pseudo-version: does not match version-control timestamp' unless commit.committed_date.strftime('%Y%m%d%H%M%S') == version.timestamp
raise ArgumentError, 'invalid pseudo-version: does not match version-control timestamp' unless commit.committed_date.strftime('%Y%m%d%H%M%S') == version.timestamp
commit
end
......
......@@ -114,7 +114,7 @@ module Gitlab
def limited_nodes
strong_memoize(:limited_nodes) do
if first && last
raise Gitlab::Graphql::Errors::ArgumentError.new("Can only provide either `first` or `last`, not both")
raise Gitlab::Graphql::Errors::ArgumentError, "Can only provide either `first` or `last`, not both"
end
if last
......@@ -158,7 +158,7 @@ module Gitlab
def ordered_items
strong_memoize(:ordered_items) do
unless items.primary_key.present?
raise ArgumentError.new('Relation must have a primary key')
raise ArgumentError, 'Relation must have a primary key'
end
list = OrderInfo.build_order_list(items)
......
......@@ -36,24 +36,24 @@ module Gitlab
def self.validate_ordering(relation, order_list)
if order_list.empty?
raise ArgumentError.new('A minimum of 1 ordering field is required')
raise ArgumentError, 'A minimum of 1 ordering field is required'
end
if order_list.count > 2
# Keep in mind an order clause for primary key is added if one is not present
# lib/gitlab/graphql/pagination/keyset/connection.rb:97
raise ArgumentError.new('A maximum of 2 ordering fields are allowed')
raise ArgumentError, 'A maximum of 2 ordering fields are allowed'
end
# make sure the last ordering field is non-nullable
attribute_name = order_list.last&.attribute_name
if relation.columns_hash[attribute_name].null
raise ArgumentError.new("Column `#{attribute_name}` must not allow NULL")
raise ArgumentError, "Column `#{attribute_name}` must not allow NULL"
end
if order_list.last.attribute_name != relation.primary_key
raise ArgumentError.new("Last ordering field must be the primary key, `#{relation.primary_key}`")
raise ArgumentError, "Last ordering field must be the primary key, `#{relation.primary_key}`"
end
end
......
......@@ -12,7 +12,7 @@ module Gitlab
@before_or_after = before_or_after
if order_list.empty?
raise ArgumentError.new('No ordering scopes have been supplied')
raise ArgumentError, 'No ordering scopes have been supplied'
end
end
......@@ -49,7 +49,7 @@ module Gitlab
end
if order_list.count == 1 && attr_values.first.nil?
raise Gitlab::Graphql::Errors::ArgumentError.new('Before/after cursor invalid: `nil` was provided as only sortable value')
raise Gitlab::Graphql::Errors::ArgumentError, 'Before/after cursor invalid: `nil` was provided as only sortable value'
end
if order_list.count == 1 || attr_values.first.present?
......
......@@ -32,7 +32,7 @@ module Gitlab
raise Invalid, "Unexpected parameter: #{ambiguous_param}"
end
rescue JSON::ParserError => e
raise Invalid.new(e)
raise Invalid, e
end
end
end
......
......@@ -28,7 +28,7 @@ module Gitlab
def handle_response_error(response)
unless response.success?
raise StrategyError.new("Error uploading the project. Code #{response.code}: #{response.message}")
raise StrategyError, "Error uploading the project. Code #{response.code}: #{response.message}"
end
end
......
......@@ -12,7 +12,7 @@ module Gitlab
klass = strategy_klass.constantize rescue nil
unless klass && klass < AfterExportStrategies::BaseAfterExportStrategy
raise StrategyNotFoundError.new("Strategy #{strategy_klass} not found")
raise StrategyNotFoundError, "Strategy #{strategy_klass} not found"
end
klass.new(**attributes.symbolize_keys)
......
......@@ -57,7 +57,7 @@ module Gitlab
def decompress_archive
result = untar_zxf(archive: @archive_file, dir: @shared.export_path)
raise ImporterError.new("Unable to decompress #{@archive_file} into #{@shared.export_path}") unless result
raise ImporterError, "Unable to decompress #{@archive_file} into #{@shared.export_path}" unless result
result
end
......@@ -87,7 +87,7 @@ module Gitlab
end
def validate_decompressed_archive_size
raise ImporterError.new(_('Decompressed archive size validation failed.')) unless size_validator.valid?
raise ImporterError, _('Decompressed archive size validation failed.') unless size_validator.valid?
end
def size_validator
......
......@@ -21,7 +21,7 @@ module Gitlab
if import_file && check_version! && restorers.all?(&:restore) && overwrite_project
project
else
raise Projects::ImportService::Error.new(shared.errors.to_sentence)
raise Projects::ImportService::Error, shared.errors.to_sentence
end
rescue StandardError => e
# If some exception was raised could mean that the SnippetsRepoRestorer
......@@ -29,7 +29,7 @@ module Gitlab
# This is a state we don't want them to be, so we better delete them.
remove_non_migrated_snippets
raise Projects::ImportService::Error.new(e.message)
raise Projects::ImportService::Error, e.message
ensure
remove_base_tmp_dir
remove_import_file
......
......@@ -30,7 +30,7 @@ module Gitlab
ActiveSupport::JSON.decode(IO.read(@path))
rescue StandardError => e
Gitlab::ErrorTracking.log_exception(e)
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
raise Gitlab::ImportExport::Error, 'Incorrect JSON format'
end
end
......
......@@ -74,7 +74,7 @@ module Gitlab
json = IO.read(lfs_json_path)
ActiveSupport::JSON.decode(json)
rescue StandardError
raise Gitlab::ImportExport::Error.new('Incorrect JSON format')
raise Gitlab::ImportExport::Error, 'Incorrect JSON format'
end
end
......
......@@ -90,7 +90,7 @@ module Gitlab
when 'Group'
@exportable.full_path
else
raise Gitlab::ImportExport::Error.new("Unsupported Exportable Type #{@exportable&.class}")
raise Gitlab::ImportExport::Error, "Unsupported Exportable Type #{@exportable&.class}"
end
end
......
......@@ -27,7 +27,7 @@ module Gitlab
def verify_version!(version)
if different_version?(version)
raise Gitlab::ImportExport::Error.new("Import version mismatch: Required #{Gitlab::ImportExport.version} but was #{version}")
raise Gitlab::ImportExport::Error, "Import version mismatch: Required #{Gitlab::ImportExport.version} but was #{version}"
else
true
end
......@@ -41,7 +41,7 @@ module Gitlab
error: e.message
)
raise Gitlab::ImportExport::Error.new('Incorrect VERSION format')
raise Gitlab::ImportExport::Error, 'Incorrect VERSION format'
end
end
end
......
......@@ -62,7 +62,7 @@ module Gitlab
end
if key_slots.uniq.many? # rubocop: disable CodeReuse/ActiveRecord
raise CrossSlotError.new("Redis command #{command_name} arguments hash to different slots. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands")
raise CrossSlotError, "Redis command #{command_name} arguments hash to different slots. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands"
end
end
......
......@@ -12,7 +12,7 @@ module Gitlab
def request(*args)
result = make_request(*args)
raise JIRA::HTTPError.new(result.response) unless result.response.is_a?(Net::HTTPSuccess)
raise JIRA::HTTPError, result.response unless result.response.is_a?(Net::HTTPSuccess)
result
end
......
......@@ -84,7 +84,7 @@ module Gitlab
Oj.load(string, opts)
rescue Oj::ParseError, Encoding::UndefinedConversionError => ex
raise parser_error.new(ex)
raise parser_error, ex
end
# Take a Ruby object and convert it to a string. This method varies
......
......@@ -20,7 +20,7 @@ module Gitlab
service = ::Labels::FindOrCreateService.new(nil, project, params)
label = service.execute(skip_authorization: true)
raise ActiveRecord::RecordInvalid.new(label) unless label.persisted?
raise ActiveRecord::RecordInvalid, label unless label.persisted?
label
end
......
......@@ -43,7 +43,7 @@ module Gitlab
body = Gitlab::Json.parse(rsp.body)
transfer = body.fetch('transfer', 'basic')
raise UnsupportedTransferError.new(transfer.inspect) unless transfer == 'basic'
raise UnsupportedTransferError, transfer.inspect unless transfer == 'basic'
body
end
......
......@@ -33,7 +33,7 @@ module Gitlab
end
def panels_not_found!(opts)
raise PanelNotFoundError.new(_("No panels matching properties %{opts}") % { opts: opts })
raise PanelNotFoundError, _("No panels matching properties %{opts}") % { opts: opts }
end
end
end
......
......@@ -23,15 +23,15 @@ module Gitlab
protected
def missing_panel_groups!
raise Errors::LayoutError.new('Top-level key :panel_groups must be an array')
raise Errors::LayoutError, 'Top-level key :panel_groups must be an array'
end
def missing_panels!
raise Errors::LayoutError.new('Each "panel_group" must define an array :panels')
raise Errors::LayoutError, 'Each "panel_group" must define an array :panels'
end
def missing_metrics!
raise Errors::LayoutError.new('Each "panel" must define an array :metrics')
raise Errors::LayoutError, 'Each "panel" must define an array :metrics'
end
def for_metrics
......
......@@ -39,7 +39,7 @@ module Gitlab
end
def error!(message)
raise Errors::DashboardProcessingError.new(message)
raise Errors::DashboardProcessingError, message
end
def group_url(metric)
......@@ -67,14 +67,14 @@ module Gitlab
def query_for_metric(metric)
query = metric[query_type(metric)]
raise Errors::MissingQueryError.new('Each "metric" must define one of :query or :query_range') unless query
raise Errors::MissingQueryError, 'Each "metric" must define one of :query or :query_range' unless query
query
end
def verify_params
raise Errors::DashboardProcessingError.new(_('Cluster is required for Stages::ClusterEndpointInserter')) unless params[:cluster]
raise Errors::DashboardProcessingError.new(_('Cluster type must be specificed for Stages::ClusterEndpointInserter')) unless params[:cluster_type]
raise Errors::DashboardProcessingError, _('Cluster is required for Stages::ClusterEndpointInserter') unless params[:cluster]
raise Errors::DashboardProcessingError, _('Cluster type must be specificed for Stages::ClusterEndpointInserter') unless params[:cluster_type]
end
end
end
......
......@@ -6,7 +6,7 @@ module Gitlab
module Stages
class MetricEndpointInserter < BaseStage
def transform!
raise Errors::DashboardProcessingError.new(_('Environment is required for Stages::MetricEndpointInserter')) unless params[:environment]
raise Errors::DashboardProcessingError, _('Environment is required for Stages::MetricEndpointInserter') unless params[:environment]
for_metrics do |metric|
metric[:prometheus_endpoint_path] = endpoint_for_metric(metric)
......@@ -43,7 +43,7 @@ module Gitlab
def query_for_metric(metric)
query = metric[query_type(metric)]
raise Errors::MissingQueryError.new('Each "metric" must define one of :query or :query_range') unless query
raise Errors::MissingQueryError, 'Each "metric" must define one of :query or :query_range' unless query
# We need to remove any newlines since our UrlBlocker does not allow
# multiline URLs.
......
......@@ -8,7 +8,7 @@ module Gitlab
VARIABLE_TYPE_METRIC_LABEL_VALUES = 'metric_label_values'
def transform!
raise Errors::DashboardProcessingError.new(_('Environment is required for Stages::VariableEndpointInserter')) unless params[:environment]
raise Errors::DashboardProcessingError, _('Environment is required for Stages::VariableEndpointInserter') unless params[:environment]
for_variables do |variable_name, variable|
if variable.is_a?(Hash) && variable[:type] == VARIABLE_TYPE_METRIC_LABEL_VALUES
......
......@@ -5,7 +5,7 @@ module Gitlab
attr_reader :first_collection, :second_collection, :per_page
def initialize(*collections, per_page: nil)
raise ArgumentError.new('Only 2 collections are supported') if collections.size != 2
raise ArgumentError, 'Only 2 collections are supported' if collections.size != 2
@per_page = (per_page || Kaminari.config.default_per_page).to_i
@first_collection, @second_collection = collections
......
......@@ -14,7 +14,7 @@ module Gitlab
# descendants_base - An instance of ActiveRecord::Relation for which to
# get child objects. If omitted, ancestors_base is used.
def initialize(ancestors_base, descendants_base = ancestors_base, options: {})
raise ArgumentError.new("Model of ancestors_base does not match model of descendants_base") if ancestors_base.model != descendants_base.model
raise ArgumentError, "Model of ancestors_base does not match model of descendants_base" if ancestors_base.model != descendants_base.model
@ancestors_base = ancestors_base
@descendants_base = descendants_base
......
......@@ -32,8 +32,8 @@ module Gitlab
def rotate!(old_key:, new_key:)
old_key ||= Gitlab::Application.secrets.otp_key_base
raise ArgumentError.new("Old key is the same as the new key") if old_key == new_key
raise ArgumentError.new("New key is too short! Must be 256 bits") if new_key.size < 64
raise ArgumentError, "Old key is the same as the new key" if old_key == new_key
raise ArgumentError, "New key is too short! Must be 256 bits" if new_key.size < 64
write_csv do |csv|
ActiveRecord::Base.transaction do
......
......@@ -10,7 +10,7 @@ module Gitlab
def draw(routes_name)
drawn_any = draw_ee(routes_name) | draw_ce(routes_name)
drawn_any || raise(RoutesNotFound.new("Cannot find #{routes_name}"))
drawn_any || raise(RoutesNotFound, "Cannot find #{routes_name}")
end
def draw_ce(routes_name)
......
......@@ -13,7 +13,7 @@ module Gitlab
Response.parse!(response)
rescue *Gitlab::HTTP::HTTP_ERRORS => e
# Wrap all errors from the API into an API-error.
raise ApiError.new(e)
raise ApiError, e
end
private
......
......@@ -18,7 +18,7 @@ module Gitlab
response
rescue JSON::JSONError => e
raise ResponseError.new(e)
raise ResponseError, e
end
def initialize(json)
......
......@@ -14,7 +14,7 @@ module Gitlab
private
def validate!(obj)
raise ParsingError.new(obj.errors.full_messages.join('\n')) unless obj.valid?
raise ParsingError, obj.errors.full_messages.join('\n') unless obj.valid?
end
def group_from_entry(entry)
......
......@@ -50,7 +50,7 @@ module Gitlab
# @return [String] the encoded boolean
# @raise [NotABooleanError] if the value isn't true or false
def encode(value)
raise NotABooleanError.new(value) unless bool?(value)
raise NotABooleanError, value unless bool?(value)
[LABEL, to_string(value)].join(DELIMITER)
end
......@@ -61,11 +61,11 @@ module Gitlab
# @return [Boolean] true or false
# @raise [NotAnEncodedBooleanStringError] if the provided value isn't an encoded boolean
def decode(value)
raise NotAnEncodedBooleanStringError.new(value.class) unless value.is_a?(String)
raise NotAnEncodedBooleanStringError, value.class unless value.is_a?(String)
label, bool_str = *value.split(DELIMITER, 2)
raise NotAnEncodedBooleanStringError.new(label) unless label == LABEL
raise NotAnEncodedBooleanStringError, label unless label == LABEL
from_string(bool_str)
end
......@@ -99,7 +99,7 @@ module Gitlab
end
def from_string(str)
raise NotAnEncodedBooleanStringError.new(str) unless [TRUE_STR, FALSE_STR].include?(str)
raise NotAnEncodedBooleanStringError, str unless [TRUE_STR, FALSE_STR].include?(str)
str == TRUE_STR
end
......
......@@ -46,7 +46,7 @@ module Gitlab
def validate_key!(key)
return if KEY_REGEX.match?(key)
raise KeyFormatError.new("Invalid key format. #{key} key should have changeable parts in curly braces. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands")
raise KeyFormatError, "Invalid key format. #{key} key should have changeable parts in curly braces. See https://docs.gitlab.com/ee/development/redis.html#multi-key-commands"
end
end
end
......
......@@ -10,7 +10,7 @@ module Gitlab
when :http
http_url(path)
else
raise NotImplementedError.new("No URL builder defined for protocol #{protocol}")
raise NotImplementedError, "No URL builder defined for protocol #{protocol}"
end
end
......
......@@ -30,7 +30,7 @@ module Gitlab
rescue URI::InvalidURIError => e
# If url is invalid, raise custom error,
# which can be ignored by monitoring tools.
raise ActionController::RoutingError.new(e.message)
raise ActionController::RoutingError, e.message
end
end
......
......@@ -26,13 +26,9 @@ module Gitlab
if matches.one?
matches.first
elsif matches.none?
raise UnknownProcessError.new(
"Failed to identify runtime for process #{Process.pid} (#{$0})"
)
raise UnknownProcessError, "Failed to identify runtime for process #{Process.pid} (#{$0})"
else
raise AmbiguousProcessError.new(
"Ambiguous runtime #{matches} for process #{Process.pid} (#{$0})"
)
raise AmbiguousProcessError, "Ambiguous runtime #{matches} for process #{Process.pid} (#{$0})"
end
end
......
......@@ -50,7 +50,7 @@ module Gitlab
when :including then including
when :excluding then excluding
else
raise ArgumentError.new(type)
raise ArgumentError, type
end
end
end
......
......@@ -51,7 +51,7 @@ module Gitlab
def predicate_for_term(term)
match = term.match(QUERY_TERM_REGEX)
raise InvalidTerm.new("Invalid term: #{term}") unless match
raise InvalidTerm, "Invalid term: #{term}" unless match
_, lhs, op, rhs = *match
......@@ -67,14 +67,14 @@ module Gitlab
else
# This is unreachable because InvalidTerm will be raised instead, but
# keeping it allows to guard against that changing in future.
raise UnknownOperator.new("Unknown operator: #{op}")
raise UnknownOperator, "Unknown operator: #{op}"
end
end
def predicate_factory(lhs, values)
values_block = QUERY_PREDICATES[lhs.to_sym]
raise UnknownPredicate.new("Unknown predicate: #{lhs}") unless values_block
raise UnknownPredicate, "Unknown predicate: #{lhs}" unless values_block
lambda do |queue|
comparator = Array(queue[lhs.to_sym]).to_set
......
......@@ -109,7 +109,7 @@ module Gitlab
def run_command!(command)
output, status = Gitlab::Popen.popen(command)
raise Gitlab::TaskFailedError.new(output) unless status == 0
raise Gitlab::TaskFailedError, output unless status == 0
output
end
......
......@@ -22,7 +22,7 @@ module Gitlab
@regexp = RE2::Regexp.new(pattern, log_errors: false)
raise RegexpError.new(regexp.error) unless regexp.ok?
raise RegexpError, regexp.error unless regexp.ok?
end
def replace_all(text, rewrite)
......
......@@ -49,7 +49,7 @@ module Gitlab
when ::DesignManagement::Design
design_url(object, **options)
else
raise NotImplementedError.new("No URL builder defined for #{object.inspect}")
raise NotImplementedError, "No URL builder defined for #{object.inspect}"
end
end
# rubocop:enable Metrics/CyclomaticComplexity
......
......@@ -232,8 +232,8 @@ module Gitlab
# Compose the key in order to store events daily or weekly
def redis_key(event, time, context = '')
raise UnknownEvent.new("Unknown event #{event[:name]}") unless known_events_names.include?(event[:name].to_s)
raise UnknownAggregation.new("Use :daily or :weekly aggregation") unless ALLOWED_AGGREGATIONS.include?(event[:aggregation].to_sym)
raise UnknownEvent, "Unknown event #{event[:name]}" unless known_events_names.include?(event[:name].to_s)
raise UnknownAggregation, "Use :daily or :weekly aggregation" unless ALLOWED_AGGREGATIONS.include?(event[:aggregation].to_sym)
key = apply_slot(event)
key = apply_time_aggregation(key, time, event)
......
......@@ -16,7 +16,7 @@ module Gitlab
path_regex = /(\A(\.{1,2})\z|\A\.\.[\/\\]|[\/\\]\.\.\z|[\/\\]\.\.[\/\\]|\n)/
if path.match?(path_regex)
raise PathTraversalAttackError.new('Invalid path')
raise PathTraversalAttackError, 'Invalid path'
end
path
......
......@@ -43,12 +43,12 @@ module Gitlab
instance_method_defined?(parent, method_name)
end
raise NotImplementedError.new("#{klass}\##{method_name} doesn't exist!") unless overridden_parent
raise NotImplementedError, "#{klass}\##{method_name} doesn't exist!" unless overridden_parent
super_method_arity = find_direct_method(overridden_parent, method_name).arity
unless arity_compatible?(sub_method_arity, super_method_arity)
raise NotImplementedError.new("#{subject}\##{method_name} has arity of #{sub_method_arity}, but #{overridden_parent}\##{method_name} has arity of #{super_method_arity}")
raise NotImplementedError, "#{subject}\##{method_name} has arity of #{sub_method_arity}, but #{overridden_parent}\##{method_name} has arity of #{super_method_arity}"
end
end
......
......@@ -24,11 +24,11 @@ module Gitlab
end
def name
raise NotImplementedError.new
raise NotImplementedError
end
def describe(_object)
raise NotImplementedError.new
raise NotImplementedError
end
private
......@@ -77,27 +77,27 @@ module Gitlab
# This should return an ActiveRecord::Relation suitable for calling #in_batches on
def all_relation
raise NotImplementedError.new
raise NotImplementedError
end
# Should return true if the object is stored locally
def local?(_object)
raise NotImplementedError.new
raise NotImplementedError
end
# The checksum we expect the object to have
def expected_checksum(_object)
raise NotImplementedError.new
raise NotImplementedError
end
# The freshly-recalculated checksum of the object
def actual_checksum(_object)
raise NotImplementedError.new
raise NotImplementedError
end
# Be sure to perform a hard check of the remote object (don't just check DB value)
def remote_object_exists?(object)
raise NotImplementedError.new
raise NotImplementedError
end
end
end
......
......@@ -11,7 +11,7 @@ module Gitlab
attributes.each do |key, value|
if subject.respond_to?(key)
raise CannotOverrideMethodError.new("#{subject} already respond to #{key}!")
raise CannotOverrideMethodError, "#{subject} already respond to #{key}!"
end
define_singleton_method(key) { value }
......
......@@ -109,7 +109,7 @@ module Grafana
def from_ms_since_epoch(time)
return if time.nil?
raise Error.new('Expected milliseconds since epoch') unless ms_since_epoch?(time)
raise Error, 'Expected milliseconds since epoch' unless ms_since_epoch?(time)
new(cast_ms_to_time(time))
end
......
......@@ -52,12 +52,12 @@ module Mattermost
json_response = Gitlab::Json.parse(response.body, legacy_mode: true)
unless response.success?
raise Mattermost::ClientError.new(json_response['message'] || 'Undefined error')
raise Mattermost::ClientError, json_response['message'] || 'Undefined error'
end
json_response
rescue JSON::JSONError
raise Mattermost::ClientError.new('Cannot parse response')
raise Mattermost::ClientError, 'Cannot parse response'
end
end
end
......@@ -174,9 +174,9 @@ module Mattermost
def handle_exceptions
yield
rescue Gitlab::HTTP::Error => e
raise Mattermost::ConnectionError.new(e.message)
raise Mattermost::ConnectionError, e.message
rescue Errno::ECONNREFUSED => e
raise Mattermost::ConnectionError.new(e.message)
raise Mattermost::ConnectionError, e.message
end
def parse_cookie(response)
......
......@@ -404,7 +404,7 @@ module QA
end
def visible?
raise NoRequiredElementsError.new(self.class) if self.class.required_elements.empty?
raise NoRequiredElementsError, self.class if self.class.required_elements.empty?
self.class.required_elements.each do |required_element|
return false if has_no_element? required_element
......
......@@ -9,7 +9,7 @@ RSpec.describe GracefulTimeoutHandling, type: :controller do
skip_before_action :authenticate_user!
def index
raise ActiveRecord::QueryCanceled.new
raise ActiveRecord::QueryCanceled
end
end
......
......@@ -201,7 +201,7 @@ RSpec.describe Projects::LabelsController do
context 'service raising InvalidRecord' do
before do
expect_any_instance_of(Labels::PromoteService).to receive(:execute) do |label|
raise ActiveRecord::RecordInvalid.new(label_1)
raise ActiveRecord::RecordInvalid, label_1
end
end
......
......@@ -176,7 +176,7 @@ RSpec.describe Gitlab::Database do
closed_pool = pool
raise error.new('boom')
raise error, 'boom'
end
rescue error
end
......
......@@ -82,7 +82,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Access, :clean_gitlab_redis_cache
context 'when the request fails' do
before do
allow(fake_client).to receive(:request_access) do
raise ::Gitlab::ExternalAuthorization::RequestFailed.new('Service unavailable')
raise ::Gitlab::ExternalAuthorization::RequestFailed, 'Service unavailable'
end
end
......
......@@ -71,7 +71,7 @@ RSpec.describe Gitlab::ExternalAuthorization::Client do
end
it 'wraps exceptions if the request fails' do
expect(Gitlab::HTTP).to receive(:post) { raise Gitlab::HTTP::BlockedUrlError.new('the request broke') }
expect(Gitlab::HTTP).to receive(:post) { raise Gitlab::HTTP::BlockedUrlError, 'the request broke' }
expect { client.request_access }
.to raise_error(::Gitlab::ExternalAuthorization::RequestFailed)
......
......@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Git::WrapsGitalyErrors do
mapping.each do |grpc_error, error|
it "wraps #{grpc_error} in a #{error}" do
expect { wrapper.wrapped_gitaly_errors { raise grpc_error.new('wrapped') } }
expect { wrapper.wrapped_gitaly_errors { raise grpc_error, 'wrapped' } }
.to raise_error(error)
end
end
......
......@@ -178,7 +178,7 @@ RSpec.describe Ci::PipelineSchedule do
context 'when record is invalid' do
before do
allow(pipeline_schedule).to receive(:save!) { raise ActiveRecord::RecordInvalid.new(pipeline_schedule) }
allow(pipeline_schedule).to receive(:save!) { raise ActiveRecord::RecordInvalid, pipeline_schedule }
end
it 'nullifies the next run at' do
......
......@@ -86,7 +86,7 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
connection_double = double(:connection)
allow(Namespace).to receive(:connection).and_return(connection_double)
allow(connection_double).to receive(:exec_query) { raise ActiveRecord::Deadlocked.new }
allow(connection_double).to receive(:exec_query) { raise ActiveRecord::Deadlocked }
end
it { expect { subject }.to raise_error(ActiveRecord::Deadlocked) }
......
......@@ -13,7 +13,7 @@ RSpec.describe API::APIGuard::AdminModeMiddleware, :request_store do
let(:app) do
Class.new(API::API) do
get 'willfail' do
raise StandardError.new('oh noes!')
raise StandardError, 'oh noes!'
end
end
end
......
......@@ -39,7 +39,7 @@ RSpec.describe API::Helpers do
end
def error!(message, status, header)
raise StandardError.new("#{status} - #{message}")
raise StandardError, "#{status} - #{message}"
end
def set_param(key, value)
......
......@@ -84,7 +84,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save merge request' do
before do
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'does not yield block' do
......@@ -195,7 +195,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save' do
before do
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'does not yield block' do
......@@ -213,7 +213,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save merge request' do
before do
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'returns error status' do
......@@ -260,7 +260,7 @@ RSpec.describe AutoMerge::BaseService do
context 'when failed to save' do
before do
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid.new }
allow(merge_request).to receive(:save!) { raise ActiveRecord::RecordInvalid }
end
it 'returns error status' do
......
......@@ -41,7 +41,7 @@ RSpec.describe Branches::DeleteService do
context 'when Gitlab::Git::CommandError is raised' do
before do
allow(repository).to receive(:rm_branch) do
raise Gitlab::Git::CommandError.new('Could not update patch')
raise Gitlab::Git::CommandError, 'Could not update patch'
end
end
......
......@@ -25,7 +25,7 @@ RSpec.describe Ci::ParseDotenvArtifactService do
context 'when parse error happens' do
before do
allow(service).to receive(:scan_line!) { raise described_class::ParserError.new('Invalid Format') }
allow(service).to receive(:scan_line!) { raise described_class::ParserError, 'Invalid Format' }
end
it 'returns error' do
......
......@@ -94,7 +94,7 @@ RSpec.describe MergeRequests::MergeToRefService do
it 'returns an error when Gitlab::Git::CommandError is raised during merge' do
allow(project.repository).to receive(:merge_to_ref) do
raise Gitlab::Git::CommandError.new('Failed to create merge commit')
raise Gitlab::Git::CommandError, 'Failed to create merge commit'
end
result = service.execute(merge_request)
......
......@@ -18,7 +18,7 @@ module DnsHelpers
def stub_invalid_dns!
allow(Addrinfo).to receive(:getaddrinfo).with(/\Afoobar\.\w|(\d{1,3}\.){4,}\d{1,3}\z/i, anything, nil, :STREAM) do
raise SocketError.new("getaddrinfo: Name or service not known")
raise SocketError, "getaddrinfo: Name or service not known"
end
end
......
......@@ -22,7 +22,7 @@ module NextFoundInstanceOf
private
def check_if_active_record!(klass)
raise ArgumentError.new(ERROR_MESSAGE) unless klass < ActiveRecord::Base
raise ArgumentError, ERROR_MESSAGE unless klass < ActiveRecord::Base
end
def stub_allocate(target, klass)
......
......@@ -4,7 +4,7 @@ class Redis
ForbiddenCommand = Class.new(StandardError)
def keys(*args)
raise ForbiddenCommand.new("Don't use `Redis#keys` as it iterates over all "\
"keys in redis. Use `Redis#scan_each` instead.")
raise ForbiddenCommand, "Don't use `Redis#keys` as it iterates over all "\
"keys in redis. Use `Redis#scan_each` instead."
end
end
......@@ -20,7 +20,7 @@ class RequireMigration
class << self
def require_migration!(file_name)
file_paths = search_migration_file(file_name)
raise AutoLoadError.new(file_name) unless file_paths.first
raise AutoLoadError, file_name unless file_paths.first
require file_paths.first
end
......
......@@ -3,7 +3,7 @@
RSpec.configure do |config|
config.before(:each, :broken_storage) do
allow(Gitlab::GitalyClient).to receive(:call) do
raise GRPC::Unavailable.new('Gitaly broken in this spec')
raise GRPC::Unavailable, 'Gitaly broken in this spec'
end
end
end
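
For readers skimming the hunks above, a minimal sketch of the two raise styles the Style/RaiseArgs cop distinguishes; ExampleError is a hypothetical stand-in class, not one touched by this commit:

class ExampleError < StandardError; end

begin
  # Exploded style: pass the error class and the message as separate
  # arguments to Kernel#raise, as the changes above do throughout.
  raise ExampleError, "something went wrong"
rescue ExampleError => e
  puts e.message
end

# The compact style being rewritten away would read:
#   raise ExampleError.new("something went wrong")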