Commit 62e0877d authored by Rémy Coutable

Merge branch '54230-eliminate-duplicated-words-in-apps' into 'master'

Eliminate duplicated words (in apps)

Closes #54230

See merge request gitlab-org/gitlab-ce!23184
parents fd2c20bc e902f462
@@ -124,7 +124,7 @@ export default class FileTemplateMediator {
   selectTemplateFile(selector, query, data) {
     selector.renderLoading();
-    // in case undo menu is already already there
+    // in case undo menu is already there
     this.destroyUndoMenu();
     this.fetchFileTemplate(selector.config.type, query, data)
       .then(file => {
...
@@ -22,7 +22,7 @@ export default class Labels {
   updateColorPreview() {
     const previewColor = $('input#label_color').val();
     return $('div.label-color-preview').css('background-color', previewColor);
-    // Updates the the preview color with the hex-color input
+    // Updates the preview color with the hex-color input
   }
   // Updates the preview color with a click on a suggested color
...
@@ -3,7 +3,7 @@
 # Overrides `as_json` and `to_json` to raise an exception when called in order
 # to prevent accidentally exposing attributes
 #
-# Not that that would ever happen... but just in case.
+# Not that would ever happen... but just in case.
 module BlocksJsonSerialization
   extend ActiveSupport::Concern
...
@@ -190,7 +190,7 @@ class Namespace < ActiveRecord::Base
       .base_and_ancestors
   end
-  # returns all ancestors upto but excluding the the given namespace
+  # returns all ancestors upto but excluding the given namespace
   # when no namespace is given, all ancestors upto the top are returned
   def ancestors_upto(top = nil)
     Gitlab::GroupHierarchy.new(self.class.where(id: id))
...
@@ -7,7 +7,7 @@ module Commits
   # - user: `User` that will be the committer
   # - params:
   #   - branch_name: `String` the branch that will be committed into
-  #   - start_branch: `String` the branch that will will started from
+  #   - start_branch: `String` the branch that will be started from
   #   - patches: `Gitlab::Git::Patches::Collection` that contains the patches
   def initialize(*args)
     super
...
@@ -50,7 +50,7 @@ class NotificationService
   # Always notify the user about gpg key added
   #
-  # This is a security email so it will be sent even if the user user disabled
+  # This is a security email so it will be sent even if the user disabled
   # notifications
   def new_gpg_key(gpg_key)
     if gpg_key.user&.can?(:receive_notifications)
...
@@ -26,7 +26,7 @@
     %h4.prepend-top-0
       Feed token
     %p
-      Your feed token is used to authenticate you when your RSS reader loads a personalized RSS feed or when when your calendar application loads a personalized calendar, and is included in those feed URLs.
+      Your feed token is used to authenticate you when your RSS reader loads a personalized RSS feed or when your calendar application loads a personalized calendar, and is included in those feed URLs.
     %p
       It cannot be used to access any other data.
   .col-lg-8.feed-token-reset
...
...@@ -12,5 +12,5 @@ require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE']) ...@@ -12,5 +12,5 @@ require 'bundler/setup' if File.exist?(ENV['BUNDLE_GEMFILE'])
begin begin
require 'bootsnap/setup' require 'bootsnap/setup'
rescue LoadError rescue LoadError
# bootsnap is optional dependency, so if we don't have it it's fine # bootsnap is an optional dependency, so if we don't have it, it's fine
end end
@@ -12,7 +12,7 @@ class AlterWebHookLogsIndexes < ActiveRecord::Migration
   disable_ddl_transaction!
   # "created_at" comes first so the Sidekiq worker pruning old webhook logs can
-  # use a composite index index.
+  # use a composite index.
   #
   # We leave the old standalone index on "web_hook_id" in place so future code
   # that doesn't care about "created_at" can still use that index.
...
@@ -8,7 +8,7 @@ module Gitlab
     # Class that rewrites markdown links for uploads
     #
     # Using a pattern defined in `FileUploader` it copies files to a new
-    # project and rewrites all links to uploads in in a given text.
+    # project and rewrites all links to uploads in a given text.
     #
     #
     class UploadsRewriter
...
@@ -441,7 +441,7 @@ module Gitlab
       gitaly_ref_client.find_ref_name(sha, ref_path)
     end
-    # Get refs hash which key is is the commit id
+    # Get refs hash which key is the commit id
     # and value is a Gitlab::Git::Tag or Gitlab::Git::Branch
     # Note that both inherit from Gitlab::Git::Ref
     def refs_hash
...
@@ -251,7 +251,7 @@ module Gitlab
       def validate_variable_usage(errors, translation, required_variables)
         # We don't need to validate when the message is empty.
-        # In this case we fall back to the default, which has all the the
+        # In this case we fall back to the default, which has all the
         # required variables.
         return if translation.empty?
...
@@ -6,10 +6,10 @@
 # used for rendering Markdown) are completely unnecessary and may even lead to
 # transaction timeouts.
 #
-# To ensure importing merge requests requests has a minimal impact and can
-# complete in a reasonable time we bypass all the hooks by inserting the row
-# and then retrieving it. We then only perform the additional work that is
-# strictly necessary.
+# To ensure importing merge requests has a minimal impact and can complete in
+# a reasonable time we bypass all the hooks by inserting the row and then
+# retrieving it. We then only perform the additional work that is strictly
+# necessary.
 module Gitlab
   module Import
     class MergeRequestCreator
...
@@ -152,7 +152,7 @@ describe Projects::BlobController do
       expect(match_line['meta_data']).to have_key('new_pos')
     end
-    it 'does not add top match line when when "since" is equal 1' do
+    it 'does not add top match line when "since" is equal 1' do
       do_get(since: 1, to: 10, offset: 10, from_merge_request: true)
       match_line = JSON.parse(response.body).first
...
@@ -42,7 +42,7 @@ describe 'Merge request > User assigns themselves' do
       visit project_merge_request_path(project, merge_request)
     end
-    it 'does not not show assignment link' do
+    it 'does not show assignment link' do
       expect(page).not_to have_content 'Assign yourself'
     end
   end
...
@@ -325,7 +325,7 @@ describe 'Merge request > User resolves diff notes and discussions', :js do
       end
     end
-    it 'allows user user to mark all discussions as resolved' do
+    it 'allows user to mark all discussions as resolved' do
       page.all('.discussion-reply-holder', count: 2).each do |reply_holder|
         page.within reply_holder do
           click_button 'Resolve discussion'
...
@@ -157,7 +157,7 @@ describe "User creates wiki page" do
      expect(page).to have_field("wiki[message]", with: "Create home")
    end
-   it "creates a page from from the home page" do
+   it "creates a page from the home page" do
      page.within(".wiki-form") do
        fill_in(:wiki_content, with: "My awesome wiki!")
...
@@ -12,7 +12,7 @@ describe PipelineSchedulesFinder do
    context 'when the scope is nil' do
      let(:params) { { scope: nil } }
-     it 'selects all pipeline pipeline schedules' do
+     it 'selects all pipeline schedules' do
        expect(subject.count).to be(2)
        expect(subject).to include(active_schedule, inactive_schedule)
      end
...
@@ -17,7 +17,7 @@ X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
  13 Jun 2013 14:03:48 -0700 (PDT)
 X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
-Is there any reason the *old* candy can't be be kept in silos while the new candy
+Is there any reason the *old* candy can't be kept in silos while the new candy
 is imported into *new* silos?
 The thing about candy is it stays delicious for a long time -- we can just keep
...
@@ -123,7 +123,7 @@ describe 'create_tokens' do
      create_tokens
    end
-   it 'sets the the keys to the values from the environment and secrets.yml' do
+   it 'sets the keys to the values from the environment and secrets.yml' do
      create_tokens
      expect(secrets.secret_key_base).to eq('secret_key_base')
...
@@ -29,7 +29,7 @@ describe('ide component', () => {
    resetStore(vm.$store);
  });
- it('does not render right right when no files open', () => {
+ it('does not render right when no files open', () => {
    expect(vm.$el.querySelector('.panel-right')).toBeNull();
  });
...
@@ -59,7 +59,7 @@ describe('IDE branches actions', () => {
  });
  describe('receiveBranchesError', () => {
-   it('should should commit error', done => {
+   it('should commit error', done => {
      testAction(
        receiveBranchesError,
        { search: TEST_SEARCH },
...
@@ -39,7 +39,7 @@ describe('IDE merge requests actions', () => {
  });
  describe('receiveMergeRequestsError', () => {
-   it('should should commit error', done => {
+   it('should commit error', done => {
      testAction(
        receiveMergeRequestsError,
        { type: 'created', search: '' },
...
@@ -28,7 +28,7 @@ describe Banzai::Filter::AbsoluteLinkFilter do
    end
    context 'if relative_url_root is set' do
-     it 'joins the url without without doubling the path' do
+     it 'joins the url without doubling the path' do
        allow(Gitlab.config.gitlab).to receive(:url).and_return("#{fake_url}/gitlab/")
        doc = filter(link("/gitlab/foo", 'gfm'), only_path_context)
        expect(doc.at_css('a')['href']).to eq "#{fake_url}/gitlab/foo"
...
@@ -498,7 +498,7 @@ describe Gitlab::Auth::OAuth::User do
    end
  end
- describe 'ensure backwards compatibility with with sync email from provider option' do
+ describe 'ensure backwards compatibility with sync email from provider option' do
    let!(:existing_user) { create(:omniauth_user, extern_uid: 'my-uid', provider: 'my-provider') }
    before do
...
@@ -4,7 +4,7 @@ describe Gitlab::Ci::Build::Policy::Changes do
  set(:project) { create(:project) }
  describe '#satisfied_by?' do
-   describe 'paths matching matching' do
+   describe 'paths matching' do
      let(:pipeline) do
        build(:ci_empty_pipeline, project: project,
              ref: 'master',
...
@@ -37,7 +37,7 @@ describe Gitlab::Ci::Config::External::File::Local do
  end
  describe '#content' do
-   context 'with a a valid file' do
+   context 'with a valid file' do
      let(:local_file_content) do
        <<~HEREDOC
          before_script:
...
@@ -93,7 +93,7 @@ describe Gitlab::Ci::Pipeline::Expression::Lexeme::String do
  end
  describe '#evaluate' do
-   it 'returns string value it is is present' do
+   it 'returns string value if it is present' do
      string = described_class.new('my string')
      expect(string.evaluate).to eq 'my string'
...
@@ -135,7 +135,7 @@ describe Gitlab::ContributionsCalendar do
      expect(calendar(contributor).events_by_date(today)).to contain_exactly(e1, e2, e3)
    end
-   context 'when the user cannot read read cross project' do
+   context 'when the user cannot read cross project' do
      before do
        allow(Ability).to receive(:allowed?).and_call_original
        expect(Ability).to receive(:allowed?).with(user, :read_cross_project) { false }
...
@@ -50,7 +50,7 @@ describe Gitlab::CrossProjectAccess::CheckInfo do
    expect(info.should_run?(dummy_controller)).to be_truthy
  end
- it 'returns the the opposite of #should_skip? when the check is a skip' do
+ it 'returns the opposite of #should_skip? when the check is a skip' do
    info = described_class.new({}, nil, nil, true)
    expect(info).to receive(:should_skip?).with(dummy_controller).and_return(false)
@@ -101,7 +101,7 @@ describe Gitlab::CrossProjectAccess::CheckInfo do
    expect(info.should_skip?(dummy_controller)).to be_truthy
  end
- it 'returns the the opposite of #should_run? when the check is not a skip' do
+ it 'returns the opposite of #should_run? when the check is not a skip' do
    info = described_class.new({}, nil, nil, false)
    expect(info).to receive(:should_run?).with(dummy_controller).and_return(false)
...
@@ -165,7 +165,7 @@ describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameNamespaces, :
  end
  describe '#rename_namespace_dependencies' do
-   it "moves the the repository for a project in the namespace" do
+   it "moves the repository for a project in the namespace" do
      create(:project, :repository, :legacy_storage, namespace: namespace, path: "the-path-project")
      expected_repo = File.join(TestEnv.repos_path, "the-path0", "the-path-project.git")
...
@@ -16,7 +16,7 @@ describe Gitlab::Diff::InlineDiffMarker do
    end
  end
- context "when the text text is not html safe" do
+ context "when the text is not html safe" do
    let(:rich) { "abc 'def' differs" }
    it 'marks the range' do
...
@@ -49,7 +49,7 @@ describe Gitlab::Email::ReplyParser do
      expect(test_parse_body(fixture_file("emails/paragraphs.eml")))
        .to eq(
          <<-BODY.strip_heredoc.chomp
-           Is there any reason the *old* candy can't be be kept in silos while the new candy
+           Is there any reason the *old* candy can't be kept in silos while the new candy
            is imported into *new* silos?
            The thing about candy is it stays delicious for a long time -- we can just keep
...
@@ -82,7 +82,7 @@ describe Gitlab::Git::MergeBase do
    end
  describe '#unknown_refs', :missing_ref do
-   it 'returns the the refs passed that are not part of the repository' do
+   it 'returns the refs passed that are not part of the repository' do
      expect(merge_base.unknown_refs).to contain_exactly('aaaa')
    end
...
@@ -28,7 +28,7 @@ describe Gitlab::MultiCollectionPaginator do
    expect(paginator.paginate(1)).to eq(all_projects.take(3))
  end
- it 'fils the second page with a mixture of of the first & second collection' do
+ it 'fils the second page with a mixture of the first & second collection' do
    first_collection_element = all_projects.last
    second_collection_elements = all_groups.take(2)
...
@@ -45,11 +45,11 @@ describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
      is_expected.to eq(%w[redis database fog])
    end
-   it 'returns redis store as the the lowest precedence' do
+   it 'returns redis store as the lowest precedence' do
      expect(subject.first).to eq('redis')
    end
-   it 'returns fog store as the the highest precedence' do
+   it 'returns fog store as the highest precedence' do
      expect(subject.last).to eq('fog')
    end
  end
...
@@ -22,13 +22,13 @@ describe List do
  end
  describe '#destroy' do
-   it 'can be destroyed when when list_type is set to label' do
+   it 'can be destroyed when list_type is set to label' do
      subject = create(:list)
      expect(subject.destroy).to be_truthy
    end
-   it 'can not be destroyed when when list_type is set to closed' do
+   it 'can not be destroyed when list_type is set to closed' do
      subject = create(:closed_list)
      expect(subject.destroy).to be_falsey
...
@@ -70,7 +70,7 @@ describe Ci::PipelineSchedulePolicy, :models do
      pipeline_schedule.update(owner: user)
    end
-   it 'includes abilities to do do all operations on pipeline schedule' do
+   it 'includes abilities to do all operations on pipeline schedule' do
      expect(policy).to be_allowed :play_pipeline_schedule
      expect(policy).to be_allowed :update_pipeline_schedule
      expect(policy).to be_allowed :admin_pipeline_schedule
@@ -82,7 +82,7 @@ describe Ci::PipelineSchedulePolicy, :models do
      project.add_maintainer(user)
    end
-   it 'includes abilities to do do all operations on pipeline schedule' do
+   it 'includes abilities to do all operations on pipeline schedule' do
      expect(policy).to be_allowed :play_pipeline_schedule
      expect(policy).to be_allowed :update_pipeline_schedule
      expect(policy).to be_allowed :admin_pipeline_schedule
...
@@ -223,7 +223,7 @@ describe ProjectPolicy do
      expect_disallowed(*other_write_abilities)
    end
-   it 'does not disable other other abilities' do
+   it 'does not disable other abilities' do
      expect_allowed(*(regular_abilities - feature_write_abilities - other_write_abilities))
    end
  end
...
@@ -95,7 +95,7 @@ describe MergeRequests::MergeWhenPipelineSucceedsService do
             sha: '1234abcdef', status: 'success')
    end
-   it 'it does not merge merge request' do
+   it 'it does not merge request' do
      expect(MergeWorker).not_to receive(:perform_async)
      service.trigger(old_pipeline)
    end
...
@@ -7,7 +7,7 @@ describe Users::SetStatusService do
  subject(:service) { described_class.new(current_user, params) }
  describe '#execute' do
-   context 'when when params are set' do
+   context 'when params are set' do
      let(:params) { { emoji: 'taurus', message: 'a random status' } }
      it 'creates a status' do
...
@@ -123,7 +123,7 @@ module ExportFileHelper
    false
  end
- # Compares model attributes with those those found in the hash
+ # Compares model attributes with those found in the hash
  # and returns true if there is a match, ignoring some excluded attributes.
  def safe_model?(model, excluded_attributes, parent)
    excluded_attributes += associations_for(model)
...