Commit c420df3d authored Mar 17, 2021 by GitLab Bot
Automatic merge of gitlab-org/gitlab master
Parents: 343876bb, c0e30987
Showing 20 changed files with 165 additions and 156 deletions (+165 -156).
GITALY_SERVER_VERSION (+1 -1)
app/assets/javascripts/pipeline_editor/components/header/pipeline_status.vue (+21 -11)
app/assets/javascripts/pipeline_editor/graphql/queries/client/pipeline.graphql (+1 -2)
app/assets/javascripts/pipeline_editor/graphql/resolvers.js (+0 -23)
config/initializers/active_record_preloader.rb (+1 -0)
ee/app/models/concerns/elastic/application_versioned_search.rb (+2 -0)
ee/changelogs/unreleased/324745-preload-notes-data-for-es-indexing.yml (+5 -0)
ee/changelogs/unreleased/make-notes-migration-more-efficient.yml (+5 -0)
ee/elastic/migrate/20210128163600_add_permissions_data_to_notes_documents.rb (+2 -2)
ee/lib/elastic/latest/application_class_proxy.rb (+6 -0)
ee/lib/elastic/latest/note_class_proxy.rb (+6 -0)
ee/lib/gitlab/elastic/document_reference.rb (+1 -2)
ee/spec/elastic/migrate/202101281636000_add_permissions_data_to_notes_documents_spec.rb (+7 -7)
ee/spec/lib/gitlab/elastic/document_reference_spec.rb (+11 -5)
ee/spec/services/elastic/process_bookkeeping_service_spec.rb (+28 -0)
lib/gitlab/tracking.rb (+2 -0)
qa/qa/specs/features/ee/browser_ui/4_verify/system_cancels_merge_request_in_merge_trains_spec.rb (+34 -39)
spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js (+16 -39)
spec/frontend/pipeline_editor/graphql/resolvers_spec.js (+0 -18)
spec/lib/gitlab/tracking_spec.rb (+16 -7)
GITALY_SERVER_VERSION

-0cc0f3d488f96261608d7c06261be8a0cce0d668
+eb6ecf6f96946849761c1fcdcaf0bda15fc000f9
app/assets/javascripts/pipeline_editor/components/header/pipeline_status.vue

<script>
import { GlIcon, GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { truncateSha } from '~/lib/utils/text_utility';
import { s__ } from '~/locale';
import getCommitSha from '~/pipeline_editor/graphql/queries/client/commit_sha.graphql';
import getPipelineQuery from '~/pipeline_editor/graphql/queries/client/pipeline.graphql';
import { toggleQueryPollingByVisibility } from '~/pipelines/components/graph/utils';
import CiIcon from '~/vue_shared/components/ci_icon.vue';

const POLL_INTERVAL = 10000;

...
@@ -38,13 +40,11 @@ export default {
      };
    },
    update: (data) => {
      const { id, commitPath = '', shortSha = '', detailedStatus = {} } = data.project?.pipeline || {};
      const { id, commitPath = '', detailedStatus = {} } = data.project?.pipeline || {};
      return {
        id,
        commitPath,
        shortSha,
        detailedStatus,
      };
    },
...
@@ -61,24 +61,34 @@ export default {
  },
  computed: {
    hasPipelineData() {
      return Boolean(this.$apollo.queries.pipeline?.id);
      return Boolean(this.pipeline?.id);
    },
    isQueryLoading() {
      return this.$apollo.queries.pipeline.loading && !this.hasPipelineData;
    pipelineId() {
      return getIdFromGraphQLId(this.pipeline.id);
    },
    showLoadingState() {
      // the query is set to poll regularly, so if there is no pipeline data
      // (e.g. pipeline is null during fetch when the pipeline hasn't been
      // triggered yet), we can just show the loading state until the pipeline
      // details are ready to be fetched
      return this.$apollo.queries.pipeline.loading || (!this.hasPipelineData && !this.hasError);
    },
    shortSha() {
      return truncateSha(this.commitSha);
    },
    status() {
      return this.pipeline.detailedStatus;
    },
    pipelineId() {
      return getIdFromGraphQLId(this.pipeline.id);
    },
  },
  mounted() {
    toggleQueryPollingByVisibility(this.$apollo.queries.pipeline, POLL_INTERVAL);
  },
};
</script>

<template>
  <div class="gl-white-space-nowrap gl-max-w-full">
    <template v-if="isQueryLoading">
    <template v-if="showLoadingState">
      <gl-loading-icon class="gl-mr-auto gl-display-inline-block" size="sm" />
      <span data-testid="pipeline-loading-msg">
        {{ $options.i18n.fetchLoading }}
      </span>
    </template>
...
@@ -110,7 +120,7 @@ export default {
          target="_blank"
          data-testid="pipeline-commit"
        >
          {{ pipeline.shortSha }}
          {{ shortSha }}
        </gl-link>
      </template>
    </gl-sprintf>
...
app/assets/javascripts/pipeline_editor/graphql/queries/client/pipeline.graphql

query getPipeline($fullPath: ID!, $sha: String!) {
  project(fullPath: $fullPath) @client {
  project(fullPath: $fullPath) {
    pipeline(sha: $sha) {
      commitPath
      id
      iid
      shortSha
      status
      detailedStatus {
        detailsPath
...
app/assets/javascripts/pipeline_editor/graphql/resolvers.js

...
@@ -11,29 +11,6 @@ export const resolvers = {
        }),
      };
    },
    /* eslint-disable @gitlab/require-i18n-strings */
    project() {
      return {
        __typename: 'Project',
        pipeline: {
          __typename: 'Pipeline',
          commitPath: `/-/commit/aabbccdd`,
          id: 'gid://gitlab/Ci::Pipeline/118',
          iid: '28',
          shortSha: 'aabbccdd',
          status: 'SUCCESS',
          detailedStatus: {
            __typename: 'DetailedStatus',
            detailsPath: '/root/sample-ci-project/-/pipelines/118"',
            group: 'success',
            icon: 'status_success',
            text: 'passed',
          },
        },
      };
    },
    /* eslint-enable @gitlab/require-i18n-strings */
  },
  Mutation: {
    lintCI: (_, { endpoint, content, dry_run }) => {
...
config/initializers/active_record_preloader.rb

...
@@ -9,6 +9,7 @@ module ActiveRecord
    end

    def self.run
      self
    end

    def self.preloaded_records
...
ee/app/models/concerns/elastic/application_versioned_search.rb

...
@@ -47,6 +47,8 @@ module Elastic
      after_commit :maintain_elasticsearch_create, on: :create, if: :maintaining_elasticsearch?
      after_commit :maintain_elasticsearch_update, on: :update, if: :maintaining_elasticsearch?
      after_commit :maintain_elasticsearch_destroy, on: :destroy, if: :maintaining_elasticsearch?

      scope :preload_indexing_data, -> { __elasticsearch__.preload_indexing_data(self) }
    end
  end
...
ee/changelogs/unreleased/324745-preload-notes-data-for-es-indexing.yml (new file, 0 → 100644)

---
title: Improve performance of indexing notes in Elasticsearch
merge_request: 56808
author:
type: performance
ee/changelogs/unreleased/make-notes-migration-more-efficient.yml (new file, 0 → 100644)

---
title: Improve performance of Elasticsearch notes permissions migration
merge_request: 56823
author:
type: performance
ee/elastic/migrate/20210128163600_add_permissions_data_to_notes_documents.rb

...
@@ -4,7 +4,7 @@ class AddPermissionsDataToNotesDocuments < Elastic::Migration
  batched!
  throttle_delay 3.minutes

  QUERY_BATCH_SIZE = 9_000
  QUERY_BATCH_SIZE = 6_000
  UPDATE_BATCH_SIZE = 100

  def migrate
...
@@ -30,7 +30,7 @@ class AddPermissionsDataToNotesDocuments < Elastic::Migration
    end

    document_references.each_slice(UPDATE_BATCH_SIZE) do |refs|
      Elastic::ProcessBookkeepingService.track!(*refs)
      Elastic::ProcessInitialBookkeepingService.track!(*refs)
    end

    log "Adding permission data to notes documents is completed for batch of #{document_references.size} documents"
...
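The two constants above control different stages of one migration pass: QUERY_BATCH_SIZE bounds how many note documents a pass pulls out of the index, and UPDATE_BATCH_SIZE slices that set into individual bookkeeping calls. A rough standalone sketch of that arithmetic, taking the 6_000 value shown above and using placeholder strings instead of real Gitlab::Elastic::DocumentReference objects:

# Sketch only: with these batch sizes, one pass enqueues the fetched documents in
# 100-item slices, i.e. up to 60 tracking calls per pass.
QUERY_BATCH_SIZE = 6_000
UPDATE_BATCH_SIZE = 100

document_references = Array.new(QUERY_BATCH_SIZE) { |i| "note-#{i}" } # placeholder refs

slices = document_references.each_slice(UPDATE_BATCH_SIZE).to_a
puts slices.size       # => 60
puts slices.first.size # => 100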
ee/lib/elastic/latest/application_class_proxy.rb

...
@@ -37,6 +37,12 @@ module Elastic
      self.import(options)
    end

    # Should be overriden in *ClassProxy for specific model if data needs to
    # be preloaded by #as_indexed_json method
    def preload_indexing_data(relation)
      relation
    end

    private

    def default_operator
...
ee/lib/elastic/latest/note_class_proxy.rb

...
@@ -24,6 +24,12 @@ module Elastic
      search(query_hash, options)
    end

    # rubocop: disable CodeReuse/ActiveRecord
    def preload_indexing_data(relation)
      relation.includes(noteable: :assignees)
    end
    # rubocop: enable CodeReuse/ActiveRecord

    private

    def confidentiality_filter(query_hash, options)
...
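Between them, application_versioned_search.rb, application_class_proxy.rb and note_class_proxy.rb set up a small template-method pattern: the model exposes a preload_indexing_data scope, the base proxy returns the relation unchanged, and a model-specific proxy adds whatever associations its as_indexed_json needs. A standalone sketch of that shape follows; FakeRelation, ApplicationProxy and NoteProxy are illustrative stand-ins, not GitLab classes.

# Illustrative sketch of the override pattern above; FakeRelation mimics the only
# ActiveRecord::Relation method that matters here (#includes).
class FakeRelation
  attr_reader :preloads

  def initialize
    @preloads = []
  end

  def includes(*associations)
    @preloads.concat(associations)
    self
  end
end

class ApplicationProxy
  # Default: nothing extra to preload, hand the relation back untouched.
  def preload_indexing_data(relation)
    relation
  end
end

class NoteProxy < ApplicationProxy
  # Mirrors NoteClassProxy in the diff: notes also need their noteable and its assignees.
  def preload_indexing_data(relation)
    relation.includes(noteable: :assignees)
  end
end

puts ApplicationProxy.new.preload_indexing_data(FakeRelation.new).preloads.inspect # no extra preloads
puts NoteProxy.new.preload_indexing_data(FakeRelation.new).preloads.inspect        # preloads noteable: :assignees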
ee/lib/gitlab/elastic/document_reference.rb

...
@@ -31,7 +31,7 @@ module Gitlab
      @refs.group_by(&:klass).each do |klass, group|
        ids = group.map(&:db_id)
        records = klass.id_in(ids)
        records = klass.id_in(ids).preload_indexing_data
        records_by_id = records.each_with_object({}) { |record, hash| hash[record.id] = record }

        group.each do |ref|
...
@@ -112,7 +112,6 @@ module Gitlab
      klass.to_s
    end

    # TODO: return a promise for batch loading: https://gitlab.com/gitlab-org/gitlab/issues/207280
    def database_record
      strong_memoize(:database_record) { klass.find_by_id(db_id) }
    end
...
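As the hunk above shows, preload_database_records groups the references by class and resolves each group with one id_in lookup, now chained with preload_indexing_data so association data comes along as well. A minimal standalone sketch of the "group by class, fetch once per group" idea, using a plain Struct in place of real document references:

# Sketch only: group references by their class and fetch each group in one batched
# call, instead of loading every record individually.
Ref = Struct.new(:klass, :db_id)

refs = [Ref.new('Issue', 1), Ref.new('Note', 7), Ref.new('Issue', 2)]

refs.group_by(&:klass).each do |klass, group|
  ids = group.map(&:db_id)
  puts "#{klass}: one batched lookup for ids #{ids.inspect}"
end
# Issue: one batched lookup for ids [1, 2]
# Note: one batched lookup for ids [7]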
ee/spec/elastic/migrate/202101281636000_add_permissions_data_to_notes_documents_spec.rb

...
@@ -32,7 +32,7 @@ RSpec.describe AddPermissionsDataToNotesDocuments, :elastic, :sidekiq_inline do
  context 'when migration is completed' do
    it 'does not queue documents for indexing' do
      expect(migration.completed?).to be_truthy
      expect(::Elastic::ProcessBookkeepingService).not_to receive(:track!)
      expect(::Elastic::ProcessInitialBookkeepingService).not_to receive(:track!)

      migration.migrate
    end
...
@@ -44,7 +44,7 @@ RSpec.describe AddPermissionsDataToNotesDocuments, :elastic, :sidekiq_inline do
    end

    it 'queues documents for indexing' do
      expect(::Elastic::ProcessBookkeepingService).to receive(:track!).once do |*tracked_refs|
      expect(::Elastic::ProcessInitialBookkeepingService).to receive(:track!).once do |*tracked_refs|
        expect(tracked_refs.count).to eq(4)
      end
...
@@ -55,7 +55,7 @@ RSpec.describe AddPermissionsDataToNotesDocuments, :elastic, :sidekiq_inline do
      add_permission_data_for_notes([note_on_issue, note_on_snippet, note_on_merge_request])

      expected = [Gitlab::Elastic::DocumentReference.new(Note, note_on_commit.id, note_on_commit.es_id, note_on_commit.es_parent)]
      expect(::Elastic::ProcessBookkeepingService).to receive(:track!).with(*expected).once
      expect(::Elastic::ProcessInitialBookkeepingService).to receive(:track!).with(*expected).once

      migration.migrate
    end
...
@@ -64,22 +64,22 @@ RSpec.describe AddPermissionsDataToNotesDocuments, :elastic, :sidekiq_inline do
      stub_const("#{described_class}::QUERY_BATCH_SIZE", 2)
      stub_const("#{described_class}::UPDATE_BATCH_SIZE", 1)

      allow(::Elastic::ProcessBookkeepingService).to receive(:track!).and_call_original
      allow(::Elastic::ProcessInitialBookkeepingService).to receive(:track!).and_call_original

      migration.migrate
      expect(::Elastic::ProcessBookkeepingService).to have_received(:track!).exactly(2).times
      expect(::Elastic::ProcessInitialBookkeepingService).to have_received(:track!).exactly(2).times

      ensure_elasticsearch_index!

      migration.migrate
      expect(::Elastic::ProcessBookkeepingService).to have_received(:track!).exactly(4).times
      expect(::Elastic::ProcessInitialBookkeepingService).to have_received(:track!).exactly(4).times

      ensure_elasticsearch_index!

      migration.migrate

      # The migration should have already finished so there are no more items to process
      expect(::Elastic::ProcessBookkeepingService).to have_received(:track!).exactly(4).times
      expect(::Elastic::ProcessInitialBookkeepingService).to have_received(:track!).exactly(4).times
      expect(migration).to be_completed
    end
  end
...
ee/spec/lib/gitlab/elastic/document_reference_spec.rb

...
@@ -200,22 +200,28 @@ RSpec.describe Gitlab::Elastic::DocumentReference do
    let(:note_ref2) { described_class.new(Note, note2.id, note2.es_id, note2.es_parent) }
    let(:note_ref_deleted) { described_class.new(Note, note_deleted.id, note_deleted.es_id, note_deleted.es_parent) }

    it 'preloads database records in one query per type' do
    it 'preloads database records to avoid N+1 queries' do
      collection = described_class::Collection.new
      collection.deserialize_and_add(issue_ref1.serialize)
      collection.deserialize_and_add(issue_ref2.serialize)
      collection.deserialize_and_add(note_ref1.serialize)
      control = ActiveRecord::QueryRecorder.new { collection.preload_database_records.map(&:database_record) }

      collection = described_class::Collection.new
      collection.deserialize_and_add(issue_ref1.serialize)
      collection.deserialize_and_add(note_ref1.serialize)
      collection.deserialize_and_add(issue_ref2.serialize)
      collection.deserialize_and_add(note_ref2.serialize)
      collection.deserialize_and_add(note_ref_deleted.serialize)

      database_records = nil
      expect do
        database_records = collection.preload_database_records.map { |ref| ref.database_record }
      end.not_to exceed_query_limit(2)
      end.not_to exceed_query_limit(control)

      expect(database_records[0]).to eq(issue1)
      expect(database_records[1]).to eq(issue2)
      expect(database_records[2]).to eq(note1)
      expect(database_records[1]).to eq(note1)
      expect(database_records[2]).to eq(issue2)
      expect(database_records[3]).to eq(note2)
      expect(database_records[4]).to eq(nil) # Deleted database record will be nil
    end
...
ee/spec/services/elastic/process_bookkeeping_service_spec.rb

...
@@ -208,6 +208,34 @@ RSpec.describe Elastic::ProcessBookkeepingService, :clean_gitlab_redis_shared_st
      expect(described_class.queue_size).to eq(1)
    end

    context 'N+1 queries' do
      it 'does not have N+1 queries for notes' do
        notes = []

        2.times do
          notes << create(:note)
          notes << create(:discussion_note_on_merge_request)
          notes << create(:note_on_merge_request)
          notes << create(:note_on_commit)
        end

        described_class.track!(*notes)

        control = ActiveRecord::QueryRecorder.new { described_class.new.execute }

        3.times do
          notes << create(:note)
          notes << create(:discussion_note_on_merge_request)
          notes << create(:note_on_merge_request)
          notes << create(:note_on_commit)
        end

        described_class.track!(*notes)

        expect { described_class.new.execute }.not_to exceed_all_query_limit(control)
      end
    end

    def expect_processing(*refs, failures: [])
      expect_next_instance_of(::Gitlab::Elastic::BulkIndexer) do |indexer|
        refs.each { |ref| expect(indexer).to receive(:process).with(ref) }
...
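The new N+1 spec above uses a baseline-comparison pattern: record how many queries a small batch of notes needs, enlarge the batch, and assert the count does not grow with it. A standalone sketch of that idea with a fake query counter standing in for ActiveRecord::QueryRecorder (all names below are illustrative):

# Sketch only: a workload free of N+1s issues a fixed number of "queries"
# regardless of how many items it processes.
class FakeCounter
  attr_reader :count

  def initialize
    @count = 0
  end

  def record(&work)
    @count = 0
    work.call
    @count
  end

  def query(_sql)
    @count += 1
  end
end

counter = FakeCounter.new
batched_workload = ->(items) { counter.query("SELECT * FROM notes WHERE id IN (#{items.join(',')})") }

control = counter.record { batched_workload.call([1, 2]) }          # baseline with 2 items
larger  = counter.record { batched_workload.call([1, 2, 3, 4, 5]) } # same workload, more items

puts larger <= control # => true: the query count did not grow with the input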
lib/gitlab/tracking.rb

...
@@ -14,6 +14,8 @@ module Gitlab
      snowplow.event(category, action, label: label, property: property, value: value, context: contexts)
      product_analytics.event(category, action, label: label, property: property, value: value, context: contexts)
    rescue => error
      Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error, snowplow_category: category, snowplow_action: action)
    end

    def self_describing_event(schema_url, data:, context: nil)
...
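The event method shown above fans each event out to both the Snowplow and product analytics destinations and rescues unexpected failures into Gitlab::ErrorTracking rather than letting every caller handle them. A standalone sketch of that shape; FakeDestination and track_event are illustrative stand-ins, not the GitLab API.

# Sketch only: forward one event to every destination, and turn an unexpected
# failure into a single error report instead of bubbling it up to the caller.
class FakeDestination
  def event(category, action)
    puts "tracked #{category}/#{action}"
  end
end

def track_event(destinations, category, action)
  destinations.each { |destination| destination.event(category, action) }
rescue => error
  # In the diff this is Gitlab::ErrorTracking.track_and_raise_for_dev_exception,
  # called with snowplow_category/snowplow_action for context.
  warn "tracking failed: #{error.message} (#{category}/#{action})"
end

track_event([FakeDestination.new, FakeDestination.new], 'pipelines', 'view')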
qa/qa/specs/features/ee/browser_ui/4_verify/system_cancels_merge_request_in_merge_trains_spec.rb

...
@@ -5,7 +5,7 @@ require 'faker'
module QA
  RSpec.describe 'Verify' do
    describe 'Merge train', :runner, :requires_admin do
      let(:file_name) { 'custom_file.txt' }
      let(:file_name) { Faker::Lorem.word }
      let(:mr_title) { Faker::Lorem.sentence }
      let(:executor) { "qa-runner-#{Faker::Alphanumeric.alphanumeric(8)}" }
...
@@ -23,25 +23,29 @@ module QA
        end
      end

      let!(:ci_file) do
      let!(:original_files) do
        Resource::Repository::Commit.fabricate_via_api! do |commit|
          commit.project = project
          commit.commit_message = 'Add .gitlab-ci.yml'
          commit.add_files(
            [
              {
                file_path: '.gitlab-ci.yml',
                content: <<~YAML
                  test_merge_train:
                    tags:
                      - #{executor}
                    script:
                      - sleep 3
                      - echo 'OK!'
                    only:
                      - merge_requests
                YAML
              }
              {
                file_path: '.gitlab-ci.yml',
                content: <<~YAML
                  test_merge_train:
                    tags:
                      - #{executor}
                    script:
                      - sleep 3
                      - echo 'OK!'
                    only:
                      - merge_requests
                YAML
              },
              {
                file_path: file_name,
                content: Faker::Lorem.sentence
              }
            ]
          )
        end
...
@@ -56,8 +60,15 @@ module QA
      let(:user_api_client) { Runtime::API::Client.new(:gitlab, user: user) }
      let(:admin_api_client) { Runtime::API::Client.as_admin }

      let(:merge_request) do
        Resource::MergeRequest.fabricate_via_api! do |merge_request|
      before do
        Runtime::Feature.enable(:invite_members_group_modal, project: project)
        Flow::Login.sign_in
        project.visit!
        Flow::MergeRequest.enable_merge_trains
        project.add_member(user, Resource::Members::AccessLevel::MAINTAINER)

        merge_request = Resource::MergeRequest.fabricate_via_api! do |merge_request|
          merge_request.api_client = user_api_client
          merge_request.title = mr_title
          merge_request.project = project
...
@@ -66,30 +77,20 @@ module QA
          merge_request.file_name = file_name
          merge_request.file_content = Faker::Lorem.sentence
        end
      end

      before do
        Runtime::Feature.enable(:invite_members_group_modal, project: project)
        Flow::Login.sign_in
        project.visit!
        Flow::MergeRequest.enable_merge_trains
        project.add_member(user, Resource::Members::AccessLevel::MAINTAINER)
        create_user_personal_access_token
        Flow::Login.sign_in(as: user)

        merge_request.visit!
        Page::MergeRequest::Show.perform do |show|
          show.has_pipeline_status?('passed')
          show.try_to_merge!
          show.wait_until(reload: false) { show.has_content? 'started a merge train' }
        end
      end

      after do
        runner.remove_via_api!
        user.remove_via_api!
        project.remove_via_api!
      end

      context 'when system cancels a merge request' do
...
@@ -99,12 +100,12 @@ module QA
          commit.api_client = user_api_client
          commit.project = project
          commit.commit_message = 'changing text file'
          commit.add_files(
          commit.update_files(
            [
              { file_path: file_name, content: Faker::Lorem.sentence }
              { file_path: file_name, content: 'Has to be different than before.' }
            ]
          )
        end
...
@@ -124,12 +125,6 @@ module QA
        end
      end
    end

      private

      def create_user_personal_access_token
        user_api_client.personal_access_token
      end
    end
  end
end
spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js

...
@@ -4,6 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineStatus, { i18n } from '~/pipeline_editor/components/header/pipeline_status.vue';
import getPipelineQuery from '~/pipeline_editor/graphql/queries/client/pipeline.graphql';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data';
...
@@ -19,32 +20,9 @@ describe('Pipeline Status', () => {
  let mockApollo;
  let mockPipelineQuery;

  const createComponent = ({ hasPipeline = true, isQueryLoading = false }) => {
    const pipeline = hasPipeline
      ? { loading: isQueryLoading, ...mockProjectPipeline.pipeline }
      : { loading: isQueryLoading };

    wrapper = shallowMount(PipelineStatus, {
      provide: mockProvide,
      stubs: { GlLink, GlSprintf },
      data: () => (hasPipeline ? { pipeline } : {}),
      mocks: {
        $apollo: {
          queries: {
            pipeline,
          },
        },
      },
    });
  };

  const createComponentWithApollo = () => {
    const resolvers = {
      Query: {
        project: mockPipelineQuery,
      },
    };
    mockApollo = createMockApollo([], resolvers);
    const handlers = [[getPipelineQuery, mockPipelineQuery]];
    mockApollo = createMockApollo(handlers);

    wrapper = shallowMount(PipelineStatus, {
      localVue,
...
@@ -78,16 +56,17 @@ describe('Pipeline Status', () => {
    wrapper = null;
  });

  describe('while querying', () => {
    it('renders loading icon', () => {
      createComponent({ isQueryLoading: true, hasPipeline: false });
  describe('loading icon', () => {
    it('renders while query is being fetched', () => {
      createComponentWithApollo();

      expect(findLoadingIcon().exists()).toBe(true);
      expect(findPipelineLoadingMsg().text()).toBe(i18n.fetchLoading);
    });

    it('does not render loading icon if pipeline data is already set', () => {
      createComponent({ isQueryLoading: true });
    it('does not render if query is no longer loading', async () => {
      createComponentWithApollo();
      await waitForPromises();

      expect(findLoadingIcon().exists()).toBe(false);
    });
...
@@ -96,7 +75,9 @@ describe('Pipeline Status', () => {
  describe('when querying data', () => {
    describe('when data is set', () => {
      beforeEach(async () => {
        mockPipelineQuery.mockResolvedValue(mockProjectPipeline);
        mockPipelineQuery.mockResolvedValue({
          data: { project: mockProjectPipeline },
        });

        createComponentWithApollo();
        await waitForPromises();
...
@@ -104,14 +85,10 @@ describe('Pipeline Status', () => {
      it('query is called with correct variables', async () => {
        expect(mockPipelineQuery).toHaveBeenCalledTimes(1);
        expect(mockPipelineQuery).toHaveBeenCalledWith(
          expect.anything(),
          { fullPath: mockProjectFullPath },
          expect.anything(),
          expect.anything(),
        );
        expect(mockPipelineQuery).toHaveBeenCalledWith({
          fullPath: mockProjectFullPath,
          sha: mockCommitSha,
        });
      });

      it('does not render error', () => {
...
spec/frontend/pipeline_editor/graphql/resolvers_spec.js

...
@@ -46,24 +46,6 @@ describe('~/pipeline_editor/graphql/resolvers', () => {
      await expect(result.rawData).resolves.toBe(mockCiYml);
    });
  });

  describe('pipeline', () => {
    it('resolves pipeline data with type names', async () => {
      const result = await resolvers.Query.project(null);

      // eslint-disable-next-line no-underscore-dangle
      expect(result.__typename).toBe('Project');
    });

    it('resolves pipeline data with necessary data', async () => {
      const result = await resolvers.Query.project(null);
      const pipelineKeys = Object.keys(result.pipeline);
      const statusKeys = Object.keys(result.pipeline.detailedStatus);

      expect(pipelineKeys).toContain('id', 'commitPath', 'detailedStatus', 'shortSha');
      expect(statusKeys).toContain('detailsPath', 'text');
    });
  });
});

describe('Mutation', () => {
...
spec/lib/gitlab/tracking_spec.rb

...
@@ -36,12 +36,12 @@ RSpec.describe Gitlab::Tracking do
  end

  describe '.event' do
    before do
      allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event)
      allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event)
    end

    shared_examples 'delegates to destination' do |klass|
      before do
        allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event)
        allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event)
      end

      it "delegates to #{klass} destination" do
        other_context = double(:context)
...
@@ -70,8 +70,17 @@ RSpec.describe Gitlab::Tracking do
      end
    end

    include_examples 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow
    include_examples 'delegates to destination', Gitlab::Tracking::Destinations::ProductAnalytics
    it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow
    it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::ProductAnalytics

    it 'tracks errors' do
      expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(
        an_instance_of(ContractError),
        snowplow_category: nil,
        snowplow_action: 'some_action'
      )

      described_class.event(nil, 'some_action')
    end
  end

  describe '.self_describing_event' do
...
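The spec switches from include_examples to it_behaves_like, presumably because it_behaves_like wraps the shared examples in their own nested example group, so the per-destination before block that now lives inside the shared example stays scoped to each usage instead of being pasted into, and stacking up in, the enclosing '.event' group. A minimal standalone illustration of the difference; the shared example and spec names below are made up.

# Sketch only: it_behaves_like nests the shared group, include_examples inlines it
# into the current context (so hooks and helpers from repeated uses would accumulate).
RSpec.shared_examples 'delegates to destination' do |name|
  before { @calls = (@calls || []) << name }

  it "delegates to #{name}" do
    expect(@calls).to include(name)
  end
end

RSpec.describe 'Tracking sketch' do
  # Each usage gets its own nested context, so the before hooks do not bleed
  # across destinations the way two include_examples calls in one group would.
  it_behaves_like 'delegates to destination', 'Snowplow'
  it_behaves_like 'delegates to destination', 'ProductAnalytics'
end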