Commit 38ecd59d authored Apr 07, 2022 by Andrejs Cunskis, committed Apr 07, 2022 by Chloe Liu
E2E: Large repo gitlab migration test
parent ef4b7929
Showing 4 changed files with 438 additions and 33 deletions (+438 -33)
qa/qa/resource/bulk_import_group.rb (+9 -7)
qa/qa/resource/project.rb (+7 -8)
qa/qa/specs/features/api/1_manage/import_large_github_repo_spec.rb (+22 -18)
qa/qa/specs/features/api/1_manage/migration/gitlab_migration_large_project_spec.rb (+400 -0)
qa/qa/resource/bulk_import_group.rb
@@ -7,10 +7,14 @@ module QA
         :destination_group,
         :import_id

-      attribute :access_token do
+      attribute :import_access_token do
         api_client.personal_access_token
       end

+      attribute :gitlab_address do
+        QA::Runtime::Scenario.gitlab_address
+      end
+
       # In most cases we will want to set path the same as source group
       # but it can be set to a custom name as well when imported via API
       attribute :destination_group_path do
@@ -19,18 +23,16 @@ module QA
       # Can't define path as attribue since @path is set in base class initializer
       alias_method :path, :destination_group_path

-      delegate :gitlab_address, to: 'QA::Runtime::Scenario'
-
-      def fabricate!
+      def fabricate_via_browser_ui!
         Page::Main::Menu.perform(&:go_to_create_group)

         Page::Group::New.perform do |group|
           group.switch_to_import_tab
-          group.connect_gitlab_instance(gitlab_address, api_client.personal_access_token)
+          group.connect_gitlab_instance(gitlab_address, import_access_token)
         end

         Page::Group::BulkImport.perform do |import_page|
-          import_page.import_group(path, sandbox.path)
+          import_page.import_group(destination_group_path, sandbox.full_path)
         end

         reload!
@@ -49,7 +51,7 @@ module QA
         {
           configuration: {
             url: gitlab_address,
-            access_token: access_token
+            access_token: import_access_token
           },
           entities: [
             {
qa/qa/resource/project.rb
@@ -343,16 +343,17 @@ module QA
        parse_body(response)
      end

-     def pipelines
-       response = get(request_url(api_pipelines_path))
-       parse_body(response)
-     end
-
      def pipeline_schedules
        response = get(request_url(api_pipeline_schedules_path))
        parse_body(response)
      end

+     def pipelines(auto_paginate: false, attempts: 0)
+       return parse_body(api_get_from(api_pipelines_path)) unless auto_paginate
+
+       auto_paginated_response(request_url(api_pipelines_path, per_page: '100'), attempts: attempts)
+     end
+
      def issues(auto_paginate: false, attempts: 0)
        return parse_body(api_get_from(api_issues_path)) unless auto_paginate
@@ -387,9 +388,7 @@ module QA
          api_resource[:import_status] == "finished"
        end

-       unless mirror_succeeded
-         raise "Mirroring failed with error: #{api_resource[:import_error]}"
-       end
+       raise "Mirroring failed with error: #{api_resource[:import_error]}" unless mirror_succeeded
      end
    end

    def remove_via_api!
qa/qa/specs/features/api/1_manage/import_large_github_repo_spec.rb
@@ -111,7 +111,7 @@ module QA
          user.remove_via_api! unless example.exception
          next unless defined?(@import_time)

-         # save data for comparison after run finished
+         # save data for comparison notification creation
          save_json(
            "data",
            {
@@ -121,6 +121,7 @@ module QA
              source: {
                name: "GitHub",
                project_name: github_repo,
+               data: {
                  branches: gh_branches.length,
                  commits: gh_commits.length,
                  labels: gh_labels.length,
@@ -129,10 +130,12 @@ module QA
                  mr_comments: gh_prs.sum { |_k, v| v[:comments].length },
                  issues: gh_issues.length,
                  issue_comments: gh_issues.sum { |_k, v| v[:comments].length }
+               }
              },
              target: {
                name: "GitLab",
                project_name: imported_project.path_with_namespace,
+               data: {
                  branches: gl_branches.length,
                  commits: gl_commits.length,
                  labels: gl_labels.length,
@@ -141,6 +144,7 @@ module QA
                  mr_comments: mrs.sum { |_k, v| v[:comments].length },
                  issues: gl_issues.length,
                  issue_comments: gl_issues.sum { |_k, v| v[:comments].length }
+               }
              },
              not_imported: {
                mrs: @mr_diff,
@@ -158,7 +162,7 @@ module QA
          start = Time.now

          # import the project and log gitlab path
-         Runtime::Logger.info("== Importing project '#{github_repo}' in to '#{imported_project.reload!.full_path}' ==")
+         logger.info("== Importing project '#{github_repo}' in to '#{imported_project.reload!.full_path}' ==")

          # fetch all objects right after import has started
          fetch_github_objects
qa/qa/specs/features/api/1_manage/migration/gitlab_migration_large_project_spec.rb (new file, mode 100644)
# frozen_string_literal: true

# rubocop:disable Rails/Pluck, Layout/LineLength, RSpec/MultipleMemoizedHelpers
module QA
  RSpec.describe "Manage", :requires_admin, only: { job: 'large-gitlab-import' } do
    describe "Gitlab migration" do
      let(:logger) { Runtime::Logger.logger }
      let(:differ) { RSpec::Support::Differ.new(color: true) }
      let(:gitlab_group) { 'gitlab-migration' }
      let(:gitlab_source_address) { "https://staging.gitlab.com" }

      let(:import_wait_duration) do
        {
          max_duration: (ENV['QA_LARGE_IMPORT_DURATION'] || 3600).to_i,
          sleep_interval: 30
        }
      end

      let(:admin_api_client) { Runtime::API::Client.as_admin }
      # explicitly create PAT via api to not create it via UI in environments where admin token env var is not present
      let(:target_api_client) do
        Runtime::API::Client.new(
          user: user,
          personal_access_token: Resource::PersonalAccessToken.fabricate_via_api! do |pat|
            pat.api_client = admin_api_client
          end.token
        )
      end

      let(:user) do
        Resource::User.fabricate_via_api! do |usr|
          usr.api_client = admin_api_client
        end
      end

      let(:source_api_client) do
        Runtime::API::Client.new(
          gitlab_source_address,
          personal_access_token: ENV["QA_LARGE_IMPORT_GL_TOKEN"],
          is_new_session: false
        )
      end

      let(:sandbox) do
        Resource::Sandbox.fabricate_via_api! do |group|
          group.api_client = admin_api_client
        end
      end

      let(:destination_group) do
        Resource::Group.fabricate_via_api! do |group|
          group.api_client = admin_api_client
          group.sandbox = sandbox
          group.path = "imported-group-destination-#{SecureRandom.hex(4)}"
        end
      end

      # Source group and it's objects
      #
      let(:source_group) do
        Resource::Sandbox.fabricate_via_api! do |group|
          group.api_client = source_api_client
          group.path = gitlab_group
        end
      end

      let(:source_project) { source_group.projects.find { |project| project.name.include?("dri") }.reload! }
      let(:source_branches) { source_project.repository_branches(auto_paginate: true).map { |b| b[:name] } }
      let(:source_commits) { source_project.commits(auto_paginate: true).map { |c| c[:id] } }
      let(:source_labels) { source_project.labels(auto_paginate: true).map { |l| l.except(:id) } }
      let(:source_milestones) { source_project.milestones(auto_paginate: true).map { |ms| ms.except(:id, :web_url, :project_id) } }
      let(:source_pipelines) { source_project.pipelines.map { |pp| pp.except(:id, :web_url, :project_id) } }
      let(:source_mrs) { fetch_mrs(source_project, source_api_client) }
      let(:source_issues) { fetch_issues(source_project, source_api_client) }

      # Imported group and it's objects
      #
      let(:imported_group) do
        Resource::BulkImportGroup.fabricate_via_api! do |group|
          group.import_access_token = source_api_client.personal_access_token # token for importing on source instance
          group.api_client = target_api_client # token used by qa framework to access resources in destination instance
          group.gitlab_address = gitlab_source_address
          group.source_group = source_group
          group.sandbox = destination_group
        end
      end

      let(:imported_project) { imported_group.projects.find { |project| project.name.include?("dri") }.reload! }
      let(:branches) { imported_project.repository_branches(auto_paginate: true).map { |b| b[:name] } }
      let(:commits) { imported_project.commits(auto_paginate: true).map { |c| c[:id] } }
      let(:labels) { imported_project.labels(auto_paginate: true).map { |l| l.except(:id) } }
      let(:milestones) { imported_project.milestones(auto_paginate: true).map { |ms| ms.except(:id, :web_url, :project_id) } }
      let(:pipelines) { imported_project.pipelines.map { |pp| pp.except(:id, :web_url, :project_id) } }
      let(:mrs) { fetch_mrs(imported_project, target_api_client) }
      let(:issues) { fetch_issues(imported_project, target_api_client) }

      before do
        Runtime::Feature.enable(:bulk_import_projects)

        destination_group.add_member(user, Resource::Members::AccessLevel::MAINTAINER)
      end

      # rubocop:disable RSpec/InstanceVariable
      after do |example|
        next unless defined?(@import_time)

        # save data for comparison notification creation
        save_json(
          "data",
          {
            importer: :gitlab,
            import_time: @import_time,
            source: {
              name: "GitLab Source",
              project_name: source_project.path_with_namespace,
              data: {
                branches: source_branches.length,
                commits: source_commits.length,
                labels: source_labels.length,
                milestones: source_milestones.length,
                pipelines: source_pipelines.length,
                mrs: source_mrs.length,
                mr_comments: source_mrs.sum { |_k, v| v[:comments].length },
                issues: source_issues.length,
                issue_comments: source_issues.sum { |_k, v| v[:comments].length }
              }
            },
            target: {
              name: "GitLab Target",
              project_name: imported_project.path_with_namespace,
              data: {
                branches: branches.length,
                commits: commits.length,
                labels: labels.length,
                milestones: milestones.length,
                pipelines: pipelines.length,
                mrs: mrs.length,
                mr_comments: mrs.sum { |_k, v| v[:comments].length },
                issues: issues.length,
                issue_comments: issues.sum { |_k, v| v[:comments].length }
              }
            },
            not_imported: {
              mrs: @mr_diff,
              issues: @issue_diff
            }
          }
        )
      end
      # rubocop:enable RSpec/InstanceVariable

      it "migrates large gitlab group via api" do
        start = Time.now

        # trigger import and log imported group path
        logger.info("== Importing group '#{gitlab_group}' in to '#{imported_group.full_path}' ==")

        # fetch all objects right after import has started
        fetch_source_gitlab_objects

        # wait for import to finish and save import time
        logger.info("== Waiting for import to be finished ==")
        expect { imported_group.import_status }.to eventually_eq('finished').within(import_wait_duration)
        @import_time = Time.now - start

        aggregate_failures do
          verify_repository_import
          verify_labels_import
          verify_milestones_import
          verify_pipelines_import
          verify_merge_requests_import
          verify_issues_import
        end
      end

      # Fetch source project objects for comparison
      #
      # @return [void]
      def fetch_source_gitlab_objects
        logger.info("== Fetching source group objects ==")

        source_branches
        source_commits
        source_labels
        source_milestones
        source_pipelines
        source_mrs
        source_issues
      end

      # Verify repository imported correctly
      #
      # @return [void]
      def verify_repository_import
        logger.info("== Verifying repository import ==")

        expect(imported_project.description).to eq(source_project.description)
        expect(branches).to match_array(source_branches)
        expect(commits).to match_array(source_commits)
      end

      # Verify imported labels
      #
      # @return [void]
      def verify_labels_import
        logger.info("== Verifying label import ==")

        expect(labels).to include(*source_labels)
      end

      # Verify milestones import
      #
      # @return [void]
      def verify_milestones_import
        logger.info("== Verifying milestones import ==")

        expect(milestones).to match_array(source_milestones)
      end

      # Verify pipelines import
      #
      # @return [void]
      def verify_pipelines_import
        logger.info("== Verifying pipelines import ==")

        expect(pipelines).to match_array(source_pipelines)
      end

      # Verify imported merge requests and mr issues
      #
      # @return [void]
      def verify_merge_requests_import
        logger.info("== Verifying merge request import ==")

        @mr_diff = verify_mrs_or_issues('mr')
      end

      # Verify imported issues and issue comments
      #
      # @return [void]
      def verify_issues_import
        logger.info("== Verifying issue import ==")

        @issue_diff = verify_mrs_or_issues('issue')
      end

      # Verify imported mrs or issues and return missing items
      #
      # @param [String] type verification object, 'mr' or 'issue'
      # @return [Hash]
      def verify_mrs_or_issues(type)
        # Compare length to have easy to read overview how many objects are missing
        #
        expected = type == 'mr' ? source_mrs : source_issues
        actual = type == 'mr' ? mrs : issues
        count_msg = "Expected to contain same amount of #{type}s. Source: #{expected.length}, Target: #{actual.length}"
        expect(actual.length).to eq(expected.length), count_msg

        missing_comments = verify_comments(type, actual, expected)

        {
          "#{type}s": (expected.keys - actual.keys).map { |it| actual[it].slice(:title, :url) },
          "#{type}_comments": missing_comments
        }
      end

      # Verify imported comments
      #
      # @param [String] type verification object, 'mrs' or 'issues'
      # @param [Hash] actual
      # @param [Hash] expected
      # @return [Hash]
      def verify_comments(type, actual, expected)
        actual.each_with_object([]) do |(key, actual_item), missing_comments|
          expected_item = expected[key]
          title = actual_item[:title]
          msg = "expected #{type} with title '#{title}' to have"

          # Print title in the error message to see which object is missing
          #
          expect(actual_item).to be_truthy, "#{msg} been imported"
          next unless expected_item

          # Print difference in the description
          #
          expected_body = expected_item[:body]
          actual_body = actual_item[:body]
          body_msg = <<~MSG
            #{msg} same description. diff:\n#{differ.diff(expected_body, actual_body)}
          MSG
          expect(actual_body).to eq(expected_body), body_msg

          # Print amount difference first
          #
          expected_comments = expected_item[:comments]
          actual_comments = actual_item[:comments]
          comment_count_msg = <<~MSG
            #{msg} same amount of comments. Source: #{expected_comments.length}, Target: #{actual_comments.length}
          MSG
          expect(actual_comments.length).to eq(expected_comments.length), comment_count_msg
          expect(actual_comments).to match_array(expected_comments)

          # Save missing comments
          #
          comment_diff = expected_comments - actual_comments
          next if comment_diff.empty?

          missing_comments << {
            title: title,
            target_url: actual_item[:url],
            source_url: expected_item[:url],
            missing_comments: comment_diff
          }
        end
      end

      private

      # Project merge requests with comments
      #
      # @param [QA::Resource::Project]
      # @param [Runtime::API::Client] client
      # @return [Hash]
      def fetch_mrs(project, client)
        imported_mrs = project.merge_requests(auto_paginate: true, attempts: 2)

        Parallel.map(imported_mrs, in_threads: 4) do |mr|
          resource = Resource::MergeRequest.init do |resource|
            resource.project = project
            resource.iid = mr[:iid]
            resource.api_client = client
          end

          [mr[:iid], {
            url: mr[:web_url],
            title: mr[:title],
            body: sanitize_description(mr[:description]) || '',
            comments: resource.comments(auto_paginate: true, attempts: 2).map { |c| sanitize_comment(c[:body]) }
          }]
        end.to_h
      end

      # Project issues with comments
      #
      # @param [QA::Resource::Project]
      # @param [Runtime::API::Client] client
      # @return [Hash]
      def fetch_issues(project, client)
        imported_issues = project.issues(auto_paginate: true, attempts: 2)

        Parallel.map(imported_issues, in_threads: 4) do |issue|
          resource = Resource::Issue.init do |issue_resource|
            issue_resource.project = project
            issue_resource.iid = issue[:iid]
            issue_resource.api_client = client
          end

          [issue[:iid], {
            url: issue[:web_url],
            title: issue[:title],
            body: sanitize_description(issue[:description]) || '',
            comments: resource.comments(auto_paginate: true, attempts: 2).map { |c| sanitize_comment(c[:body]) }
          }]
        end.to_h
      end

      # Importer user mention pattern
      #
      # @return [Regex]
      def created_by_pattern
        @created_by_pattern ||= /\n\n \*By gitlab-migration on \S+ \(imported from GitLab\)\*/
      end

      # Remove added prefixes and legacy diff format from comments
      #
      # @param [String] body
      # @return [String]
      def sanitize_comment(body)
        body&.gsub(created_by_pattern, "")
      end

      # Remove created by prefix from descripion
      #
      # @param [String] body
      # @return [String]
      def sanitize_description(body)
        body&.gsub(created_by_pattern, "")
      end

      # Save json as file
      #
      # @param [String] name
      # @param [Hash] json
      # @return [void]
      def save_json(name, json)
        File.open("tmp/#{name}.json", "w") { |file| file.write(JSON.pretty_generate(json)) }
      end
    end
  end
end
# rubocop:enable Rails/Pluck, Layout/LineLength, RSpec/MultipleMemoizedHelpers