Commit 561e1b47 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 7b52c7cb
...@@ -476,12 +476,12 @@ const Api = { ...@@ -476,12 +476,12 @@ const Api = {
return axios.get(url); return axios.get(url);
}, },
lsifData(projectPath, commitId, path) { lsifData(projectPath, commitId, paths) {
const url = Api.buildUrl(this.lsifPath) const url = Api.buildUrl(this.lsifPath)
.replace(':id', encodeURIComponent(projectPath)) .replace(':id', encodeURIComponent(projectPath))
.replace(':commit_id', commitId); .replace(':commit_id', commitId);
return axios.get(url, { params: { path } }); return axios.get(url, { params: { paths } });
}, },
environments(id) { environments(id) {
......
...@@ -13,9 +13,10 @@ export default { ...@@ -13,9 +13,10 @@ export default {
commit(types.REQUEST_DATA); commit(types.REQUEST_DATA);
api api
.lsifData(state.projectPath, state.commitId, state.blobPath) .lsifData(state.projectPath, state.commitId, [state.blobPath])
.then(({ data }) => { .then(({ data }) => {
const normalizedData = data.reduce((acc, d) => { const dataForPath = data[state.blobPath];
const normalizedData = dataForPath.reduce((acc, d) => {
if (d.hover) { if (d.hover) {
acc[`${d.start_line}:${d.start_char}`] = d; acc[`${d.start_line}:${d.start_char}`] = d;
addInteractionClass(d); addInteractionClass(d);
......
...@@ -38,7 +38,7 @@ module CycleAnalyticsParams ...@@ -38,7 +38,7 @@ module CycleAnalyticsParams
end end
def to_utc_time(field) def to_utc_time(field)
date = field.is_a?(Date) ? field : Date.parse(field) date = field.is_a?(Date) || field.is_a?(Time) ? field : Date.parse(field)
date.to_time.utc date.to_time.utc
end end
end end
......
...@@ -10,6 +10,11 @@ module Resolvers ...@@ -10,6 +10,11 @@ module Resolvers
def resolve(**args) def resolve(**args)
return Snippet.none if project.nil? return Snippet.none if project.nil?
unless project.feature_available?(:snippets, current_user)
raise Gitlab::Graphql::Errors::ResourceNotAvailable,
'Snippets are not enabled for this Project'
end
super super
end end
......
...@@ -2039,6 +2039,16 @@ class Project < ApplicationRecord ...@@ -2039,6 +2039,16 @@ class Project < ApplicationRecord
end end
end end
def change_repository_storage(new_repository_storage_key)
return if repository_read_only?
return if repository_storage == new_repository_storage_key
raise ArgumentError unless ::Gitlab.config.repositories.storages.key?(new_repository_storage_key)
run_after_commit { ProjectUpdateRepositoryStorageWorker.perform_async(id, new_repository_storage_key) }
self.repository_read_only = true
end
def pushes_since_gc def pushes_since_gc
Gitlab::Redis::SharedState.with { |redis| redis.get(pushes_since_gc_redis_shared_state_key).to_i } Gitlab::Redis::SharedState.with { |redis| redis.get(pushes_since_gc_redis_shared_state_key).to_i }
end end
......
...@@ -469,6 +469,8 @@ class ProjectPolicy < BasePolicy ...@@ -469,6 +469,8 @@ class ProjectPolicy < BasePolicy
prevent :create_pipeline prevent :create_pipeline
end end
rule { admin }.enable :change_repository_storage
private private
def team_member? def team_member?
......
...@@ -53,7 +53,7 @@ module Projects ...@@ -53,7 +53,7 @@ module Projects
#### Summary #### Summary
#{metadata_list} #{metadata_list}
#{alert_details} #{alert_details}#{metric_embed_for_alert}
MARKDOWN MARKDOWN
end end
...@@ -118,6 +118,10 @@ module Projects ...@@ -118,6 +118,10 @@ module Projects
def host_links def host_links
Array(hosts.value).join(' ') Array(hosts.value).join(' ')
end end
def metric_embed_for_alert; end
end end
end end
end end
Projects::Prometheus::AlertPresenter.prepend_if_ee('EE::Projects::Prometheus::AlertPresenter')
...@@ -3,18 +3,15 @@ ...@@ -3,18 +3,15 @@
class SnippetBlobPresenter < BlobPresenter class SnippetBlobPresenter < BlobPresenter
def rich_data def rich_data
return if blob.binary? return if blob.binary?
return unless blob.rich_viewer
if markup? render_rich_partial
blob.rendered_markup
else
highlight(plain: false)
end
end end
def plain_data def plain_data
return if blob.binary? return if blob.binary?
highlight(plain: !markup?) highlight(plain: false)
end end
def raw_path def raw_path
...@@ -27,10 +24,6 @@ class SnippetBlobPresenter < BlobPresenter ...@@ -27,10 +24,6 @@ class SnippetBlobPresenter < BlobPresenter
private private
def markup?
blob.rich_viewer&.partial_name == 'markup'
end
def snippet def snippet
blob.container blob.container
end end
...@@ -38,4 +31,18 @@ class SnippetBlobPresenter < BlobPresenter ...@@ -38,4 +31,18 @@ class SnippetBlobPresenter < BlobPresenter
def language def language
nil nil
end end
def render_rich_partial
renderer.render("projects/blob/viewers/_#{blob.rich_viewer.partial_name}",
locals: { viewer: blob.rich_viewer, blob: blob, blob_raw_path: raw_path },
layout: false)
end
def renderer
proxy = Warden::Proxy.new({}, Warden::Manager.new({})).tap do |proxy_instance|
proxy_instance.set_user(current_user, scope: :user)
end
ApplicationController.renderer.new('warden' => proxy)
end
end end
...@@ -61,10 +61,15 @@ module Projects ...@@ -61,10 +61,15 @@ module Projects
end end
def filter_by_name(tags) def filter_by_name(tags)
regex = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex']}\\z") # Technical Debt: https://gitlab.com/gitlab-org/gitlab/issues/207267
# name_regex to be removed when container_expiration_policies is updated
# to have both regex columns
regex_delete = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_delete'] || params['name_regex']}\\z")
regex_retain = Gitlab::UntrustedRegexp.new("\\A#{params['name_regex_keep']}\\z")
tags.select do |tag| tags.select do |tag|
regex.scan(tag.name).any? # regex_retain will override any overlapping matches by regex_delete
regex_delete.match?(tag.name) && !regex_retain.match?(tag.name)
end end
end end
......
...@@ -2,20 +2,22 @@ ...@@ -2,20 +2,22 @@
module Projects module Projects
class LsifDataService class LsifDataService
attr_reader :file, :project, :path, :commit_id, attr_reader :file, :project, :commit_id, :docs,
:docs, :doc_ranges, :ranges, :def_refs, :hover_refs :doc_ranges, :ranges, :def_refs, :hover_refs
CACHE_EXPIRE_IN = 1.hour CACHE_EXPIRE_IN = 1.hour
def initialize(file, project, params) def initialize(file, project, commit_id)
@file = file @file = file
@project = project @project = project
@path = params[:path] @commit_id = commit_id
@commit_id = params[:commit_id]
end
def execute
fetch_data! fetch_data!
end
def execute(path)
doc_id = find_doc_id(docs, path)
dir_absolute_path = docs[doc_id]&.delete_suffix(path)
doc_ranges[doc_id]&.map do |range_id| doc_ranges[doc_id]&.map do |range_id|
location, ref_id = ranges[range_id].values_at('loc', 'ref_id') location, ref_id = ranges[range_id].values_at('loc', 'ref_id')
...@@ -26,7 +28,7 @@ module Projects ...@@ -26,7 +28,7 @@ module Projects
end_line: line_data.last, end_line: line_data.last,
start_char: column_data.first, start_char: column_data.first,
end_char: column_data.last, end_char: column_data.last,
definition_url: definition_url_for(def_refs[ref_id]), definition_url: definition_url_for(def_refs[ref_id], dir_absolute_path),
hover: highlighted_hover(hover_refs[ref_id]) hover: highlighted_hover(hover_refs[ref_id])
} }
end end
...@@ -58,8 +60,8 @@ module Projects ...@@ -58,8 +60,8 @@ module Projects
@hover_refs = data['hover_refs'] @hover_refs = data['hover_refs']
end end
def doc_id def find_doc_id(docs, path)
@doc_id ||= docs.reduce(nil) do |doc_id, (id, doc_path)| docs.reduce(nil) do |doc_id, (id, doc_path)|
next doc_id unless doc_path =~ /#{path}$/ next doc_id unless doc_path =~ /#{path}$/
if doc_id.nil? || docs[doc_id].size > doc_path.size if doc_id.nil? || docs[doc_id].size > doc_path.size
...@@ -70,11 +72,7 @@ module Projects ...@@ -70,11 +72,7 @@ module Projects
end end
end end
def dir_absolute_path def definition_url_for(ref_id, dir_absolute_path)
@dir_absolute_path ||= docs[doc_id]&.delete_suffix(path)
end
def definition_url_for(ref_id)
return unless range = ranges[ref_id] return unless range = ranges[ref_id]
def_doc_id, location = range.values_at('doc_id', 'loc') def_doc_id, location = range.values_at('doc_id', 'loc')
......
# frozen_string_literal: true
module Projects
class UpdateRepositoryStorageService < BaseService
include Gitlab::ShellAdapter
RepositoryAlreadyMoved = Class.new(StandardError)
def initialize(project)
@project = project
end
def execute(new_repository_storage_key)
# Raising an exception is a little heavy handed but this behavior (doing
# nothing if the repo is already on the right storage) prevents data
# loss, so it is valuable for us to be able to observe it via the
# exception.
raise RepositoryAlreadyMoved if project.repository_storage == new_repository_storage_key
if mirror_repositories(new_repository_storage_key)
mark_old_paths_for_archive
project.update(repository_storage: new_repository_storage_key, repository_read_only: false)
project.leave_pool_repository
project.track_project_repository
enqueue_housekeeping
else
project.update(repository_read_only: false)
end
end
private
def mirror_repositories(new_repository_storage_key)
result = mirror_repository(new_repository_storage_key)
if project.wiki.repository_exists?
result &&= mirror_repository(new_repository_storage_key, type: Gitlab::GlRepository::WIKI)
end
result
end
def mirror_repository(new_storage_key, type: Gitlab::GlRepository::PROJECT)
return false unless wait_for_pushes(type)
repository = type.repository_for(project)
full_path = repository.full_path
raw_repository = repository.raw
# Initialize a git repository on the target path
gitlab_shell.create_repository(new_storage_key, raw_repository.relative_path, full_path)
new_repository = Gitlab::Git::Repository.new(new_storage_key,
raw_repository.relative_path,
raw_repository.gl_repository,
full_path)
new_repository.fetch_repository_as_mirror(raw_repository)
end
def mark_old_paths_for_archive
old_repository_storage = project.repository_storage
new_project_path = moved_path(project.disk_path)
# Notice that the block passed to `run_after_commit` will run with `project`
# as its context
project.run_after_commit do
GitlabShellWorker.perform_async(:mv_repository,
old_repository_storage,
disk_path,
new_project_path)
if wiki.repository_exists?
GitlabShellWorker.perform_async(:mv_repository,
old_repository_storage,
wiki.disk_path,
"#{new_project_path}.wiki")
end
end
end
def moved_path(path)
"#{path}+#{project.id}+moved+#{Time.now.to_i}"
end
# The underlying FetchInternalRemote call uses a `git fetch` to move data
# to the new repository, which leaves it in a less-well-packed state,
# lacking bitmaps and commit graphs. Housekeeping will boost performance
# significantly.
def enqueue_housekeeping
return unless Gitlab::CurrentSettings.housekeeping_enabled?
return unless Feature.enabled?(:repack_after_shard_migration, project)
Projects::HousekeepingService.new(project, :gc).execute
rescue Projects::HousekeepingService::LeaseTaken
# No action required
end
def wait_for_pushes(type)
reference_counter = project.reference_counter(type: type)
# Try for 30 seconds, polling every 10
3.times do
return true if reference_counter.value == 0
sleep 10
end
false
end
end
end
Projects::UpdateRepositoryStorageService.prepend_if_ee('EE::Projects::UpdateRepositoryStorageService')
...@@ -13,6 +13,10 @@ module Projects ...@@ -13,6 +13,10 @@ module Projects
ensure_wiki_exists if enabling_wiki? ensure_wiki_exists if enabling_wiki?
if changing_storage_size?
project.change_repository_storage(params.delete(:repository_storage))
end
yield if block_given? yield if block_given?
validate_classification_label(project, :external_authorization_classification_label) validate_classification_label(project, :external_authorization_classification_label)
...@@ -140,6 +144,13 @@ module Projects ...@@ -140,6 +144,13 @@ module Projects
def changing_pages_https_only? def changing_pages_https_only?
project.previous_changes.include?(:pages_https_only) project.previous_changes.include?(:pages_https_only)
end end
def changing_storage_size?
new_repository_storage = params[:repository_storage]
new_repository_storage && project.repository.exists? &&
can?(current_user, :change_repository_storage, project)
end
end end
end end
......
...@@ -1151,6 +1151,13 @@ ...@@ -1151,6 +1151,13 @@
:resource_boundary: :unknown :resource_boundary: :unknown
:weight: 1 :weight: 1
:idempotent: :idempotent:
- :name: project_update_repository_storage
:feature_category: :source_code_management
:has_external_dependencies:
:urgency: :default
:resource_boundary: :unknown
:weight: 1
:idempotent:
- :name: propagate_service_template - :name: propagate_service_template
:feature_category: :source_code_management :feature_category: :source_code_management
:has_external_dependencies: :has_external_dependencies:
......
# frozen_string_literal: true
class ProjectUpdateRepositoryStorageWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
feature_category :source_code_management
def perform(project_id, new_repository_storage_key)
project = Project.find(project_id)
::Projects::UpdateRepositoryStorageService.new(project).execute(new_repository_storage_key)
rescue ::Projects::UpdateRepositoryStorageService::RepositoryAlreadyMoved
Rails.logger.info "#{self.class}: repository already moved: #{project}" # rubocop:disable Gitlab/RailsLogger
end
end
---
title: Add name_regex_keep param to container registry bulk delete API endpoint
merge_request: 25484
author:
type: added
---
title: Project Snippets GraphQL resolver checks feature status
merge_request: 26158
author:
type: performance
---
title: Fix snippet blob viewers for rich and plain data
merge_request: 25945
author:
type: fixed
---
title: "Backport API support to move between repository storages/shards"
merge_request: 18721
author: Ben Bodenmiller
type: added
---
title: Automatically include embedded metrics for GitLab alert incidents
merge_request: 25277
author:
type: added
...@@ -231,7 +231,9 @@ DELETE /projects/:id/registry/repositories/:repository_id/tags ...@@ -231,7 +231,9 @@ DELETE /projects/:id/registry/repositories/:repository_id/tags
| --------- | ---- | -------- | ----------- | | --------- | ---- | -------- | ----------- |
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. | | `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user. |
| `repository_id` | integer | yes | The ID of registry repository. | | `repository_id` | integer | yes | The ID of registry repository. |
| `name_regex` | string | yes | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`.| | `name_regex` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`. **Note:** `name_regex` is deprecated in favor of `name_regex_delete`.|
| `name_regex_delete` | string | yes | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to delete. To delete all tags specify `.*`.|
| `name_regex_keep` | string | no | The [re2](https://github.com/google/re2/wiki/Syntax) regex of the name to keep. This value will override any matches from `name_regex_delete`. Note: setting to `.*` will result in a no-op. |
| `keep_n` | integer | no | The amount of latest tags of given name to keep. | | `keep_n` | integer | no | The amount of latest tags of given name to keep. |
| `older_than` | string | no | Tags to delete that are older than the given time, written in human readable form `1h`, `1d`, `1month`. | | `older_than` | string | no | Tags to delete that are older than the given time, written in human readable form `1h`, `1d`, `1month`. |
...@@ -239,7 +241,7 @@ This API call performs the following operations: ...@@ -239,7 +241,7 @@ This API call performs the following operations:
1. It orders all tags by creation date. The creation date is the time of the 1. It orders all tags by creation date. The creation date is the time of the
manifest creation, not the time of tag push. manifest creation, not the time of tag push.
1. It removes only the tags matching the given `name_regex`. 1. It removes only the tags matching the given `name_regex_delete` (or deprecated `name_regex`), keeping any that match `name_regex_keep`.
1. It never removes the tag named `latest`. 1. It never removes the tag named `latest`.
1. It keeps N latest matching tags (if `keep_n` is specified). 1. It keeps N latest matching tags (if `keep_n` is specified).
1. It only removes tags that are older than X amount of time (if `older_than` is specified). 1. It only removes tags that are older than X amount of time (if `older_than` is specified).
...@@ -261,17 +263,23 @@ Examples: ...@@ -261,17 +263,23 @@ Examples:
and remove ones that are older than 2 days: and remove ones that are older than 2 days:
```shell ```shell
curl --request DELETE --data 'name_regex=[0-9a-z]{40}' --data 'keep_n=5' --data 'older_than=2d' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags" curl --request DELETE --data 'name_regex_delete=[0-9a-z]{40}' --data 'keep_n=5' --data 'older_than=2d' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
``` ```
1. Remove all tags, but keep always the latest 5: 1. Remove all tags, but keep always the latest 5:
```shell ```shell
curl --request DELETE --data 'name_regex=.*' --data 'keep_n=5' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags" curl --request DELETE --data 'name_regex_delete=.*' --data 'keep_n=5' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
```
1. Remove all tags, but keep always tags beginning with `stable`:
```shell
curl --request DELETE --data 'name_regex_delete=.*' --data 'name_regex_keep=stable.*' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
``` ```
1. Remove all tags that are older than 1 month: 1. Remove all tags that are older than 1 month:
```shell ```shell
curl --request DELETE --data 'name_regex=.*' --data 'older_than=1month' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags" curl --request DELETE --data 'name_regex_delete=.*' --data 'older_than=1month' --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/registry/repositories/2/tags"
``` ```
...@@ -1041,7 +1041,7 @@ POST /projects ...@@ -1041,7 +1041,7 @@ POST /projects
| `ci_config_path` | string | no | The path to CI config file | | `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project | | `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) | | `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins | | `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default | | `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project | | `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project | | `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
...@@ -1109,7 +1109,7 @@ POST /projects/user/:user_id ...@@ -1109,7 +1109,7 @@ POST /projects/user/:user_id
| `ci_config_path` | string | no | The path to CI config file | | `ci_config_path` | string | no | The path to CI config file |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project | | `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) | | `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins | | `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default | | `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge requests by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project | | `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project | | `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
...@@ -1177,7 +1177,7 @@ PUT /projects/:id ...@@ -1177,7 +1177,7 @@ PUT /projects/:id
| `ci_default_git_depth` | integer | no | Default number of revisions for [shallow cloning](../user/project/pipelines/settings.md#git-shallow-clone) | | `ci_default_git_depth` | integer | no | Default number of revisions for [shallow cloning](../user/project/pipelines/settings.md#git-shallow-clone) |
| `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project | | `auto_devops_enabled` | boolean | no | Enable Auto DevOps for this project |
| `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) | | `auto_devops_deploy_strategy` | string | no | Auto Deploy strategy (`continuous`, `manual` or `timed_incremental`) |
| `repository_storage` | string | no | **(STARTER ONLY)** Which storage shard the repository is on. Available only to admins | | `repository_storage` | string | no | Which storage shard the repository is on. Available only to admins |
| `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge request by default | | `approvals_before_merge` | integer | no | **(STARTER)** How many approvers should approve merge request by default |
| `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project | | `external_authorization_classification_label` | string | no | **(PREMIUM)** The classification label for the project |
| `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project | | `mirror` | boolean | no | **(STARTER)** Enables pull mirroring in a project |
......
...@@ -136,3 +136,5 @@ Incident Management features can be easily enabled & disabled via the Project se ...@@ -136,3 +136,5 @@ Incident Management features can be easily enabled & disabled via the Project se
#### Auto-creation #### Auto-creation
GitLab Issues can automatically be created as a result of an Alert notification. An Issue created this way will contain error information to help you further debug the error. GitLab Issues can automatically be created as a result of an Alert notification. An Issue created this way will contain error information to help you further debug the error.
For [GitLab-managed alerting rules](../project/integrations/prometheus.md#setting-up-alerts-for-prometheus-metrics-ultimate), the issue will include an embedded chart for the query corresponding to the alert. The chart will show an hour of data surrounding the starting point of the incident, 30 minutes before and after.
...@@ -106,6 +106,9 @@ module API ...@@ -106,6 +106,9 @@ module API
project.auto_devops.nil? ? 'continuous' : project.auto_devops.deploy_strategy project.auto_devops.nil? ? 'continuous' : project.auto_devops.deploy_strategy
end end
expose :autoclose_referenced_issues expose :autoclose_referenced_issues
expose :repository_storage, if: ->(project, options) {
Ability.allowed?(options[:current_user], :change_repository_storage, project)
}
# rubocop: disable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord
def self.preload_relation(projects_relation, options = {}) def self.preload_relation(projects_relation, options = {})
......
...@@ -54,6 +54,7 @@ module API ...@@ -54,6 +54,7 @@ module API
optional :auto_devops_enabled, type: Boolean, desc: 'Flag indication if Auto DevOps is enabled' optional :auto_devops_enabled, type: Boolean, desc: 'Flag indication if Auto DevOps is enabled'
optional :auto_devops_deploy_strategy, type: String, values: %w(continuous manual timed_incremental), desc: 'Auto Deploy strategy' optional :auto_devops_deploy_strategy, type: String, values: %w(continuous manual timed_incremental), desc: 'Auto Deploy strategy'
optional :autoclose_referenced_issues, type: Boolean, desc: 'Flag indication if referenced issues auto-closing is enabled' optional :autoclose_referenced_issues, type: Boolean, desc: 'Flag indication if referenced issues auto-closing is enabled'
optional :repository_storage, type: String, desc: 'Which storage shard the repository is on. Available only to admins'
end end
params :optional_project_params_ee do params :optional_project_params_ee do
...@@ -125,6 +126,7 @@ module API ...@@ -125,6 +126,7 @@ module API
:wiki_access_level, :wiki_access_level,
:avatar, :avatar,
:suggestion_commit_message, :suggestion_commit_message,
:repository_storage,
# TODO: remove in API v5, replaced by *_access_level # TODO: remove in API v5, replaced by *_access_level
:issues_enabled, :issues_enabled,
......
...@@ -15,7 +15,7 @@ module API ...@@ -15,7 +15,7 @@ module API
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
segment ':id/commits/:commit_id' do segment ':id/commits/:commit_id' do
params do params do
requires :path, type: String, desc: 'The path of a file' requires :paths, type: Array, desc: 'The paths of the files'
end end
get 'lsif/info' do get 'lsif/info' do
authorize! :download_code, user_project authorize! :download_code, user_project
...@@ -30,7 +30,9 @@ module API ...@@ -30,7 +30,9 @@ module API
authorize! :read_pipeline, artifact.job.pipeline authorize! :read_pipeline, artifact.job.pipeline
file_too_large! if artifact.file.cached_size > MAX_FILE_SIZE file_too_large! if artifact.file.cached_size > MAX_FILE_SIZE
::Projects::LsifDataService.new(artifact.file, @project, params).execute service = ::Projects::LsifDataService.new(artifact.file, @project, params[:commit_id])
params[:paths].to_h { |path| [path, service.execute(path)] }
end end
end end
end end
......
...@@ -69,7 +69,16 @@ module API ...@@ -69,7 +69,16 @@ module API
end end
params do params do
requires :repository_id, type: Integer, desc: 'The ID of the repository' requires :repository_id, type: Integer, desc: 'The ID of the repository'
requires :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all' optional :name_regex_delete, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
# require either name_regex (deprecated) or name_regex_delete, it is ok to have both
given name_regex_delete: ->(val) { val.nil? } do
requires :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
end
optional :name_regex, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
given name_regex: ->(val) { val.nil? } do
requires :name_regex_delete, type: String, desc: 'The tag name regexp to delete, specify .* to delete all'
end
optional :name_regex_keep, type: String, desc: 'The tag name regexp to retain'
optional :keep_n, type: Integer, desc: 'Keep n of latest tags with matching name' optional :keep_n, type: Integer, desc: 'Keep n of latest tags with matching name'
optional :older_than, type: String, desc: 'Delete older than: 1h, 1d, 1month' optional :older_than, type: String, desc: 'Delete older than: 1h, 1d, 1month'
end end
......
...@@ -25,6 +25,7 @@ module API ...@@ -25,6 +25,7 @@ module API
end end
def verify_update_project_attrs!(project, attrs) def verify_update_project_attrs!(project, attrs)
attrs.delete(:repository_storage) unless can?(current_user, :change_repository_storage, project)
end end
def delete_project(user_project) def delete_project(user_project)
......
...@@ -118,10 +118,9 @@ module Gitlab ...@@ -118,10 +118,9 @@ module Gitlab
\.haml-lint_todo.yml | \.haml-lint_todo.yml |
babel\.config\.js | babel\.config\.js |
jest\.config\.js | jest\.config\.js |
karma\.config\.js |
webpack\.config\.js |
package\.json | package\.json |
yarn\.lock | yarn\.lock |
config/.+\.js |
\.gitlab/ci/frontend\.gitlab-ci\.yml \.gitlab/ci/frontend\.gitlab-ci\.yml
)\z}x => :frontend, )\z}x => :frontend,
......
namespace :gitlab do
namespace :cleanup do
desc "GitLab | Cleanup | Delete moved repositories"
task moved: :gitlab_environment do
warn_user_is_not_gitlab
remove_flag = ENV['REMOVE']
Gitlab.config.repositories.storages.each do |name, repository_storage|
repo_root = repository_storage.legacy_disk_path.chomp('/')
# Look for global repos (legacy, depth 1) and normal repos (depth 2)
IO.popen(%W(find #{repo_root} -mindepth 1 -maxdepth 2 -name *+moved*.git)) do |find|
find.each_line do |path|
path.chomp!
if remove_flag
if FileUtils.rm_rf(path)
puts "Removed...#{path}".color(:green)
else
puts "Cannot remove #{path}".color(:red)
end
else
puts "Can be removed: #{path}".color(:green)
end
end
end
end
unless remove_flag
puts "To cleanup these repositories run this command with REMOVE=true".color(:yellow)
end
end
end
end
...@@ -45,18 +45,20 @@ describe('Code navigation actions', () => { ...@@ -45,18 +45,20 @@ describe('Code navigation actions', () => {
describe('success', () => { describe('success', () => {
beforeEach(() => { beforeEach(() => {
mock.onGet(apiUrl).replyOnce(200, [ mock.onGet(apiUrl).replyOnce(200, {
{ index: [
start_line: 0, {
start_char: 0, start_line: 0,
hover: { value: '123' }, start_char: 0,
}, hover: { value: '123' },
{ },
start_line: 1, {
start_char: 0, start_line: 1,
hover: null, start_char: 0,
}, hover: null,
]); },
],
});
}); });
it('commits REQUEST_DATA_SUCCESS with normalized data', done => { it('commits REQUEST_DATA_SUCCESS with normalized data', done => {
......
...@@ -75,6 +75,16 @@ describe Resolvers::Projects::SnippetsResolver do ...@@ -75,6 +75,16 @@ describe Resolvers::Projects::SnippetsResolver do
expect(resolve_snippets(context: { current_user: other_user }, args: { ids: project_snippet.to_global_id })).to be_empty expect(resolve_snippets(context: { current_user: other_user }, args: { ids: project_snippet.to_global_id })).to be_empty
end end
end end
context 'when project snippets are disabled' do
it 'raises an error' do
disabled_snippet_project = create(:project, :snippets_disabled)
disabled_snippet_project.add_developer(current_user)
expect(SnippetsFinder).not_to receive(:new)
expect { resolve_snippets(obj: disabled_snippet_project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end end
def resolve_snippets(args: {}, context: { current_user: current_user }, obj: project) def resolve_snippets(args: {}, context: { current_user: current_user }, obj: project)
......
...@@ -175,9 +175,12 @@ describe Gitlab::Danger::Helper do ...@@ -175,9 +175,12 @@ describe Gitlab::Danger::Helper do
'spec/javascripts/foo' | :frontend 'spec/javascripts/foo' | :frontend
'spec/frontend/bar' | :frontend 'spec/frontend/bar' | :frontend
'vendor/assets/foo' | :frontend 'vendor/assets/foo' | :frontend
'babel.config.js' | :frontend
'jest.config.js' | :frontend 'jest.config.js' | :frontend
'package.json' | :frontend 'package.json' | :frontend
'yarn.lock' | :frontend 'yarn.lock' | :frontend
'config/foo.js' | :frontend
'config/deep/foo.js' | :frontend
'ee/app/assets/foo' | :frontend 'ee/app/assets/foo' | :frontend
'ee/app/views/foo' | :frontend 'ee/app/views/foo' | :frontend
......
...@@ -2822,6 +2822,44 @@ describe Project do ...@@ -2822,6 +2822,44 @@ describe Project do
end end
end end
# Specs for Project#change_repository_storage: it should enqueue the
# repository move worker and lock the project, but only when the target
# storage is valid, different from the current one, and the project is
# not already read-only.
describe '#change_repository_storage' do
let(:project) { create(:project, :repository) }
let(:read_only_project) { create(:project, :repository, repository_read_only: true) }
before do
# Register an extra Gitaly storage so 'test_second_storage' is a valid target.
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/extra_storage' })
end
it 'schedules the transfer of the repository to the new storage and locks the project' do
expect(ProjectUpdateRepositoryStorageWorker).to receive(:perform_async).with(project.id, 'test_second_storage')
project.change_repository_storage('test_second_storage')
project.save!
# The project is locked (read-only) while the move is pending.
expect(project).to be_repository_read_only
end
it "doesn't schedule the transfer if the repository is already read-only" do
expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
read_only_project.change_repository_storage('test_second_storage')
read_only_project.save!
end
it "doesn't lock or schedule the transfer if the storage hasn't changed" do
expect(ProjectUpdateRepositoryStorageWorker).not_to receive(:perform_async)
project.change_repository_storage(project.repository_storage)
project.save!
expect(project).not_to be_repository_read_only
end
it 'throws an error if an invalid repository storage is provided' do
expect { project.change_repository_storage('unknown') }.to raise_error(ArgumentError)
end
end
describe '#pushes_since_gc' do describe '#pushes_since_gc' do
let(:project) { create(:project) } let(:project) { create(:project) }
......
...@@ -4,36 +4,73 @@ require 'spec_helper' ...@@ -4,36 +4,73 @@ require 'spec_helper'
describe SnippetBlobPresenter do describe SnippetBlobPresenter do
describe '#rich_data' do describe '#rich_data' do
let(:snippet) { build(:personal_snippet) } before do
allow_next_instance_of(described_class) do |instance|
allow(instance).to receive(:current_user).and_return(nil)
end
end
subject { described_class.new(snippet.blob).rich_data } subject { described_class.new(snippet.blob).rich_data }
it 'returns nil when the snippet blob is binary' do context 'with PersonalSnippet' do
allow(snippet.blob).to receive(:binary?).and_return(true) let(:raw_url) { "http://127.0.0.1:3000/snippets/#{snippet.id}/raw" }
let(:snippet) { build(:personal_snippet) }
expect(subject).to be_nil it 'returns nil when the snippet blob is binary' do
end allow(snippet.blob).to receive(:binary?).and_return(true)
it 'returns markdown content when snippet file is markup' do expect(subject).to be_nil
snippet.file_name = 'test.md' end
snippet.content = '*foo*'
expect(subject).to eq '<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>' context 'with markdown format' do
end let(:snippet) { create(:personal_snippet, file_name: 'test.md', content: '*foo*') }
it 'returns syntax highlighted content' do it 'returns rich markdown content' do
snippet.file_name = 'test.rb' expected = <<~HTML
snippet.content = 'class Foo;end' <div class="file-content md">
<p data-sourcepos="1:1-1:5" dir="auto"><em>foo</em></p>
</div>
HTML
expect(subject) expect(subject).to eq(expected)
.to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>' end
end end
it 'returns plain text highlighted content' do context 'with notebook format' do
snippet.file_name = 'test' let(:snippet) { create(:personal_snippet, file_name: 'test.ipynb') }
snippet.content = 'foo'
expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>' it 'returns rich notebook content' do
expect(subject.strip).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-notebook-viewer"></div>)
end
end
context 'with openapi format' do
let(:snippet) { create(:personal_snippet, file_name: 'openapi.yml') }
it 'returns rich openapi content' do
expect(subject).to eq %Q(<div class="file-content" data-endpoint="/snippets/#{snippet.id}/raw" id="js-openapi-viewer"></div>\n)
end
end
context 'with svg format' do
let(:snippet) { create(:personal_snippet, file_name: 'test.svg') }
it 'returns rich svg content' do
result = Nokogiri::HTML::DocumentFragment.parse(subject)
image_tag = result.search('img').first
expect(image_tag.attr('src')).to include("data:#{snippet.blob.mime_type};base64")
expect(image_tag.attr('alt')).to eq('test.svg')
end
end
context 'with other format' do
let(:snippet) { create(:personal_snippet, file_name: 'test') }
it 'does not return no rich content' do
expect(subject).to be_nil
end
end
end end
end end
...@@ -55,19 +92,19 @@ describe SnippetBlobPresenter do ...@@ -55,19 +92,19 @@ describe SnippetBlobPresenter do
expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>' expect(subject).to eq '<span id="LC1" class="line" lang="markdown"><span class="ge">*foo*</span></span>'
end end
it 'returns plain syntax content' do it 'returns highlighted syntax content' do
snippet.file_name = 'test.rb' snippet.file_name = 'test.rb'
snippet.content = 'class Foo;end' snippet.content = 'class Foo;end'
expect(subject) expect(subject)
.to eq '<span id="LC1" class="line" lang="">class Foo;end</span>' .to eq '<span id="LC1" class="line" lang="ruby"><span class="k">class</span> <span class="nc">Foo</span><span class="p">;</span><span class="k">end</span></span>'
end end
it 'returns plain text highlighted content' do it 'returns plain text highlighted content' do
snippet.file_name = 'test' snippet.file_name = 'test'
snippet.content = 'foo' snippet.content = 'foo'
expect(subject).to eq '<span id="LC1" class="line" lang="">foo</span>' expect(subject).to eq '<span id="LC1" class="line" lang="plaintext">foo</span>'
end end
end end
......
...@@ -67,7 +67,7 @@ describe 'Creating a Snippet' do ...@@ -67,7 +67,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user) post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['richData']).to match(content) expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content) expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title) expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description) expect(mutation_response['snippet']['description']).to eq(description)
...@@ -93,7 +93,7 @@ describe 'Creating a Snippet' do ...@@ -93,7 +93,7 @@ describe 'Creating a Snippet' do
it 'returns the created Snippet' do it 'returns the created Snippet' do
post_graphql_mutation(mutation, current_user: current_user) post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['richData']).to match(content) expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(content) expect(mutation_response['snippet']['blob']['plainData']).to match(content)
expect(mutation_response['snippet']['title']).to eq(title) expect(mutation_response['snippet']['title']).to eq(title)
expect(mutation_response['snippet']['description']).to eq(description) expect(mutation_response['snippet']['description']).to eq(description)
......
...@@ -56,7 +56,7 @@ describe 'Updating a Snippet' do ...@@ -56,7 +56,7 @@ describe 'Updating a Snippet' do
it 'returns the updated Snippet' do it 'returns the updated Snippet' do
post_graphql_mutation(mutation, current_user: current_user) post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['richData']).to match(updated_content) expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(updated_content) expect(mutation_response['snippet']['blob']['plainData']).to match(updated_content)
expect(mutation_response['snippet']['title']).to eq(updated_title) expect(mutation_response['snippet']['title']).to eq(updated_title)
expect(mutation_response['snippet']['description']).to eq(updated_description) expect(mutation_response['snippet']['description']).to eq(updated_description)
...@@ -78,7 +78,7 @@ describe 'Updating a Snippet' do ...@@ -78,7 +78,7 @@ describe 'Updating a Snippet' do
it 'returns the Snippet with its original values' do it 'returns the Snippet with its original values' do
post_graphql_mutation(mutation, current_user: current_user) post_graphql_mutation(mutation, current_user: current_user)
expect(mutation_response['snippet']['blob']['richData']).to match(original_content) expect(mutation_response['snippet']['blob']['richData']).to be_nil
expect(mutation_response['snippet']['blob']['plainData']).to match(original_content) expect(mutation_response['snippet']['blob']['plainData']).to match(original_content)
expect(mutation_response['snippet']['title']).to eq(original_title) expect(mutation_response['snippet']['title']).to eq(original_title)
expect(mutation_response['snippet']['description']).to eq(original_description) expect(mutation_response['snippet']['description']).to eq(original_description)
......
...@@ -9,18 +9,20 @@ describe API::LsifData do ...@@ -9,18 +9,20 @@ describe API::LsifData do
let(:commit) { project.commit } let(:commit) { project.commit }
describe 'GET lsif/info' do describe 'GET lsif/info' do
let(:endpoint_path) { "/projects/#{project.id}/commits/#{commit.id}/lsif/info" } subject do
endpoint_path = "/projects/#{project.id}/commits/#{commit.id}/lsif/info"
get api(endpoint_path, user), params: { paths: ['main.go', 'morestrings/reverse.go'] }
response
end
context 'user does not have access to the project' do context 'user does not have access to the project' do
before do before do
project.add_guest(user) project.add_guest(user)
end end
it 'returns 403' do it { is_expected.to have_gitlab_http_status(:forbidden) }
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:forbidden)
end
end end
context 'user has access to the project' do context 'user has access to the project' do
...@@ -28,35 +30,27 @@ describe API::LsifData do ...@@ -28,35 +30,27 @@ describe API::LsifData do
project.add_reporter(user) project.add_reporter(user)
end end
context 'code_navigation feature is disabled' do
before do
stub_feature_flags(code_navigation: false)
end
it 'returns 404' do
get api(endpoint_path, user)
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'there is no job artifact for the passed commit' do context 'there is no job artifact for the passed commit' do
it 'returns 404' do it { is_expected.to have_gitlab_http_status(:not_found) }
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:not_found)
end
end end
context 'lsif data is stored as a job artifact' do context 'lsif data is stored as a job artifact' do
let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id) } let!(:pipeline) { create(:ci_pipeline, project: project, sha: commit.id) }
let!(:artifact) { create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline)) } let!(:artifact) { create(:ci_job_artifact, :lsif, job: create(:ci_build, pipeline: pipeline)) }
it 'returns code navigation info for a given path' do context 'code_navigation feature is disabled' do
get api(endpoint_path, user), params: { path: 'main.go' } before do
stub_feature_flags(code_navigation: false)
end
expect(response).to have_gitlab_http_status(:ok) it { is_expected.to have_gitlab_http_status(:not_found) }
expect(response.parsed_body.last).to eq({ end
it 'returns code navigation info for a given path', :aggregate_failures do
expect(subject).to have_gitlab_http_status(:ok)
data_for_main = response.parsed_body['main.go']
expect(data_for_main.last).to eq({
'end_char' => 18, 'end_char' => 18,
'end_line' => 8, 'end_line' => 8,
'start_char' => 13, 'start_char' => 13,
...@@ -67,26 +61,33 @@ describe API::LsifData do ...@@ -67,26 +61,33 @@ describe API::LsifData do
'value' => Gitlab::Highlight.highlight(nil, 'func Func2(i int) string', language: 'go') 'value' => Gitlab::Highlight.highlight(nil, 'func Func2(i int) string', language: 'go')
}] }]
}) })
data_for_reverse = response.parsed_body['morestrings/reverse.go']
expect(data_for_reverse.last).to eq({
'end_char' => 9,
'end_line' => 7,
'start_char' => 8,
'start_line' => 7,
'definition_url' => project_blob_path(project, "#{commit.id}/morestrings/reverse.go", anchor: 'L6'),
'hover' => [{
'language' => 'go',
'value' => Gitlab::Highlight.highlight(nil, 'var b string', language: 'go')
}]
})
end end
context 'the stored file is too large' do context 'the stored file is too large' do
it 'returns 413' do before do
allow_any_instance_of(JobArtifactUploader).to receive(:cached_size).and_return(20.megabytes) allow_any_instance_of(JobArtifactUploader).to receive(:cached_size).and_return(20.megabytes)
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:payload_too_large)
end end
it { is_expected.to have_gitlab_http_status(:payload_too_large) }
end end
context 'the user does not have access to the pipeline' do context 'the user does not have access to the pipeline' do
let(:project) { create(:project, :repository, builds_access_level: ProjectFeature::DISABLED) } let(:project) { create(:project, :repository, builds_access_level: ProjectFeature::DISABLED) }
it 'returns 403' do it { is_expected.to have_gitlab_http_status(:forbidden) }
get api(endpoint_path, user), params: { path: 'main.go' }
expect(response).to have_gitlab_http_status(:forbidden)
end
end end
end end
end end
......
...@@ -109,7 +109,7 @@ describe API::ProjectContainerRepositories do ...@@ -109,7 +109,7 @@ describe API::ProjectContainerRepositories do
context 'disallowed' do context 'disallowed' do
let(:params) do let(:params) do
{ name_regex: 'v10.*' } { name_regex_delete: 'v10.*' }
end end
it_behaves_like 'rejected container repository access', :developer, :forbidden it_behaves_like 'rejected container repository access', :developer, :forbidden
...@@ -130,16 +130,33 @@ describe API::ProjectContainerRepositories do ...@@ -130,16 +130,33 @@ describe API::ProjectContainerRepositories do
end end
end end
# The cleanup endpoint requires some name_regex parameter; omitting every
# regex variant must be rejected with 400 Bad Request.
context 'without name_regex' do
let(:params) do
{ keep_n: 100,
older_than: '1 day',
other: 'some value' }
end
it 'returns bad request' do
subject
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'passes all declared parameters' do context 'passes all declared parameters' do
let(:params) do let(:params) do
{ name_regex: 'v10.*', { name_regex_delete: 'v10.*',
name_regex_keep: 'v10.1.*',
keep_n: 100, keep_n: 100,
older_than: '1 day', older_than: '1 day',
other: 'some value' } other: 'some value' }
end end
let(:worker_params) do let(:worker_params) do
{ name_regex: 'v10.*', { name_regex: nil,
name_regex_delete: 'v10.*',
name_regex_keep: 'v10.1.*',
keep_n: 100, keep_n: 100,
older_than: '1 day', older_than: '1 day',
container_expiration_policy: false } container_expiration_policy: false }
...@@ -174,6 +191,38 @@ describe API::ProjectContainerRepositories do ...@@ -174,6 +191,38 @@ describe API::ProjectContainerRepositories do
end end
end end
end end
# Backwards compatibility: the legacy `name_regex` param is still accepted
# and forwarded to the worker as-is (with `name_regex_delete` left nil).
context 'with deprecated name_regex param' do
let(:params) do
{ name_regex: 'v10.*',
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
other: 'some value' }
end
# Expected payload handed to the worker; note only declared params survive
# ('other' is dropped) and container_expiration_policy defaults to false.
let(:worker_params) do
{ name_regex: 'v10.*',
name_regex_delete: nil,
name_regex_keep: 'v10.1.*',
keep_n: 100,
older_than: '1 day',
container_expiration_policy: false }
end
let(:lease_key) { "container_repository:cleanup_tags:#{root_repository.id}" }
it 'schedules cleanup of tags repository' do
stub_last_activity_update
# Cleanup runs under an exclusive lease to avoid concurrent tag deletion.
stub_exclusive_lease(lease_key, timeout: 1.hour)
expect(CleanupContainerRepositoryWorker).to receive(:perform_async)
.with(maintainer.id, root_repository.id, worker_params)
subject
expect(response).to have_gitlab_http_status(:accepted)
end
end
end end
end end
......
...@@ -1751,6 +1751,27 @@ describe API::Projects do ...@@ -1751,6 +1751,27 @@ describe API::Projects do
subject { get api("/projects/#{project.id}", user) } subject { get api("/projects/#{project.id}", user) }
end end
# The repository_storage field in the project API payload is admin-only:
# exposed to admins, omitted entirely for regular users.
describe 'repository_storage attribute' do
before do
get api("/projects/#{project.id}", user)
end
context 'when authenticated as an admin' do
let(:user) { create(:admin) }
it 'returns repository_storage attribute' do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['repository_storage']).to eq(project.repository_storage)
end
end
context 'when authenticated as a regular user' do
it 'does not return repository_storage attribute' do
# The key must be absent, not just nil.
expect(json_response).not_to have_key('repository_storage')
end
end
end
end end
describe 'GET /projects/:id/users' do describe 'GET /projects/:id/users' do
...@@ -2402,6 +2423,50 @@ describe API::Projects do ...@@ -2402,6 +2423,50 @@ describe API::Projects do
expect(response).to have_gitlab_http_status(:forbidden) expect(response).to have_gitlab_http_status(:forbidden)
end end
end end
# PUT /projects/:id with repository_storage: only admins may trigger a
# storage move; for regular users the param is silently ignored while the
# rest of the update still applies.
context 'when updating repository storage' do
let(:unknown_storage) { 'new-storage' }
let(:new_project) { create(:project, :repository, namespace: user.namespace) }
context 'as a user' do
it 'returns 200 but does not change repository_storage' do
expect do
Sidekiq::Testing.fake! do
put(api("/projects/#{new_project.id}", user), params: { repository_storage: unknown_storage, issues_enabled: false })
end
end.not_to change(ProjectUpdateRepositoryStorageWorker.jobs, :size)
expect(response).to have_gitlab_http_status(:ok)
# The non-storage attribute in the same request is still applied.
expect(json_response['issues_enabled']).to eq(false)
expect(new_project.reload.repository.storage).to eq('default')
end
end
context 'as an admin' do
include_context 'custom session'
let(:admin) { create(:admin) }
it 'returns 500 when repository storage is unknown' do
put(api("/projects/#{new_project.id}", admin), params: { repository_storage: unknown_storage })
# An unknown storage raises ArgumentError, surfaced as a 500.
expect(response).to have_gitlab_http_status(:internal_server_error)
expect(json_response['message']).to match('ArgumentError')
end
it 'returns 200 when repository storage has changed' do
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
expect do
Sidekiq::Testing.fake! do
put(api("/projects/#{new_project.id}", admin), params: { repository_storage: 'test_second_storage' })
end
end.to change(ProjectUpdateRepositoryStorageWorker.jobs, :size).by(1)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
end end
describe 'POST /projects/:id/archive' do describe 'POST /projects/:id/archive' do
......
...@@ -48,25 +48,37 @@ describe Projects::ContainerRepository::CleanupTagsService do ...@@ -48,25 +48,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
end end
context 'when regex matching everything is specified' do context 'when regex matching everything is specified' do
# Shared expectation used by both the new `name_regex_delete` and the
# deprecated `name_regex` params: a match-everything regex deletes every
# deletable tag.
shared_examples 'removes all matches' do
it 'does remove B* and C' do
# The :A cannot be removed as config is shared with :latest
# The :E cannot be removed as it does not have valid manifest
expect_delete('sha256:configB').twice
expect_delete('sha256:configC')
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D Bb Ba C))
end
end
let(:params) do let(:params) do
{ 'name_regex' => '.*' } { 'name_regex_delete' => '.*' }
end end
it 'does remove B* and C' do it_behaves_like 'removes all matches'
# The :A cannot be removed as config is shared with :latest
# The :E cannot be removed as it does not have valid manifest
expect_delete('sha256:configB').twice context 'with deprecated name_regex param' do
expect_delete('sha256:configC') let(:params) do
expect_delete('sha256:configD') { 'name_regex' => '.*' }
end
is_expected.to include(status: :success, deleted: %w(D Bb Ba C)) it_behaves_like 'removes all matches'
end end
end end
context 'when regex matching specific tags is used' do context 'when delete regex matching specific tags is used' do
let(:params) do let(:params) do
{ 'name_regex' => 'C|D' } { 'name_regex_delete' => 'C|D' }
end end
it 'does remove C and D' do it 'does remove C and D' do
...@@ -75,11 +87,37 @@ describe Projects::ContainerRepository::CleanupTagsService do ...@@ -75,11 +87,37 @@ describe Projects::ContainerRepository::CleanupTagsService do
is_expected.to include(status: :success, deleted: %w(D C)) is_expected.to include(status: :success, deleted: %w(D C))
end end
# name_regex_keep acts as an allow-list that overrides the delete regex:
# tags matching both are kept.
context 'with overriding allow regex' do
let(:params) do
{ 'name_regex_delete' => 'C|D',
'name_regex_keep' => 'C' }
end
it 'does not remove C' do
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D))
end
end
# When both the deprecated name_regex and the new name_regex_delete are
# supplied, name_regex_delete wins.
context 'with name_regex_delete overriding deprecated name_regex' do
let(:params) do
{ 'name_regex' => 'C|D',
'name_regex_delete' => 'D' }
end
it 'does not remove C' do
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D))
end
end
end end
context 'when removing a tagged image that is used by another tag' do context 'when removing a tagged image that is used by another tag' do
let(:params) do let(:params) do
{ 'name_regex' => 'Ba' } { 'name_regex_delete' => 'Ba' }
end end
it 'does not remove the tag' do it 'does not remove the tag' do
...@@ -89,9 +127,23 @@ describe Projects::ContainerRepository::CleanupTagsService do ...@@ -89,9 +127,23 @@ describe Projects::ContainerRepository::CleanupTagsService do
end end
end end
# Even with a delete-everything regex, tags matching name_regex_keep
# (here all B* tags) must survive the cleanup.
context 'with allow regex value' do
let(:params) do
{ 'name_regex_delete' => '.*',
'name_regex_keep' => 'B.*' }
end
it 'does not remove B*' do
expect_delete('sha256:configC')
expect_delete('sha256:configD')
is_expected.to include(status: :success, deleted: %w(D C))
end
end
context 'when removing keeping only 3' do context 'when removing keeping only 3' do
let(:params) do let(:params) do
{ 'name_regex' => '.*', { 'name_regex_delete' => '.*',
'keep_n' => 3 } 'keep_n' => 3 }
end end
...@@ -104,7 +156,7 @@ describe Projects::ContainerRepository::CleanupTagsService do ...@@ -104,7 +156,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'when removing older than 1 day' do context 'when removing older than 1 day' do
let(:params) do let(:params) do
{ 'name_regex' => '.*', { 'name_regex_delete' => '.*',
'older_than' => '1 day' } 'older_than' => '1 day' }
end end
...@@ -118,7 +170,7 @@ describe Projects::ContainerRepository::CleanupTagsService do ...@@ -118,7 +170,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'when combining all parameters' do context 'when combining all parameters' do
let(:params) do let(:params) do
{ 'name_regex' => '.*', { 'name_regex_delete' => '.*',
'keep_n' => 1, 'keep_n' => 1,
'older_than' => '1 day' } 'older_than' => '1 day' }
end end
...@@ -136,7 +188,7 @@ describe Projects::ContainerRepository::CleanupTagsService do ...@@ -136,7 +188,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'with valid container_expiration_policy param' do context 'with valid container_expiration_policy param' do
let(:params) do let(:params) do
{ 'name_regex' => '.*', { 'name_regex_delete' => '.*',
'keep_n' => 1, 'keep_n' => 1,
'older_than' => '1 day', 'older_than' => '1 day',
'container_expiration_policy' => true } 'container_expiration_policy' => true }
...@@ -152,7 +204,7 @@ describe Projects::ContainerRepository::CleanupTagsService do ...@@ -152,7 +204,7 @@ describe Projects::ContainerRepository::CleanupTagsService do
context 'without container_expiration_policy param' do context 'without container_expiration_policy param' do
let(:params) do let(:params) do
{ 'name_regex' => '.*', { 'name_regex_delete' => '.*',
'keep_n' => 1, 'keep_n' => 1,
'older_than' => '1 day' } 'older_than' => '1 day' }
end end
......
...@@ -307,6 +307,27 @@ describe Projects::ForkService do ...@@ -307,6 +307,27 @@ describe Projects::ForkService do
end end
end end
# After the source project moves to a different Gitaly shard, new forks must
# join a pool repository on the new shard while pre-move forks keep pointing
# at the pool on the original shard.
context 'when a project is already forked' do
# Fixed typo in the example description ("poolresository" -> "pool repository").
it 'creates a new pool repository after the project is moved to a new shard' do
project = create(:project, :public, :repository)
fork_before_move = fork_project(project)
# Stub everything required to move a project to a Gitaly shard that does not exist
stub_storage_settings('test_second_storage' => { 'path' => 'tmp/tests/second_storage' })
allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true)
Projects::UpdateRepositoryStorageService.new(project).execute('test_second_storage')
fork_after_move = fork_project(project)
pool_repository_before_move = PoolRepository.joins(:shard)
.where(source_project: project, shards: { name: 'default' }).first
pool_repository_after_move = PoolRepository.joins(:shard)
.where(source_project: project, shards: { name: 'test_second_storage' }).first
expect(fork_before_move.pool_repository).to eq(pool_repository_before_move)
expect(fork_after_move.pool_repository).to eq(pool_repository_after_move)
end
end
context 'when forking with object pools' do context 'when forking with object pools' do
let(:fork_from_project) { create(:project, :public) } let(:fork_from_project) { create(:project, :public) }
let(:forker) { create(:user) } let(:forker) { create(:user) }
......
...@@ -7,9 +7,8 @@ describe Projects::LsifDataService do ...@@ -7,9 +7,8 @@ describe Projects::LsifDataService do
let(:project) { build_stubbed(:project) } let(:project) { build_stubbed(:project) }
let(:path) { 'main.go' } let(:path) { 'main.go' }
let(:commit_id) { Digest::SHA1.hexdigest(SecureRandom.hex) } let(:commit_id) { Digest::SHA1.hexdigest(SecureRandom.hex) }
let(:params) { { path: path, commit_id: commit_id } }
let(:service) { described_class.new(artifact.file, project, params) } let(:service) { described_class.new(artifact.file, project, commit_id) }
describe '#execute' do describe '#execute' do
def highlighted_value(value) def highlighted_value(value)
...@@ -18,7 +17,7 @@ describe Projects::LsifDataService do ...@@ -18,7 +17,7 @@ describe Projects::LsifDataService do
context 'fetched lsif file', :use_clean_rails_memory_store_caching do context 'fetched lsif file', :use_clean_rails_memory_store_caching do
it 'is cached' do it 'is cached' do
service.execute service.execute(path)
cached_data = Rails.cache.fetch("project:#{project.id}:lsif:#{commit_id}") cached_data = Rails.cache.fetch("project:#{project.id}:lsif:#{commit_id}")
...@@ -30,7 +29,7 @@ describe Projects::LsifDataService do ...@@ -30,7 +29,7 @@ describe Projects::LsifDataService do
let(:path_prefix) { "/#{project.full_path}/-/blob/#{commit_id}" } let(:path_prefix) { "/#{project.full_path}/-/blob/#{commit_id}" }
it 'returns lsif ranges for the file' do it 'returns lsif ranges for the file' do
expect(service.execute).to eq([ expect(service.execute(path)).to eq([
{ {
end_char: 9, end_char: 9,
end_line: 6, end_line: 6,
...@@ -87,7 +86,7 @@ describe Projects::LsifDataService do ...@@ -87,7 +86,7 @@ describe Projects::LsifDataService do
let(:path) { 'morestrings/reverse.go' } let(:path) { 'morestrings/reverse.go' }
it 'returns lsif ranges for the file' do it 'returns lsif ranges for the file' do
expect(service.execute.first).to eq({ expect(service.execute(path).first).to eq({
end_char: 2, end_char: 2,
end_line: 11, end_line: 11,
start_char: 1, start_char: 1,
...@@ -102,7 +101,7 @@ describe Projects::LsifDataService do ...@@ -102,7 +101,7 @@ describe Projects::LsifDataService do
let(:path) { 'unknown.go' } let(:path) { 'unknown.go' }
it 'returns nil' do it 'returns nil' do
expect(service.execute).to eq(nil) expect(service.execute(path)).to eq(nil)
end end
end end
end end
...@@ -120,9 +119,7 @@ describe Projects::LsifDataService do ...@@ -120,9 +119,7 @@ describe Projects::LsifDataService do
end end
it 'fetches the document with the shortest absolute path' do it 'fetches the document with the shortest absolute path' do
service.instance_variable_set(:@docs, docs) expect(service.__send__(:find_doc_id, docs, path)).to eq(3)
expect(service.__send__(:doc_id)).to eq(3)
end end
end end
end end
......
# frozen_string_literal: true
require 'spec_helper'
# Specs for the service that physically moves a project's repositories to a
# different Gitaly storage shard and flips repository_storage / the
# read-only lock accordingly.
describe Projects::UpdateRepositoryStorageService do
include Gitlab::ShellAdapter
subject { described_class.new(project) }
describe "#execute" do
# Pin Time.now so any timestamp written during the move is deterministic.
let(:time) { Time.now }
before do
allow(Time).to receive(:now).and_return(time)
end
context 'without wiki and design repository' do
# Projects are marked read-only before a move is scheduled.
let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: false) }
context 'when the move succeeds' do
it 'moves the repository to the new storage and unmarks the repository as read only' do
old_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
project.repository.path_to_repo
end
# A successful mirror fetch onto the new shard...
expect_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(true)
subject.execute('test_second_storage')
# ...unlocks the project, updates the storage, removes the old copy
# and records the new shard on the project_repository row.
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('test_second_storage')
expect(gitlab_shell.repository_exists?('default', old_path)).to be(false)
expect(project.project_repository.shard_name).to eq('test_second_storage')
end
end
context 'when the project is already on the target storage' do
it 'bails out and does nothing' do
expect do
subject.execute(project.repository_storage)
end.to raise_error(described_class::RepositoryAlreadyMoved)
end
end
context 'when the move fails' do
it 'unmarks the repository as read-only without updating the repository storage' do
# A failed mirror fetch must not delete anything or change the shard.
expect_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror)
.with(project.repository.raw).and_return(false)
expect(GitlabShellWorker).not_to receive(:perform_async)
subject.execute('test_second_storage')
expect(project).not_to be_repository_read_only
expect(project.repository_storage).to eq('default')
end
end
end
context 'with wiki repository' do
# Reuses the shared examples to assert the wiki repo moves alongside
# the main project repository.
include_examples 'moves repository to another storage', 'wiki' do
let(:project) { create(:project, :repository, repository_read_only: true, wiki_enabled: true) }
let(:repository) { project.wiki.repository }
before do
project.create_wiki
end
end
end
context 'when a object pool was joined' do
let(:project) { create(:project, :repository, wiki_enabled: false, repository_read_only: true) }
let(:pool) { create(:pool_repository, :ready, source_project: project) }
it 'leaves the pool' do
allow_any_instance_of(Gitlab::Git::Repository).to receive(:fetch_repository_as_mirror).and_return(true)
subject.execute('test_second_storage')
# Pool membership is shard-local, so the moved project must detach.
expect(project.reload_pool_repository).to be_nil
end
end
end
end
...@@ -613,6 +613,25 @@ describe Projects::UpdateService do ...@@ -613,6 +613,25 @@ describe Projects::UpdateService do
end end
end end
# Projects::UpdateService delegates storage moves to
# Project#change_repository_storage, and only for admins.
describe 'repository_storage' do
let(:admin) { create(:admin) }
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:opts) { { repository_storage: 'test_second_storage' } }
it 'calls the change repository storage method if the storage changed' do
expect(project).to receive(:change_repository_storage).with('test_second_storage')
# Dropped a stray `.inspect` chained on the return value; it had no effect.
update_project(project, admin, opts)
end
it "doesn't call the change repository storage for non-admin users" do
expect(project).not_to receive(:change_repository_storage)
update_project(project, user, opts)
end
end
def update_project(project, user, opts)
  described_class.new(project, user, opts).execute
end
...
# frozen_string_literal: true
# Shared examples for a service whose #execute(target_storage) moves a
# project's repositories between gitaly shards. `repository_type` names the
# extra repository exercised alongside the project repository (e.g. 'wiki').
# Callers must define `project`, `repository`, and `subject`.
RSpec.shared_examples 'moves repository to another storage' do |repository_type|
  # Doubles standing in for the raw repositories created on the target shard.
  let(:project_repository_double) { double(:repository) }
  let(:repository_double) { double(:repository) }

  before do
    # Default stub for non-specified params
    allow(Gitlab::Git::Repository).to receive(:new).and_call_original
    # Hand back our doubles whenever a raw repository is constructed against
    # the 'test_second_storage' shard, for both repositories being moved.
    allow(Gitlab::Git::Repository).to receive(:new)
      .with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
      .and_return(project_repository_double)
    allow(Gitlab::Git::Repository).to receive(:new)
      .with('test_second_storage', repository.raw.relative_path, repository.gl_repository, repository.full_path)
      .and_return(repository_double)
  end

  context 'when the move succeeds', :clean_gitlab_redis_shared_state do
    before do
      # Both mirror fetches onto the new shard report success.
      allow(project_repository_double)
        .to receive(:fetch_repository_as_mirror)
        .with(project.repository.raw)
        .and_return(true)
      allow(repository_double)
        .to receive(:fetch_repository_as_mirror)
        .with(repository.raw)
        .and_return(true)
    end

    it "moves the project and its #{repository_type} repository to the new storage and unmarks the repository as read only" do
      # Capture the source-shard paths before the move so we can assert the
      # old copies are gone afterwards.
      old_project_repository_path = Gitlab::GitalyClient::StorageSettings.allow_disk_access do
        project.repository.path_to_repo
      end
      old_repository_path = repository.full_path

      subject.execute('test_second_storage')

      expect(project).not_to be_repository_read_only
      expect(project.repository_storage).to eq('test_second_storage')
      # Source copies must no longer exist on the 'default' shard.
      expect(gitlab_shell.repository_exists?('default', old_project_repository_path)).to be(false)
      expect(gitlab_shell.repository_exists?('default', old_repository_path)).to be(false)
    end

    context ':repack_after_shard_migration feature flag disabled' do
      before do
        stub_feature_flags(repack_after_shard_migration: false)
      end

      it 'does not enqueue a GC run' do
        expect { subject.execute('test_second_storage') }
          .not_to change(GitGarbageCollectWorker.jobs, :count)
      end
    end

    context ':repack_after_shard_migration feature flag enabled' do
      before do
        stub_feature_flags(repack_after_shard_migration: true)
      end

      it 'does not enqueue a GC run if housekeeping is disabled' do
        # Housekeeping being globally disabled overrides the feature flag.
        stub_application_setting(housekeeping_enabled: false)

        expect { subject.execute('test_second_storage') }
          .not_to change(GitGarbageCollectWorker.jobs, :count)
      end

      it 'enqueues a GC run' do
        expect { subject.execute('test_second_storage') }
          .to change(GitGarbageCollectWorker.jobs, :count).by(1)
      end
    end
  end

  context 'when the project is already on the target storage' do
    it 'bails out and does nothing' do
      # Moving to the shard the project already lives on is a caller error.
      expect do
        subject.execute(project.repository_storage)
      end.to raise_error(described_class::RepositoryAlreadyMoved)
    end
  end

  context "when the move of the #{repository_type} repository fails" do
    it 'unmarks the repository as read-only without updating the repository storage' do
      # The project repository fetch succeeds but the extra repository's
      # fetch fails, so the whole move must be abandoned and no source-copy
      # cleanup job scheduled.
      allow(project_repository_double).to receive(:fetch_repository_as_mirror)
        .with(project.repository.raw).and_return(true)
      allow(repository_double).to receive(:fetch_repository_as_mirror)
        .with(repository.raw).and_return(false)
      expect(GitlabShellWorker).not_to receive(:perform_async)

      subject.execute('test_second_storage')

      expect(project).not_to be_repository_read_only
      expect(project.repository_storage).to eq('default')
    end
  end
end
# frozen_string_literal: true
require 'spec_helper'
# Unit specs for the Sidekiq worker that delegates a repository shard move to
# Projects::UpdateRepositoryStorageService.
describe ProjectUpdateRepositoryStorageWorker do
  subject(:worker) { described_class.new }

  let(:project) { create(:project, :repository) }

  describe "#perform" do
    it "calls the update repository storage service" do
      expect_any_instance_of(Projects::UpdateRepositoryStorageService)
        .to receive(:execute)
        .with('new_storage')

      worker.perform(project.id, 'new_storage')
    end

    it 'catches and logs RepositoryAlreadyMoved' do
      # Re-running a move onto the project's current shard is logged and
      # swallowed so the job does not fail and retry forever.
      expect(Rails.logger).to receive(:info).with(/repository already moved/)

      expect { worker.perform(project.id, project.repository_storage) }
        .not_to raise_error
    end
  end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment