Commit 946771d0 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent f1e2fca1
...@@ -71,8 +71,8 @@ export default { ...@@ -71,8 +71,8 @@ export default {
}, },
computed: { computed: {
statusTitle() { statusTitle() {
return sprintf(s__('Commits|Commit: %{commitText}'), { return sprintf(s__('PipelineStatusTooltip|Pipeline: %{ciStatus}'), {
commitText: this.commit.pipeline.detailedStatus.text, ciStatus: this.commit.pipeline.detailedStatus.text,
}); });
}, },
isLoading() { isLoading() {
......
...@@ -112,6 +112,7 @@ export default { ...@@ -112,6 +112,7 @@ export default {
<div class="image"> <div class="image">
<image-viewer <image-viewer
:path="imagePath" :path="imagePath"
:file-size="isNew ? newSize : oldSize"
:inner-css-classes="[ :inner-css-classes="[
'frame', 'frame',
{ {
......
# frozen_string_literal: true
module Mutations
module Admin
module SidekiqQueues
class DeleteJobs < BaseMutation
graphql_name 'AdminSidekiqQueuesDeleteJobs'
ADMIN_MESSAGE = 'You must be an admin to use this mutation'
Labkit::Context::KNOWN_KEYS.each do |key|
argument key,
GraphQL::STRING_TYPE,
required: false,
description: "Delete jobs matching #{key} in the context metadata"
end
argument :queue_name,
GraphQL::STRING_TYPE,
required: true,
description: 'The name of the queue to delete jobs from'
field :result,
Types::Admin::SidekiqQueues::DeleteJobsResponseType,
null: true,
description: 'Information about the status of the deletion request'
def ready?(**args)
unless current_user&.admin?
raise Gitlab::Graphql::Errors::ResourceNotAvailable, ADMIN_MESSAGE
end
super
end
def resolve(args)
{
result: Gitlab::SidekiqQueue.new(args[:queue_name]).drop_jobs!(args, timeout: 30),
errors: []
}
rescue Gitlab::SidekiqQueue::NoMetadataError
{
result: nil,
errors: ['No metadata provided']
}
rescue Gitlab::SidekiqQueue::InvalidQueueError
raise Gitlab::Graphql::Errors::ResourceNotAvailable, "Queue #{args[:queue_name]} not found"
end
end
end
end
end
# frozen_string_literal: true
module Types
module Admin
module SidekiqQueues
# We can't authorize against the value passed to this because it's
# a plain hash.
class DeleteJobsResponseType < BaseObject # rubocop:disable Graphql/AuthorizeTypes
graphql_name 'DeleteJobsResponse'
description 'The response from the AdminSidekiqQueuesDeleteJobs mutation.'
field :completed,
GraphQL::BOOLEAN_TYPE,
null: true,
description: 'Whether or not the entire queue was processed in time; if not, retrying the same request is safe'
field :deleted_jobs,
GraphQL::INT_TYPE,
null: true,
description: 'The number of matching jobs deleted'
field :queue_size,
GraphQL::INT_TYPE,
null: true,
description: 'The queue size after processing'
end
end
end
end
...@@ -6,6 +6,7 @@ module Types ...@@ -6,6 +6,7 @@ module Types
graphql_name 'Mutation' graphql_name 'Mutation'
mount_mutation Mutations::Admin::SidekiqQueues::DeleteJobs
mount_mutation Mutations::AwardEmojis::Add mount_mutation Mutations::AwardEmojis::Add
mount_mutation Mutations::AwardEmojis::Remove mount_mutation Mutations::AwardEmojis::Remove
mount_mutation Mutations::AwardEmojis::Toggle mount_mutation Mutations::AwardEmojis::Toggle
......
...@@ -172,6 +172,7 @@ class ApplicationSetting < ApplicationRecord ...@@ -172,6 +172,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_default, validates :gitaly_timeout_default,
presence: true, presence: true,
if: :gitaly_timeout_default_changed?,
numericality: { numericality: {
only_integer: true, only_integer: true,
greater_than_or_equal_to: 0, greater_than_or_equal_to: 0,
...@@ -180,6 +181,7 @@ class ApplicationSetting < ApplicationRecord ...@@ -180,6 +181,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_medium, validates :gitaly_timeout_medium,
presence: true, presence: true,
if: :gitaly_timeout_medium_changed?,
numericality: { only_integer: true, greater_than_or_equal_to: 0 } numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :gitaly_timeout_medium, validates :gitaly_timeout_medium,
numericality: { less_than_or_equal_to: :gitaly_timeout_default }, numericality: { less_than_or_equal_to: :gitaly_timeout_default },
...@@ -190,6 +192,7 @@ class ApplicationSetting < ApplicationRecord ...@@ -190,6 +192,7 @@ class ApplicationSetting < ApplicationRecord
validates :gitaly_timeout_fast, validates :gitaly_timeout_fast,
presence: true, presence: true,
if: :gitaly_timeout_fast_changed?,
numericality: { only_integer: true, greater_than_or_equal_to: 0 } numericality: { only_integer: true, greater_than_or_equal_to: 0 }
validates :gitaly_timeout_fast, validates :gitaly_timeout_fast,
numericality: { less_than_or_equal_to: :gitaly_timeout_default }, numericality: { less_than_or_equal_to: :gitaly_timeout_default },
......
...@@ -107,7 +107,7 @@ class ProjectWiki ...@@ -107,7 +107,7 @@ class ProjectWiki
direction_desc: direction == DIRECTION_DESC, direction_desc: direction == DIRECTION_DESC,
load_content: load_content load_content: load_content
).map do |page| ).map do |page|
WikiPage.new(self, page, true) WikiPage.new(self, page)
end end
end end
...@@ -122,7 +122,7 @@ class ProjectWiki ...@@ -122,7 +122,7 @@ class ProjectWiki
page_title, page_dir = page_title_and_dir(title) page_title, page_dir = page_title_and_dir(title)
if page = wiki.page(title: page_title, version: version, dir: page_dir) if page = wiki.page(title: page_title, version: version, dir: page_dir)
WikiPage.new(self, page, true) WikiPage.new(self, page)
end end
end end
......
...@@ -70,10 +70,9 @@ class WikiPage ...@@ -70,10 +70,9 @@ class WikiPage
Gitlab::HookData::WikiPageBuilder.new(self).build Gitlab::HookData::WikiPageBuilder.new(self).build
end end
def initialize(wiki, page = nil, persisted = false) def initialize(wiki, page = nil)
@wiki = wiki @wiki = wiki
@page = page @page = page
@persisted = persisted
@attributes = {}.with_indifferent_access @attributes = {}.with_indifferent_access
set_attributes if persisted? set_attributes if persisted?
...@@ -94,11 +93,7 @@ class WikiPage ...@@ -94,11 +93,7 @@ class WikiPage
# The formatted title of this page. # The formatted title of this page.
def title def title
if @attributes[:title] @attributes[:title] || ''
CGI.unescape_html(self.class.unhyphenize(@attributes[:title]))
else
""
end
end end
# Sets the title of this page. # Sets the title of this page.
...@@ -176,7 +171,7 @@ class WikiPage ...@@ -176,7 +171,7 @@ class WikiPage
# Returns boolean True or False if this instance # Returns boolean True or False if this instance
# has been fully created on disk or not. # has been fully created on disk or not.
def persisted? def persisted?
@persisted == true @page.present?
end end
# Creates a new Wiki Page. # Creates a new Wiki Page.
...@@ -196,7 +191,7 @@ class WikiPage ...@@ -196,7 +191,7 @@ class WikiPage
def create(attrs = {}) def create(attrs = {})
update_attributes(attrs) update_attributes(attrs)
save(page_details: title) do save do
wiki.create_page(title, content, format, attrs[:message]) wiki.create_page(title, content, format, attrs[:message])
end end
end end
...@@ -222,18 +217,12 @@ class WikiPage ...@@ -222,18 +217,12 @@ class WikiPage
update_attributes(attrs) update_attributes(attrs)
if title_changed? if title.present? && title_changed? && wiki.find_page(title).present?
page_details = title @attributes[:title] = @page.title
raise PageRenameError
if wiki.find_page(page_details).present?
@attributes[:title] = @page.url_path
raise PageRenameError
end
else
page_details = @page.url_path
end end
save(page_details: page_details) do save do
wiki.update_page( wiki.update_page(
@page, @page,
content: content, content: content,
...@@ -266,7 +255,14 @@ class WikiPage ...@@ -266,7 +255,14 @@ class WikiPage
end end
def title_changed? def title_changed?
title.present? && (@page.nil? || self.class.unhyphenize(@page.url_path) != title) if persisted?
old_title, old_dir = wiki.page_title_and_dir(self.class.unhyphenize(@page.url_path))
new_title, new_dir = wiki.page_title_and_dir(title)
new_title != old_title || (title.include?('/') && new_dir != old_dir)
else
title.present?
end
end end
# Updates the current @attributes hash by merging a hash of params # Updates the current @attributes hash by merging a hash of params
...@@ -313,26 +309,24 @@ class WikiPage ...@@ -313,26 +309,24 @@ class WikiPage
attributes[:format] = @page.format attributes[:format] = @page.format
end end
def save(page_details:) def save
return unless valid? return false unless valid?
unless yield unless yield
errors.add(:base, wiki.error_message) errors.add(:base, wiki.error_message)
return false return false
end end
page_title, page_dir = wiki.page_title_and_dir(page_details) @page = wiki.find_page(title).page
gitlab_git_wiki = wiki.wiki
@page = gitlab_git_wiki.page(title: page_title, dir: page_dir)
set_attributes set_attributes
@persisted = errors.blank?
true
end end
def validate_path_limits def validate_path_limits
*dirnames, title = @attributes[:title].split('/') *dirnames, title = @attributes[:title].split('/')
if title.bytesize > MAX_TITLE_BYTES if title && title.bytesize > MAX_TITLE_BYTES
errors.add(:title, _("exceeds the limit of %{bytes} bytes") % { bytes: MAX_TITLE_BYTES }) errors.add(:title, _("exceeds the limit of %{bytes} bytes") % { bytes: MAX_TITLE_BYTES })
end end
......
...@@ -10,6 +10,11 @@ ...@@ -10,6 +10,11 @@
= _('Analytics') = _('Analytics')
%ul.sidebar-sub-level-items{ data: { qa_selector: 'analytics_sidebar_submenu' } } %ul.sidebar-sub-level-items{ data: { qa_selector: 'analytics_sidebar_submenu' } }
= nav_link(path: navbar_links.first.path, html_options: { class: "fly-out-top-item" } ) do
= link_to navbar_links.first.link do
%strong.fly-out-top-item-name
= _('Analytics')
%li.divider.fly-out-top-item
- navbar_links.each do |menu_item| - navbar_links.each do |menu_item|
= nav_link(path: menu_item.path) do = nav_link(path: menu_item.path) do
= link_to(menu_item.link, menu_item.link_to_options) do = link_to(menu_item.link, menu_item.link_to_options) do
......
...@@ -19,9 +19,3 @@ ...@@ -19,9 +19,3 @@
%p.prepend-top-default %p.prepend-top-default
= _("You must have permission to create a project in a namespace before forking.") = _("You must have permission to create a project in a namespace before forking.")
.save-project-loader.hide.js-fork-content
%h2.text-center
= icon('spinner spin')
= _("Forking repository")
%p.text-center
= _("Please wait a moment, this page will automatically refresh when ready.")
...@@ -12,7 +12,7 @@ ...@@ -12,7 +12,7 @@
.form-group.row .form-group.row
.col-sm-12= f.label :title, class: 'control-label-full-width' .col-sm-12= f.label :title, class: 'control-label-full-width'
.col-sm-12 .col-sm-12
= f.text_field :title, class: 'form-control qa-wiki-title-textbox', value: @page.title, required: true, autofocus: !@page.persisted?, placeholder: _('Wiki|Page title') = f.text_field :title, class: 'form-control qa-wiki-title-textbox', value: @page.title, required: true, autofocus: !@page.persisted?, placeholder: s_('Wiki|Page title')
%span.d-inline-block.mw-100.prepend-top-5 %span.d-inline-block.mw-100.prepend-top-5
= icon('lightbulb-o') = icon('lightbulb-o')
- if @page.persisted? - if @page.persisted?
......
---
title: Tweak wiki page title handling
merge_request: 25647
author:
type: changed
---
title: Fix ImportFailure when restoring the ci_pipelines:external_pull_request relation
merge_request: 26041
author:
type: fixed
---
title: Add title to Analytics sidebar menus
merge_request: 26265
author:
type: added
---
title: Remove .fa-spinner from app/views/projects/forks
merge_request: 25034
author: nuwe1
type: other
---
title: Add admin API endpoint to delete Sidekiq jobs matching metadata
merge_request: 25998
author:
type: added
---
title: Fix saving preferences with unrelated changes when Gitaly timeouts became invalid
merge_request: 26292
author:
type: fixed
---
title: All image diffs (except for renamed files) show the image file size in the
diff
merge_request: 25734
author:
type: added
---
title: Change tooltip text for pipeline on last commit widget
merge_request: 26315
author:
type: other
---
title: Allow creating default branch in snippet repositories
merge_request: 26294
author:
type: fixed
...@@ -858,7 +858,7 @@ you are seeing Gitaly errors. You can control the log level of the ...@@ -858,7 +858,7 @@ you are seeing Gitaly errors. You can control the log level of the
gRPC client with the `GRPC_LOG_LEVEL` environment variable. The gRPC client with the `GRPC_LOG_LEVEL` environment variable. The
default level is `WARN`. default level is `WARN`.
You can run a GRPC trace with: You can run a gRPC trace with:
```shell ```shell
GRPC_TRACE=all GRPC_VERBOSITY=DEBUG sudo gitlab-rake gitlab:gitaly:check GRPC_TRACE=all GRPC_VERBOSITY=DEBUG sudo gitlab-rake gitlab:gitaly:check
......
...@@ -30,6 +30,8 @@ The following metrics are available: ...@@ -30,6 +30,8 @@ The following metrics are available:
| `gitlab_cache_misses_total` | Counter | 10.2 | Cache read miss | controller, action | | `gitlab_cache_misses_total` | Counter | 10.2 | Cache read miss | controller, action |
| `gitlab_cache_operation_duration_seconds` | Histogram | 10.2 | Cache access time | | | `gitlab_cache_operation_duration_seconds` | Histogram | 10.2 | Cache access time | |
| `gitlab_cache_operations_total` | Counter | 12.2 | Cache operations by controller/action | controller, action, operation | | `gitlab_cache_operations_total` | Counter | 12.2 | Cache operations by controller/action | controller, action, operation |
| `job_waiter_started_total` | Counter | 12.9 | Number of batches of jobs started where a web request is waiting for the jobs to complete | worker |
| `job_waiter_timeouts_total` | Counter | 12.9 | Number of batches of jobs that timed out where a web request is waiting for the jobs to complete | worker |
| `gitlab_database_transaction_seconds` | Histogram | 12.1 | Time spent in database transactions, in seconds | | | `gitlab_database_transaction_seconds` | Histogram | 12.1 | Time spent in database transactions, in seconds | |
| `gitlab_method_call_duration_seconds` | Histogram | 10.2 | Method calls real duration | controller, action, module, method | | `gitlab_method_call_duration_seconds` | Histogram | 10.2 | Method calls real duration | controller, action, module, method |
| `gitlab_page_out_of_bounds` | Counter | 12.8 | Counter for the PageLimiter pagination limit being hit | controller, action, bot | | `gitlab_page_out_of_bounds` | Counter | 12.8 | Counter for the PageLimiter pagination limit being hit | controller, action, bot |
......
# Admin Sidekiq queues API
> **Note:** This feature was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/25998) in GitLab 12.9.
Delete jobs from a Sidekiq queue that match the given
[metadata](../development/logging.md#logging-context-metadata-through-rails-or-grape-requests).
The response has three fields:
1. `deleted_jobs` - the number of jobs deleted by the request.
1. `queue_size` - the remaining size of the queue after processing the
request.
1. `completed` - whether or not the request was able to process the
entire queue in time. If not, retrying with the same parameters may
delete further jobs (including those added after the first request
was issued).
This API endpoint is only available to admin users.
```plaintext
DELETE /admin/sidekiq/queues/:queue_name
```
| Attribute | Type | Required | Description |
| --------- | -------------- | -------- | ----------- |
| `queue_name` | string | yes | The name of the queue to delete jobs from |
| `user` | string | no | The username of the user who scheduled the jobs |
| `project` | string | no | The full path of the project where the jobs were scheduled from |
| `root_namespace` | string | no | The root namespace of the project |
| `subscription_plan` | string | no | The subscription plan of the root namespace (GitLab.com only) |
| `caller_id` | string | no | The endpoint or background job that scheduled the job (for example: `ProjectsController#create`, `/api/:version/projects/:id`, `PostReceive`) |
At least one attribute, other than `queue_name`, is required.
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/admin/sidekiq/queues/authorized_projects?user=root"
```
Example response:
```json
{
"completed": true,
"deleted_jobs": 7,
"queue_size": 14
}
```
...@@ -105,41 +105,42 @@ The following API resources are available in the group context: ...@@ -105,41 +105,42 @@ The following API resources are available in the group context:
The following API resources are available outside of project and group contexts (including `/users`): The following API resources are available outside of project and group contexts (including `/users`):
| Resource | Available endpoints | | Resource | Available endpoints |
|:--------------------------------------------------|:------------------------------------------------------------------------| |:---------------------------------------------------|:------------------------------------------------------------------------|
| [Appearance](appearance.md) **(CORE ONLY)** | `/application/appearance` | | [Admin Sidekiq queues](admin_sidekiq_queues.md) | `/admin/sidekiq/queues/:queue_name` |
| [Applications](applications.md) | `/applications` | | [Appearance](appearance.md) **(CORE ONLY)** | `/application/appearance` |
| [Applications](applications.md) | `/applications` |
| [Audit Events](audit_events.md) **(PREMIUM ONLY)** | `/audit_events` | | [Audit Events](audit_events.md) **(PREMIUM ONLY)** | `/audit_events` |
| [Avatar](avatar.md) | `/avatar` | | [Avatar](avatar.md) | `/avatar` |
| [Broadcast messages](broadcast_messages.md) | `/broadcast_messages` | | [Broadcast messages](broadcast_messages.md) | `/broadcast_messages` |
| [Code snippets](snippets.md) | `/snippets` | | [Code snippets](snippets.md) | `/snippets` |
| [Custom attributes](custom_attributes.md) | `/users/:id/custom_attributes` (also available for groups and projects) | | [Custom attributes](custom_attributes.md) | `/users/:id/custom_attributes` (also available for groups and projects) |
| [Deploy keys](deploy_keys.md) | `/deploy_keys` (also available for projects) | | [Deploy keys](deploy_keys.md) | `/deploy_keys` (also available for projects) |
| [Events](events.md) | `/events`, `/users/:id/events` (also available for projects) | | [Events](events.md) | `/events`, `/users/:id/events` (also available for projects) |
| [Feature flags](features.md) | `/features` | | [Feature flags](features.md) | `/features` |
| [Geo Nodes](geo_nodes.md) **(PREMIUM ONLY)** | `/geo_nodes` | | [Geo Nodes](geo_nodes.md) **(PREMIUM ONLY)** | `/geo_nodes` |
| [Import repository from GitHub](import.md) | `/import/github` | | [Import repository from GitHub](import.md) | `/import/github` |
| [Issues](issues.md) | `/issues` (also available for groups and projects) | | [Issues](issues.md) | `/issues` (also available for groups and projects) |
| [Issues Statistics](issues_statistics.md) | `/issues_statistics` (also available for groups and projects) | | [Issues Statistics](issues_statistics.md) | `/issues_statistics` (also available for groups and projects) |
| [Keys](keys.md) | `/keys` | | [Keys](keys.md) | `/keys` |
| [License](license.md) **(CORE ONLY)** | `/license` | | [License](license.md) **(CORE ONLY)** | `/license` |
| [Markdown](markdown.md) | `/markdown` | | [Markdown](markdown.md) | `/markdown` |
| [Merge requests](merge_requests.md) | `/merge_requests` (also available for groups and projects) | | [Merge requests](merge_requests.md) | `/merge_requests` (also available for groups and projects) |
| [Namespaces](namespaces.md) | `/namespaces` | | [Namespaces](namespaces.md) | `/namespaces` |
| [Notification settings](notification_settings.md) | `/notification_settings` (also available for groups and projects) | | [Notification settings](notification_settings.md) | `/notification_settings` (also available for groups and projects) |
| [Pages domains](pages_domains.md) | `/pages/domains` (also available for projects) | | [Pages domains](pages_domains.md) | `/pages/domains` (also available for projects) |
| [Projects](projects.md) | `/users/:id/projects` (also available for projects) | | [Projects](projects.md) | `/users/:id/projects` (also available for projects) |
| [Runners](runners.md) | `/runners` (also available for projects) | | [Runners](runners.md) | `/runners` (also available for projects) |
| [Search](search.md) | `/search` (also available for groups and projects) | | [Search](search.md) | `/search` (also available for groups and projects) |
| [Settings](settings.md) **(CORE ONLY)** | `/application/settings` | | [Settings](settings.md) **(CORE ONLY)** | `/application/settings` |
| [Statistics](statistics.md) | `/application/statistics` | | [Statistics](statistics.md) | `/application/statistics` |
| [Sidekiq metrics](sidekiq_metrics.md) | `/sidekiq` | | [Sidekiq metrics](sidekiq_metrics.md) | `/sidekiq` |
| [Suggestions](suggestions.md) | `/suggestions` | | [Suggestions](suggestions.md) | `/suggestions` |
| [System hooks](system_hooks.md) | `/hooks` | | [System hooks](system_hooks.md) | `/hooks` |
| [Todos](todos.md) | `/todos` | | [Todos](todos.md) | `/todos` |
| [Users](users.md) | `/users` | | [Users](users.md) | `/users` |
| [Validate `.gitlab-ci.yml` file](lint.md) | `/lint` | | [Validate `.gitlab-ci.yml` file](lint.md) | `/lint` |
| [Version](version.md) | `/version` | | [Version](version.md) | `/version` |
## Templates API resources ## Templates API resources
......
...@@ -38,6 +38,66 @@ type AddAwardEmojiPayload { ...@@ -38,6 +38,66 @@ type AddAwardEmojiPayload {
errors: [String!]! errors: [String!]!
} }
"""
Autogenerated input type of AdminSidekiqQueuesDeleteJobs
"""
input AdminSidekiqQueuesDeleteJobsInput {
"""
Delete jobs matching caller_id in the context metadata
"""
callerId: String
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Delete jobs matching project in the context metadata
"""
project: String
"""
The name of the queue to delete jobs from
"""
queueName: String!
"""
Delete jobs matching root_namespace in the context metadata
"""
rootNamespace: String
"""
Delete jobs matching subscription_plan in the context metadata
"""
subscriptionPlan: String
"""
Delete jobs matching user in the context metadata
"""
user: String
}
"""
Autogenerated return type of AdminSidekiqQueuesDeleteJobs
"""
type AdminSidekiqQueuesDeleteJobsPayload {
"""
A unique identifier for the client performing the mutation.
"""
clientMutationId: String
"""
Reasons why the mutation failed.
"""
errors: [String!]!
"""
Information about the status of the deletion request
"""
result: DeleteJobsResponse
}
""" """
An emoji awarded by a user. An emoji awarded by a user.
""" """
...@@ -601,6 +661,26 @@ type CreateSnippetPayload { ...@@ -601,6 +661,26 @@ type CreateSnippetPayload {
snippet: Snippet snippet: Snippet
} }
"""
The response from the AdminSidekiqQueuesDeleteJobs mutation.
"""
type DeleteJobsResponse {
"""
Whether or not the entire queue was processed in time; if not, retrying the same request is safe
"""
completed: Boolean
"""
The number of matching jobs deleted
"""
deletedJobs: Int
"""
The queue size after processing
"""
queueSize: Int
}
""" """
A single design A single design
""" """
...@@ -4767,6 +4847,7 @@ enum MoveType { ...@@ -4767,6 +4847,7 @@ enum MoveType {
type Mutation { type Mutation {
addAwardEmoji(input: AddAwardEmojiInput!): AddAwardEmojiPayload addAwardEmoji(input: AddAwardEmojiInput!): AddAwardEmojiPayload
adminSidekiqQueuesDeleteJobs(input: AdminSidekiqQueuesDeleteJobsInput!): AdminSidekiqQueuesDeleteJobsPayload
createDiffNote(input: CreateDiffNoteInput!): CreateDiffNotePayload createDiffNote(input: CreateDiffNoteInput!): CreateDiffNotePayload
createEpic(input: CreateEpicInput!): CreateEpicPayload createEpic(input: CreateEpicInput!): CreateEpicPayload
createImageDiffNote(input: CreateImageDiffNoteInput!): CreateImageDiffNotePayload createImageDiffNote(input: CreateImageDiffNoteInput!): CreateImageDiffNotePayload
......
...@@ -19106,6 +19106,33 @@ ...@@ -19106,6 +19106,33 @@
"isDeprecated": false, "isDeprecated": false,
"deprecationReason": null "deprecationReason": null
}, },
{
"name": "adminSidekiqQueuesDeleteJobs",
"description": null,
"args": [
{
"name": "input",
"description": null,
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsInput",
"ofType": null
}
},
"defaultValue": null
}
],
"type": {
"kind": "OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsPayload",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{ {
"name": "createDiffNote", "name": "createDiffNote",
"description": null, "description": null,
...@@ -19978,6 +20005,213 @@ ...@@ -19978,6 +20005,213 @@
"enumValues": null, "enumValues": null,
"possibleTypes": null "possibleTypes": null
}, },
{
"kind": "OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsPayload",
"description": "Autogenerated return type of AdminSidekiqQueuesDeleteJobs",
"fields": [
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "errors",
"description": "Reasons why the mutation failed.",
"args": [
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "result",
"description": "Information about the status of the deletion request",
"args": [
],
"type": {
"kind": "OBJECT",
"name": "DeleteJobsResponse",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "DeleteJobsResponse",
"description": "The response from the AdminSidekiqQueuesDeleteJobs mutation.",
"fields": [
{
"name": "completed",
"description": "Whether or not the entire queue was processed in time; if not, retrying the same request is safe",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Boolean",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "deletedJobs",
"description": "The number of matching jobs deleted",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "queueSize",
"description": "The queue size after processing",
"args": [
],
"type": {
"kind": "SCALAR",
"name": "Int",
"ofType": null
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [
],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "AdminSidekiqQueuesDeleteJobsInput",
"description": "Autogenerated input type of AdminSidekiqQueuesDeleteJobs",
"fields": null,
"inputFields": [
{
"name": "user",
"description": "Delete jobs matching user in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "project",
"description": "Delete jobs matching project in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "rootNamespace",
"description": "Delete jobs matching root_namespace in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "subscriptionPlan",
"description": "Delete jobs matching subscription_plan in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "callerId",
"description": "Delete jobs matching caller_id in the context metadata",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
},
{
"name": "queueName",
"description": "The name of the queue to delete jobs from",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "clientMutationId",
"description": "A unique identifier for the client performing the mutation.",
"type": {
"kind": "SCALAR",
"name": "String",
"ofType": null
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{ {
"kind": "OBJECT", "kind": "OBJECT",
"name": "AddAwardEmojiPayload", "name": "AddAwardEmojiPayload",
......
...@@ -26,6 +26,16 @@ Autogenerated return type of AddAwardEmoji ...@@ -26,6 +26,16 @@ Autogenerated return type of AddAwardEmoji
| `clientMutationId` | String | A unique identifier for the client performing the mutation. | | `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. | | `errors` | String! => Array | Reasons why the mutation failed. |
## AdminSidekiqQueuesDeleteJobsPayload
Autogenerated return type of AdminSidekiqQueuesDeleteJobs
| Name | Type | Description |
| --- | ---- | ---------- |
| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
| `errors` | String! => Array | Reasons why the mutation failed. |
| `result` | DeleteJobsResponse | Information about the status of the deletion request |
## AwardEmoji ## AwardEmoji
An emoji awarded by a user. An emoji awarded by a user.
...@@ -129,6 +139,16 @@ Autogenerated return type of CreateSnippet ...@@ -129,6 +139,16 @@ Autogenerated return type of CreateSnippet
| `errors` | String! => Array | Reasons why the mutation failed. | | `errors` | String! => Array | Reasons why the mutation failed. |
| `snippet` | Snippet | The snippet after mutation | | `snippet` | Snippet | The snippet after mutation |
## DeleteJobsResponse
The response from the AdminSidekiqQueuesDeleteJobs mutation.
| Name | Type | Description |
| --- | ---- | ---------- |
| `completed` | Boolean | Whether or not the entire queue was processed in time; if not, retrying the same request is safe |
| `deletedJobs` | Int | The number of matching jobs deleted |
| `queueSize` | Int | The queue size after processing |
## Design ## Design
A single design A single design
......
...@@ -118,3 +118,11 @@ different components are making use of. ...@@ -118,3 +118,11 @@ different components are making use of.
[Entity]: https://gitlab.com/gitlab-org/gitlab/blob/master/lib/api/entities.rb [Entity]: https://gitlab.com/gitlab-org/gitlab/blob/master/lib/api/entities.rb
[validation, and coercion of the parameters]: https://github.com/ruby-grape/grape#parameter-validation-and-coercion [validation, and coercion of the parameters]: https://github.com/ruby-grape/grape#parameter-validation-and-coercion
[installing GitLab under a relative URL]: https://docs.gitlab.com/ee/install/relative_url.html [installing GitLab under a relative URL]: https://docs.gitlab.com/ee/install/relative_url.html
## Testing
When writing tests for new API endpoints, consider using a schema [fixture](./testing_guide/best_practices.md#fixtures) located in `/spec/fixtures/api/schemas`. You can `expect` a response to match a given schema:
```ruby
expect(response).to match_response_schema('merge_requests')
```
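
A slightly fuller sketch of how this is typically used in a request spec. The endpoint, factory, and schema name below are placeholders for illustration, assuming the standard `api` and `have_gitlab_http_status` spec helpers:

```ruby
# frozen_string_literal: true

require 'spec_helper'

# Hypothetical request spec; the endpoint and schema name are examples only.
describe API::MergeRequests do
  let(:user) { create(:user) }

  it 'returns a response matching the merge_requests schema' do
    get api('/merge_requests', user)

    expect(response).to have_gitlab_http_status(:ok)
    expect(response).to match_response_schema('merge_requests')
  end
end
```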
...@@ -202,13 +202,10 @@ create the actual RDS instance. ...@@ -202,13 +202,10 @@ create the actual RDS instance.
### RDS Subnet Group ### RDS Subnet Group
1. Navigate to the RDS dashboard and select **Subnet Groups** from the left menu. 1. Navigate to the RDS dashboard and select **Subnet Groups** from the left menu.
1. Give it a name (`gitlab-rds-group`), a description, and choose the VPC from 1. Click on **Create DB Subnet Group**.
the VPC dropdown. 1. Under **Subnet group details**, enter a name (we'll use `gitlab-rds-group`), a description, and choose the `gitlab-vpc` from the VPC dropdown.
1. Click "Add all the subnets related to this VPC" and 1. Under **Add subnets**, click **Add all the subnets related to this VPC** and remove the public ones, we only want the **private subnets**. In the end, you should see `10.0.1.0/24` and `10.0.3.0/24` (as we defined them in the [subnets section](#subnets)).
remove the public ones, we only want the **private subnets**. 1. Click **Create** when ready.
In the end, you should see `10.0.1.0/24` and `10.0.3.0/24` (as
we defined them in the [subnets section](#subnets)).
Click **Create** when ready.
![RDS Subnet Group](img/rds_subnet_group.png) ![RDS Subnet Group](img/rds_subnet_group.png)
...@@ -217,33 +214,31 @@ create the actual RDS instance. ...@@ -217,33 +214,31 @@ create the actual RDS instance.
Now, it's time to create the database: Now, it's time to create the database:
1. Select **Databases** from the left menu and click **Create database**. 1. Select **Databases** from the left menu and click **Create database**.
1. Select PostgreSQL and click **Next**. 1. Select **Standard Create** for the database creation method.
1. Since this is a production server, let's choose "Production". Click **Next**. 1. Select **PostgreSQL** as the database engine and select **PostgreSQL 10.9-R1** from the version dropdown menu (check the [database requirements](../../install/requirements.md#postgresql-requirements) to see if there are any updates on this for your chosen version of GitLab).
1. Let's see the instance specifications: 1. Since this is a production server, let's choose **Production** from the **Templates** section.
1. Leave the license model as is (`postgresql-license`). 1. Under **Settings**, set a DB instance identifier, a master username, and a master password. We'll use `gitlab-db-ha`, `gitlab`, and a very secure password respectively. Make a note of these as we'll need them later.
1. For the version, select the latest of the 9.6 series (check the 1. For the DB instance size, select **Standard classes** and select an instance size that meets your requirements from the dropdown menu. We'll use a `db.m4.large` instance.
[database requirements](../../install/requirements.md#postgresql-requirements)) 1. Under **Storage**, configure the following:
if there are any updates on this). 1. Select **Provisioned IOPS (SSD)** from the storage type dropdown menu. Provisioned IOPS (SSD) storage is best suited for HA (though you can choose General Purpose (SSD) to reduce the costs). Read more about it at [Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html).
1. For the size, let's select a `t2.medium` instance. 1. Allocate storage and set provisioned IOPS. We'll use the minimum values, `100` and `1000`, respectively.
1. Multi-AZ-deployment is recommended as redundancy, so choose "Create 1. Enable storage autoscaling (optional) and set a maximum storage threshold.
replica in different zone". Read more at 1. Under **Availability & durability**, select **Create a standby instance** to have a standby RDS instance provisioned in a different Availability Zone. Read more at [High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html).
[High Availability (Multi-AZ)](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/Concepts.MultiAZ.html). 1. Under **Connectivity**, configure the following:
1. A Provisioned IOPS (SSD) storage type is best suited for HA (though you can 1. Select the VPC we created earlier (`gitlab-vpc`) from the **Virtual Private Cloud (VPC)** dropdown menu.
choose a General Purpose (SSD) to reduce the costs). Read more about it at 1. Expand the **Additional connectivity configuration** section and select the subnet group (`gitlab-rds-group`) we created earlier.
[Storage for Amazon RDS](https://docs.aws.amazon.com/AmazonRDS/latest/UserGuide/CHAP_Storage.html). 1. Set public accessibility to **No**.
1. Under **VPC security group**, select **Create new** and enter a name. We'll use `gitlab-rds-sec-group`.
1. The rest of the settings on this page request a DB instance identifier, username 1. Leave the database port as the default `5432`.
and a master password. We've chosen to use `gitlab-db-ha`, `gitlab` and a 1. For **Database authentication**, select **Password authentication**.
very secure password respectively. Keep these in hand for later. 1. Expand the **Additional configuration** section and complete the following:
1. Click **Next** to proceed to the advanced settings. 1. The initial database name. We'll use `gitlabhq_production`.
1. Make sure to choose our GitLab VPC, our subnet group, set public accessibility to 1. Configure your preferred backup settings.
**No**, and to leave it to create a new security group. The only additional 1. The only other change we'll make here is to disable auto minor version updates under **Maintenance**.
change which will be helpful is the database name for which we can use 1. Leave all the other settings as is or tweak according to your needs.
`gitlabhq_production`. At the very bottom, there's an option to enable 1. Once you're happy, click **Create database**.
auto updates to minor versions. You may want to turn it off.
1. When done, click **Create database**. Now that the database is created, let's move on to setting up Redis with ElastiCache.
Now that the database is created, let's move on to setting up Redis with ElasticCache.
## Redis with ElastiCache ## Redis with ElastiCache
...@@ -311,7 +306,7 @@ On the EC2 dashboard, look for Load Balancer in the left navigation bar: ...@@ -311,7 +306,7 @@ On the EC2 dashboard, look for Load Balancer in the left navigation bar:
1. For **Ping Path**, enter `/explore`. (We use `/explore` as it's a public endpoint that does 1. For **Ping Path**, enter `/explore`. (We use `/explore` as it's a public endpoint that does
not require authorization.) not require authorization.)
1. Keep the default **Advanced Details** or adjust them according to your needs. 1. Keep the default **Advanced Details** or adjust them according to your needs.
1. For now, don't click **Add EC2 Instances**, as we don't have any instances to add yet. Come back 1. Click **Add EC2 Instances** but, as we don't have any instances to add yet, come back
to your load balancer after creating your GitLab instances and add them. to your load balancer after creating your GitLab instances and add them.
1. Click **Add Tags** and add any tags you need. 1. Click **Add Tags** and add any tags you need.
1. Click **Review and Create**, review all your settings, and click **Create** if you're happy. 1. Click **Review and Create**, review all your settings, and click **Create** if you're happy.
......
---
type: reference
---
# Gitaly timeouts
![gitaly timeouts](img/gitaly_timeouts.png)
Three timeout types can be configured to make sure that long-running
Gitaly calls don't needlessly take up resources.

- Default timeout

  This timeout is the default for most Gitaly calls. It should be
  shorter than the worker timeout that can be configured for
  [Puma](https://docs.gitlab.com/omnibus/settings/puma.html#puma-settings)
  or [Unicorn](https://docs.gitlab.com/omnibus/settings/unicorn.html),
  so that Gitaly calls made within a web request cannot exceed the
  entire request timeout.

  The default for this timeout is 55 seconds.

- Fast timeout

  This is the timeout for very short Gitaly calls.

  The default for this timeout is 10 seconds.

- Medium timeout

  This timeout should sit between the default and the fast timeouts.

  The default for this timeout is 30 seconds.
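
These timeouts map to the `gitaly_timeout_default`, `gitaly_timeout_medium`, and `gitaly_timeout_fast` application settings validated in `ApplicationSetting` above, so they can also be adjusted from a Rails console. A minimal sketch, with illustrative values only:

```ruby
# Run from a Rails console (for example, `sudo gitlab-rails console`).
# Values are illustrative; medium and fast must not exceed the default.
settings = ApplicationSetting.current

settings.update!(
  gitaly_timeout_default: 55, # upper bound for most Gitaly calls
  gitaly_timeout_medium: 30,  # for moderately expensive calls
  gitaly_timeout_fast: 10     # for very short calls
)
```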
...@@ -24,6 +24,7 @@ include: ...@@ -24,6 +24,7 @@ include:
- [Protected paths](protected_paths.md) **(CORE ONLY)** - [Protected paths](protected_paths.md) **(CORE ONLY)**
- [Help messages for the `/help` page and the login page](help_page.md) - [Help messages for the `/help` page and the login page](help_page.md)
- [Push event activities limit and bulk push events](push_event_activities_limit.md) - [Push event activities limit and bulk push events](push_event_activities_limit.md)
- [Gitaly timeouts](gitaly_timeouts.md)
NOTE: **Note:** NOTE: **Note:**
You can change the [first day of the week](../../profile/preferences.md) for the entire GitLab instance You can change the [first day of the week](../../profile/preferences.md) for the entire GitLab instance
......
...@@ -65,7 +65,7 @@ To enable Container Scanning in your pipeline, you need: ...@@ -65,7 +65,7 @@ To enable Container Scanning in your pipeline, you need:
services: services:
- docker:19.03.1-dind - docker:19.03.1-dind
variables: variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_REF_SHA IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
script: script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build -t $IMAGE_TAG . - docker build -t $IMAGE_TAG .
......
...@@ -116,6 +116,7 @@ The following table depicts the various user permission levels in a project. ...@@ -116,6 +116,7 @@ The following table depicts the various user permission levels in a project.
| Turn on/off protected branch push for devs | | | | ✓ | ✓ | | Turn on/off protected branch push for devs | | | | ✓ | ✓ |
| Enable/disable tag protections | | | | ✓ | ✓ | | Enable/disable tag protections | | | | ✓ | ✓ |
| Edit project | | | | ✓ | ✓ | | Edit project | | | | ✓ | ✓ |
| Edit project badges | | | | ✓ | ✓ |
| Add deploy keys to project | | | | ✓ | ✓ | | Add deploy keys to project | | | | ✓ | ✓ |
| Configure project hooks | | | | ✓ | ✓ | | Configure project hooks | | | | ✓ | ✓ |
| Manage Runners | | | | ✓ | ✓ | | Manage Runners | | | | ✓ | ✓ |
......
...@@ -12,7 +12,7 @@ or ways to contact the project maintainers. ...@@ -12,7 +12,7 @@ or ways to contact the project maintainers.
## Project badges ## Project badges
Badges can be added to a project and will then be visible on the project's overview page. Badges can be added to a project by Maintainers or Owners, and will then be visible on the project's overview page.
If you find that you have to add the same badges to several projects, you may want to add them at the [group level](#group-badges). If you find that you have to add the same badges to several projects, you may want to add them at the [group level](#group-badges).
To add a new badge to a project: To add a new badge to a project:
......
...@@ -60,6 +60,14 @@ if you clone the wiki repository locally. All uploaded files prior to GitLab ...@@ -60,6 +60,14 @@ if you clone the wiki repository locally. All uploaded files prior to GitLab
11.3 are stored in GitLab itself. If you want them to be part of the wiki's Git 11.3 are stored in GitLab itself. If you want them to be part of the wiki's Git
repository, you will have to upload them again. repository, you will have to upload them again.
### Special characters in page titles
Wiki pages are stored as files in a Git repository, so certain characters have a special meaning:
- Spaces are converted into hyphens when storing a page.
- Hyphens (`-`) are converted back into spaces when displaying a page.
- Slashes (`/`) can't be used, because they're used as a path separator.
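
A rough illustration of the space/hyphen round trip (this is not the actual implementation, just the observable behaviour described above):

```ruby
# Storing a page replaces spaces in the title with hyphens;
# displaying it converts the hyphens back into spaces.
stored_name   = 'My wiki page'.tr(' ', '-') # => "My-wiki-page"
display_title = stored_name.tr('-', ' ')    # => "My wiki page"
```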
### Length restrictions for file and directory names ### Length restrictions for file and directory names
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24364) in GitLab 12.8. > [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/24364) in GitLab 12.8.
......
# frozen_string_literal: true
module API
module Admin
class Sidekiq < Grape::API
before { authenticated_as_admin! }
namespace 'admin' do
namespace 'sidekiq' do
namespace 'queues' do
desc 'Drop jobs matching the given metadata from the Sidekiq queue'
params do
Labkit::Context::KNOWN_KEYS.each do |key|
optional key, type: String, allow_blank: false
end
at_least_one_of(*Labkit::Context::KNOWN_KEYS)
end
delete ':queue_name' do
result =
Gitlab::SidekiqQueue
.new(params[:queue_name])
.drop_jobs!(declared_params, timeout: 30)
present result
rescue Gitlab::SidekiqQueue::NoMetadataError
render_api_error!("Invalid metadata: #{declared_params}", 400)
rescue Gitlab::SidekiqQueue::InvalidQueueError
not_found!(params[:queue_name])
end
end
end
end
end
end
end
...@@ -110,6 +110,7 @@ module API ...@@ -110,6 +110,7 @@ module API
# Keep in alphabetical order # Keep in alphabetical order
mount ::API::AccessRequests mount ::API::AccessRequests
mount ::API::Admin::Sidekiq
mount ::API::Appearance mount ::API::Appearance
mount ::API::Applications mount ::API::Applications
mount ::API::Avatar mount ::API::Avatar
......
...@@ -17,7 +17,7 @@ module Gitlab ...@@ -17,7 +17,7 @@ module Gitlab
(start_id..stop_id).each_slice(QUERY_ITEM_SIZE).each do |range| (start_id..stop_id).each_slice(QUERY_ITEM_SIZE).each do |range|
model model
.where(lock_version: nil) .where(lock_version: nil)
.where(id: range) .where("ID BETWEEN ? AND ?", range.first, range.last)
.update_all(lock_version: 0) .update_all(lock_version: 0)
end end
end end
......
...@@ -3,6 +3,7 @@ ...@@ -3,6 +3,7 @@
module Gitlab module Gitlab
module Checks module Checks
class SnippetCheck < BaseChecker class SnippetCheck < BaseChecker
DEFAULT_BRANCH = 'master'.freeze
ERROR_MESSAGES = { ERROR_MESSAGES = {
create_delete_branch: 'You can not create or delete branches.' create_delete_branch: 'You can not create or delete branches.'
}.freeze }.freeze
...@@ -29,6 +30,12 @@ module Gitlab ...@@ -29,6 +30,12 @@ module Gitlab
true true
end end
private
def creation?
@branch_name != DEFAULT_BRANCH && super
end
end end
end end
end end
...@@ -65,6 +65,7 @@ tree: ...@@ -65,6 +65,7 @@ tree:
- resource_label_events: - resource_label_events:
- label: - label:
- :priorities - :priorities
- :external_pull_requests
- ci_pipelines: - ci_pipelines:
- notes: - notes:
- :author - :author
...@@ -74,7 +75,6 @@ tree: ...@@ -74,7 +75,6 @@ tree:
- :statuses - :statuses
- :external_pull_request - :external_pull_request
- :merge_request - :merge_request
- :external_pull_requests
- :auto_devops - :auto_devops
- :triggers - :triggers
- :pipeline_schedules - :pipeline_schedules
......
...@@ -51,6 +51,8 @@ module Gitlab ...@@ -51,6 +51,8 @@ module Gitlab
epic epic
ProjectCiCdSetting ProjectCiCdSetting
container_expiration_policy container_expiration_policy
external_pull_request
external_pull_requests
].freeze ].freeze
def create def create
......
# frozen_string_literal: true
module Gitlab
class SidekiqQueue
include Gitlab::Utils::StrongMemoize
NoMetadataError = Class.new(StandardError)
InvalidQueueError = Class.new(StandardError)
attr_reader :queue_name
def initialize(queue_name)
@queue_name = queue_name
end
def drop_jobs!(search_metadata, timeout:)
completed = false
deleted_jobs = 0
job_search_metadata =
search_metadata
.stringify_keys
.slice(*Labkit::Context::KNOWN_KEYS)
.transform_keys { |key| "meta.#{key}" }
.compact
raise NoMetadataError if job_search_metadata.empty?
raise InvalidQueueError unless queue
begin
Timeout.timeout(timeout) do
queue.each do |job|
next unless job_matches?(job, job_search_metadata)
job.delete
deleted_jobs += 1
end
completed = true
end
rescue Timeout::Error
end
{
completed: completed,
deleted_jobs: deleted_jobs,
queue_size: queue.size
}
end
def queue
strong_memoize(:queue) do
# Sidekiq::Queue.new always returns a queue, even if it doesn't
# exist.
Sidekiq::Queue.all.find { |queue| queue.name == queue_name }
end
end
def job_matches?(job, job_search_metadata)
job_search_metadata.all? { |key, value| job[key] == value }
end
end
end
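
A minimal usage sketch based on the public interface above; the queue name, metadata, and return values are illustrative:

```ruby
# Drop jobs from the 'authorized_projects' queue that carry
# `meta.user == 'root'`, giving up after 30 seconds.
queue = Gitlab::SidekiqQueue.new('authorized_projects')

result = queue.drop_jobs!({ user: 'root' }, timeout: 30)
# => { completed: true, deleted_jobs: 7, queue_size: 14 }
```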
...@@ -4993,9 +4993,6 @@ msgstr "" ...@@ -4993,9 +4993,6 @@ msgstr ""
msgid "Commits|An error occurred while fetching merge requests data." msgid "Commits|An error occurred while fetching merge requests data."
msgstr "" msgstr ""
msgid "Commits|Commit: %{commitText}"
msgstr ""
msgid "Commits|History" msgid "Commits|History"
msgstr "" msgstr ""
...@@ -8774,9 +8771,6 @@ msgstr "" ...@@ -8774,9 +8771,6 @@ msgstr ""
msgid "Forking in progress" msgid "Forking in progress"
msgstr "" msgstr ""
msgid "Forking repository"
msgstr ""
msgid "Forks" msgid "Forks"
msgstr "" msgstr ""
......
...@@ -16,7 +16,7 @@ FactoryBot.define do ...@@ -16,7 +16,7 @@ FactoryBot.define do
page { OpenStruct.new(url_path: 'some-name') } page { OpenStruct.new(url_path: 'some-name') }
association :wiki, factory: :project_wiki, strategy: :build association :wiki, factory: :project_wiki, strategy: :build
initialize_with { new(wiki, page, true) } initialize_with { new(wiki, page) }
before(:create) do |page, evaluator| before(:create) do |page, evaluator|
page.attributes = evaluator.attrs page.attributes = evaluator.attrs
......
...@@ -139,11 +139,6 @@ describe 'Project active tab' do ...@@ -139,11 +139,6 @@ describe 'Project active tab' do
it_behaves_like 'page has active sub tab', _('Repository Analytics') it_behaves_like 'page has active sub tab', _('Repository Analytics')
end end
context 'on project Analytics/Repository Analytics' do
it_behaves_like 'page has active tab', _('Analytics')
it_behaves_like 'page has active sub tab', _('Repository Analytics')
end
context 'on project Analytics/Cycle Analytics' do context 'on project Analytics/Cycle Analytics' do
before do before do
click_tab(_('CI / CD Analytics')) click_tab(_('CI / CD Analytics'))
......
...@@ -14,7 +14,7 @@ describe 'Projects > Show > User sees last commit CI status' do ...@@ -14,7 +14,7 @@ describe 'Projects > Show > User sees last commit CI status' do
page.within '.commit-detail' do page.within '.commit-detail' do
expect(page).to have_content(project.commit.sha[0..6]) expect(page).to have_content(project.commit.sha[0..6])
expect(page).to have_selector('[aria-label="Commit: skipped"]') expect(page).to have_selector('[aria-label="Pipeline: skipped"]')
end end
end end
end end
...@@ -33,6 +33,8 @@ describe 'User views a wiki page' do ...@@ -33,6 +33,8 @@ describe 'User views a wiki page' do
fill_in(:wiki_content, with: 'wiki content') fill_in(:wiki_content, with: 'wiki content')
click_on('Create page') click_on('Create page')
end end
expect(page).to have_content('Wiki was successfully updated.')
end end
it 'shows the history of a page that has a path' do it 'shows the history of a page that has a path' do
...@@ -62,8 +64,10 @@ describe 'User views a wiki page' do ...@@ -62,8 +64,10 @@ describe 'User views a wiki page' do
expect(page).to have_content('Edit Page') expect(page).to have_content('Edit Page')
fill_in('Content', with: 'Updated Wiki Content') fill_in('Content', with: 'Updated Wiki Content')
click_on('Save changes') click_on('Save changes')
expect(page).to have_content('Wiki was successfully updated.')
click_on('Page history') click_on('Page history')
page.within(:css, '.nav-text') do page.within(:css, '.nav-text') do
...@@ -132,6 +136,36 @@ describe 'User views a wiki page' do ...@@ -132,6 +136,36 @@ describe 'User views a wiki page' do
end end
end end
context 'when a page has special characters in its title' do
let(:title) { '<foo> !@#$%^&*()[]{}=_+\'"\\|<>? <bar>' }
before do
wiki_page.update(title: title)
end
it 'preserves the special characters' do
visit(project_wiki_path(project, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_css('.wiki-pages li', text: title)
end
end
context 'when a page has XSS in its title or content' do
let(:title) { '<script>alert("title")<script>' }
before do
wiki_page.update(title: title, content: 'foo <script>alert("content")</script> bar')
end
it 'safely displays the page' do
visit(project_wiki_path(project, wiki_page))
expect(page).to have_css('.wiki-page-title', text: title)
expect(page).to have_content('foo bar')
end
end
context 'when a page has XSS in its message' do context 'when a page has XSS in its message' do
before do before do
wiki_page.update(message: '<script>alert(true)<script>', content: 'XSS update') wiki_page.update(message: '<script>alert(true)<script>', content: 'XSS update')
......
{
"approvals_before_merge": 0,
"archived": false,
"auto_cancel_pending_pipelines": "enabled",
"autoclose_referenced_issues": true,
"boards": [],
"build_allow_git_fetch": true,
"build_coverage_regex": null,
"build_timeout": 3600,
"ci_cd_settings": {
"group_runners_enabled": true
},
"ci_config_path": null,
"ci_pipelines": [
{
"before_sha": "0000000000000000000000000000000000000000",
"committed_at": null,
"config_source": "repository_source",
"created_at": "2020-02-25T12:08:40.615Z",
"duration": 61,
"external_pull_request": {
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
},
"failure_reason": null,
"finished_at": "2020-02-25T12:09:44.464Z",
"id": 120842687,
"iid": 8,
"lock_version": 3,
"notes": [],
"project_id": 17121868,
"protected": false,
"ref": "new-branch",
"sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"source": "external_pull_request_event",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"stages": [],
"started_at": "2020-02-25T12:08:42.511Z",
"status": "success",
"tag": false,
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:09:44.473Z",
"user_id": 4087087,
"yaml_errors": null
},
{
"before_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"committed_at": null,
"config_source": "repository_source",
"created_at": "2020-02-25T12:08:37.434Z",
"duration": 57,
"external_pull_request": {
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
},
"failure_reason": null,
"finished_at": "2020-02-25T12:09:36.557Z",
"id": 120842675,
"iid": 7,
"lock_version": 3,
"notes": [],
"project_id": 17121868,
"protected": false,
"ref": "new-branch",
"sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"source": "external_pull_request_event",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"stages": [],
"started_at": "2020-02-25T12:08:38.682Z",
"status": "success",
"tag": false,
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:09:36.565Z",
"user_id": 4087087,
"yaml_errors": null
}
],
"custom_attributes": [],
"delete_error": null,
"description": "Vim, Tmux and others",
"disable_overriding_approvers_per_merge_request": null,
"external_authorization_classification_label": "",
"external_pull_requests": [
{
"created_at": "2020-02-25T12:08:40.478Z",
"id": 59023,
"project_id": 17121868,
"pull_request_iid": 4,
"source_branch": "new-branch",
"source_repository": "liptonshmidt/dotfiles",
"source_sha": "122bc4bbad5b6448089cacbe16d0bdc3534e7eda",
"status": "open",
"target_branch": "master",
"target_repository": "liptonshmidt/dotfiles",
"target_sha": "86ebe754fa12216e5c0d9d95890936e2fcc62392",
"updated_at": "2020-02-25T12:08:40.478Z"
}
],
"external_webhook_token": "D3mVYFzZkgZ5kMfcW_wx",
"issues": [],
"labels": [],
"milestones": [],
"pipeline_schedules": [],
"project_feature": {
"builds_access_level": 20,
"created_at": "2020-02-25T11:20:09.925Z",
"forking_access_level": 20,
"id": 17494715,
"issues_access_level": 0,
"merge_requests_access_level": 0,
"pages_access_level": 20,
"project_id": 17121868,
"repository_access_level": 20,
"snippets_access_level": 0,
"updated_at": "2020-02-25T11:20:10.376Z",
"wiki_access_level": 0
},
"public_builds": true,
"releases": [],
"shared_runners_enabled": true,
"snippets": [],
"triggers": [],
"visibility_level": 20
}
...@@ -68,10 +68,10 @@ exports[`Repository last commit component renders commit widget 1`] = ` ...@@ -68,10 +68,10 @@ exports[`Repository last commit component renders commit widget 1`] = `
<gl-link-stub <gl-link-stub
class="js-commit-pipeline" class="js-commit-pipeline"
href="https://test.com/pipeline" href="https://test.com/pipeline"
title="Commit: failed" title="Pipeline: failed"
> >
<ci-icon-stub <ci-icon-stub
aria-label="Commit: failed" aria-label="Pipeline: failed"
cssclasses="" cssclasses=""
size="24" size="24"
status="[object Object]" status="[object Object]"
...@@ -174,10 +174,10 @@ exports[`Repository last commit component renders the signature HTML as returned ...@@ -174,10 +174,10 @@ exports[`Repository last commit component renders the signature HTML as returned
<gl-link-stub <gl-link-stub
class="js-commit-pipeline" class="js-commit-pipeline"
href="https://test.com/pipeline" href="https://test.com/pipeline"
title="Commit: failed" title="Pipeline: failed"
> >
<ci-icon-stub <ci-icon-stub
aria-label="Commit: failed" aria-label="Pipeline: failed"
cssclasses="" cssclasses=""
size="24" size="24"
status="[object Object]" status="[object Object]"
......
...@@ -2,6 +2,8 @@ import $ from 'jquery'; ...@@ -2,6 +2,8 @@ import $ from 'jquery';
import Api from '~/api'; import Api from '~/api';
import Search from '~/pages/search/show/search'; import Search from '~/pages/search/show/search';
jest.mock('~/api');
describe('Search', () => { describe('Search', () => {
const fixturePath = 'search/show.html'; const fixturePath = 'search/show.html';
const searchTerm = 'some search'; const searchTerm = 'some search';
...@@ -19,20 +21,19 @@ describe('Search', () => { ...@@ -19,20 +21,19 @@ describe('Search', () => {
new Search(); // eslint-disable-line no-new new Search(); // eslint-disable-line no-new
}); });
it('requests groups from backend when filtering', done => { it('requests groups from backend when filtering', () => {
spyOn(Api, 'groups').and.callFake(term => { jest.spyOn(Api, 'groups').mockImplementation(term => {
expect(term).toBe(searchTerm); expect(term).toBe(searchTerm);
done();
}); });
const inputElement = fillDropdownInput('.js-search-group-dropdown'); const inputElement = fillDropdownInput('.js-search-group-dropdown');
$(inputElement).trigger('input'); $(inputElement).trigger('input');
}); });
it('requests projects from backend when filtering', done => { it('requests projects from backend when filtering', () => {
spyOn(Api, 'projects').and.callFake(term => { jest.spyOn(Api, 'projects').mockImplementation(term => {
expect(term).toBe(searchTerm); expect(term).toBe(searchTerm);
done();
}); });
const inputElement = fillDropdownInput('.js-search-project-dropdown'); const inputElement = fillDropdownInput('.js-search-project-dropdown');
......
import Vue from 'vue'; import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper'; import { compileToFunctions } from 'vue-template-compiler';
import { mount } from '@vue/test-utils';
import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants'; import { GREEN_BOX_IMAGE_URL, RED_BOX_IMAGE_URL } from 'spec/test_constants';
import imageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue'; import imageDiffViewer from '~/vue_shared/components/diff_viewer/viewers/image_diff_viewer.vue';
...@@ -9,50 +11,52 @@ describe('ImageDiffViewer', () => { ...@@ -9,50 +11,52 @@ describe('ImageDiffViewer', () => {
newPath: GREEN_BOX_IMAGE_URL, newPath: GREEN_BOX_IMAGE_URL,
oldPath: RED_BOX_IMAGE_URL, oldPath: RED_BOX_IMAGE_URL,
}; };
const allProps = {
...requiredProps,
oldSize: 2048,
newSize: 1024,
};
let wrapper;
let vm; let vm;
function createComponent(props) { function createComponent(props) {
const ImageDiffViewer = Vue.extend(imageDiffViewer); const ImageDiffViewer = Vue.extend(imageDiffViewer);
vm = mountComponent(ImageDiffViewer, props); wrapper = mount(ImageDiffViewer, { propsData: props });
vm = wrapper.vm;
} }
const triggerEvent = (eventName, el = vm.$el, clientX = 0) => { const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
const event = document.createEvent('MouseEvents'); const event = new MouseEvent(eventName, {
event.initMouseEvent( bubbles: true,
eventName, cancelable: true,
true, view: window,
true, detail: 1,
window, screenX: clientX,
1,
clientX, clientX,
0, });
clientX,
0, // JSDOM does not implement experimental APIs
false, event.pageX = clientX;
false,
false,
false,
0,
null,
);
el.dispatchEvent(event); el.dispatchEvent(event);
}; };
const dragSlider = (sliderElement, dragPixel = 20) => { const dragSlider = (sliderElement, doc, dragPixel) => {
triggerEvent('mousedown', sliderElement); triggerEvent('mousedown', sliderElement);
triggerEvent('mousemove', document.body, dragPixel); triggerEvent('mousemove', doc.body, dragPixel);
triggerEvent('mouseup', document.body); triggerEvent('mouseup', doc.body);
}; };
afterEach(() => { afterEach(() => {
vm.$destroy(); wrapper.destroy();
}); });
it('renders image diff for replaced', done => { it('renders image diff for replaced', done => {
createComponent(requiredProps); createComponent({ ...allProps });
vm.$nextTick(() => {
const metaInfoElements = vm.$el.querySelectorAll('.image-info');
setTimeout(() => {
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL); expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL); expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
...@@ -66,35 +70,35 @@ describe('ImageDiffViewer', () => { ...@@ -66,35 +70,35 @@ describe('ImageDiffViewer', () => {
'Onion skin', 'Onion skin',
); );
expect(metaInfoElements.length).toBe(2);
expect(metaInfoElements[0]).toHaveText('2.00 KiB');
expect(metaInfoElements[1]).toHaveText('1.00 KiB');
done(); done();
}); });
}); });
it('renders image diff for new', done => { it('renders image diff for new', done => {
createComponent( createComponent({ ...allProps, diffMode: 'new', oldPath: '' });
Object.assign({}, requiredProps, {
diffMode: 'new', setImmediate(() => {
oldPath: '', const metaInfoElement = vm.$el.querySelector('.image-info');
}),
);
setTimeout(() => {
expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL); expect(vm.$el.querySelector('.added img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(metaInfoElement).toHaveText('1.00 KiB');
done(); done();
}); });
}); });
it('renders image diff for deleted', done => { it('renders image diff for deleted', done => {
createComponent( createComponent({ ...allProps, diffMode: 'deleted', newPath: '' });
Object.assign({}, requiredProps, {
diffMode: 'deleted', setImmediate(() => {
newPath: '', const metaInfoElement = vm.$el.querySelector('.image-info');
}),
);
setTimeout(() => {
expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL); expect(vm.$el.querySelector('.deleted img').getAttribute('src')).toBe(RED_BOX_IMAGE_URL);
expect(metaInfoElement).toHaveText('2.00 KiB');
done(); done();
}); });
...@@ -105,26 +109,40 @@ describe('ImageDiffViewer', () => { ...@@ -105,26 +109,40 @@ describe('ImageDiffViewer', () => {
components: { components: {
imageDiffViewer, imageDiffViewer,
}, },
template: ` data: {
<image-diff-viewer diff-mode="renamed" new-path="${GREEN_BOX_IMAGE_URL}" old-path=""> ...allProps,
diffMode: 'renamed',
},
...compileToFunctions(`
<image-diff-viewer
:diff-mode="diffMode"
:new-path="newPath"
:old-path="oldPath"
:new-size="newSize"
:old-size="oldSize"
>
<span slot="image-overlay" class="overlay">test</span> <span slot="image-overlay" class="overlay">test</span>
</image-diff-viewer> </image-diff-viewer>
`, `),
}).$mount(); }).$mount();
setTimeout(() => { setImmediate(() => {
const metaInfoElement = vm.$el.querySelector('.image-info');
expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL); expect(vm.$el.querySelector('img').getAttribute('src')).toBe(GREEN_BOX_IMAGE_URL);
expect(vm.$el.querySelector('.overlay')).not.toBe(null); expect(vm.$el.querySelector('.overlay')).not.toBe(null);
expect(metaInfoElement).toHaveText('2.00 KiB');
done(); done();
}); });
}); });
describe('swipeMode', () => { describe('swipeMode', () => {
beforeEach(done => { beforeEach(done => {
createComponent(requiredProps); createComponent({ ...requiredProps });
setTimeout(() => { setImmediate(() => {
done(); done();
}); });
}); });
...@@ -141,9 +159,9 @@ describe('ImageDiffViewer', () => { ...@@ -141,9 +159,9 @@ describe('ImageDiffViewer', () => {
describe('onionSkin', () => { describe('onionSkin', () => {
beforeEach(done => { beforeEach(done => {
createComponent(requiredProps); createComponent({ ...requiredProps });
setTimeout(() => { setImmediate(() => {
done(); done();
}); });
}); });
...@@ -163,7 +181,7 @@ describe('ImageDiffViewer', () => { ...@@ -163,7 +181,7 @@ describe('ImageDiffViewer', () => {
vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click(); vm.$el.querySelector('.view-modes-menu li:nth-child(3)').click();
vm.$nextTick(() => { vm.$nextTick(() => {
dragSlider(vm.$el.querySelector('.dragger')); dragSlider(vm.$el.querySelector('.dragger'), document, 20);
vm.$nextTick(() => { vm.$nextTick(() => {
expect(vm.$el.querySelector('.dragger').style.left).toBe('20px'); expect(vm.$el.querySelector('.dragger').style.left).toBe('20px');
......
...@@ -25,10 +25,19 @@ describe Gitlab::Checks::SnippetCheck do ...@@ -25,10 +25,19 @@ describe Gitlab::Checks::SnippetCheck do
context 'trying to create the branch' do context 'trying to create the branch' do
let(:oldrev) { '0000000000000000000000000000000000000000' } let(:oldrev) { '0000000000000000000000000000000000000000' }
let(:ref) { 'refs/heads/feature' }
it 'raises an error' do it 'raises an error' do
expect { subject.exec }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.') expect { subject.exec }.to raise_error(Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.')
end end
context "when branch is 'master'" do
let(:ref) { 'refs/heads/master' }
it "allows the operation" do
expect { subject.exec }.not_to raise_error
end
end
end end
end end
end end
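The snippet check spec above now expects branch creation to be rejected for any ref except 'master'. A minimal sketch of the kind of guard this implies (explicitly not the implementation from this commit, just an illustration with a hypothetical method name):
# Hypothetical helper, for illustration only.
def validate_snippet_branch!(ref)
  branch_name = ref.sub(%r{\Arefs/heads/}, '')
  return if branch_name == 'master' # creating the default snippet branch is allowed
  raise Gitlab::GitAccess::ForbiddenError, 'You can not create or delete branches.'
end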
...@@ -426,6 +426,10 @@ describe Gitlab::ImportExport::Project::TreeRestorer do ...@@ -426,6 +426,10 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
expect(pipeline_with_external_pr.external_pull_request).to be_persisted expect(pipeline_with_external_pr.external_pull_request).to be_persisted
end end
it 'has no import failures' do
expect(@project.import_failures.size).to eq 0
end
end end
end end
end end
...@@ -499,6 +503,30 @@ describe Gitlab::ImportExport::Project::TreeRestorer do ...@@ -499,6 +503,30 @@ describe Gitlab::ImportExport::Project::TreeRestorer do
end end
end end
context 'multiple pipelines reference the same external pull request' do
before do
setup_import_export_config('multi_pipeline_ref_one_external_pr')
expect(restored_project_json).to eq(true)
end
it_behaves_like 'restores project successfully',
issues: 0,
labels: 0,
milestones: 0,
ci_pipelines: 2,
external_pull_requests: 1,
import_failures: 0
it 'restores external pull request for the restored pipelines' do
external_pr = project.external_pull_requests.first
project.ci_pipelines.each do |pipeline_with_external_pr|
expect(pipeline_with_external_pr.external_pull_request).to be_persisted
expect(pipeline_with_external_pr.external_pull_request).to eq(external_pr)
end
end
end
context 'when post import action throw non-retriable exception' do context 'when post import action throw non-retriable exception' do
let(:exception) { StandardError.new('post_import_error') } let(:exception) { StandardError.new('post_import_error') }
......
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::SidekiqQueue do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
describe '#drop_jobs!' do
shared_examples 'queue processing' do
let(:sidekiq_queue) { described_class.new('authorized_projects') }
let_it_be(:sidekiq_queue_user) { create(:user) }
before do
add_job(create(:user))
add_job(sidekiq_queue_user)
add_job(sidekiq_queue_user)
end
context 'when the queue is not processed in time' do
before do
calls = 0
allow(sidekiq_queue).to receive(:job_matches?).and_wrap_original do |m, *args|
raise Timeout::Error if calls > 0
calls += 1
m.call(*args)
end
end
it 'returns a non-completion flag, the number of jobs deleted, and the remaining queue size' do
expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
.to eq(completed: false,
deleted_jobs: timeout_deleted,
queue_size: 3 - timeout_deleted)
end
end
context 'when the queue is processed in time' do
it 'returns a completion flag, the number of jobs deleted, and the remaining queue size' do
expect(sidekiq_queue.drop_jobs!(search_metadata, timeout: 10))
.to eq(completed: true,
deleted_jobs: no_timeout_deleted,
queue_size: 3 - no_timeout_deleted)
end
end
end
context 'when there are no matching jobs' do
include_examples 'queue processing' do
let(:search_metadata) { { project: 1 } }
let(:timeout_deleted) { 0 }
let(:no_timeout_deleted) { 0 }
end
end
context 'when there are matching jobs' do
include_examples 'queue processing' do
let(:search_metadata) { { user: sidekiq_queue_user.username } }
let(:timeout_deleted) { 1 }
let(:no_timeout_deleted) { 2 }
end
end
context 'when there are no valid metadata keys passed' do
it 'raises NoMetadataError' do
add_job(create(:user))
expect { described_class.new('authorized_projects').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::NoMetadataError)
end
end
context 'when the queue does not exist' do
it 'raises InvalidQueueError' do
expect { described_class.new('foo').drop_jobs!({ user: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::InvalidQueueError)
end
end
end
end
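The spec above exercises Gitlab::SidekiqQueue#drop_jobs! directly. Based on those expectations, a console-style usage sketch (the username is a placeholder) would be:
queue = Gitlab::SidekiqQueue.new('authorized_projects')
# Drop queued jobs whose context metadata matches meta.user == 'some-username',
# giving up after 30 seconds.
result = queue.drop_jobs!({ user: 'some-username' }, timeout: 30)
# Per the expectations above, the result is a hash shaped like:
# { completed: true, deleted_jobs: 2, queue_size: 1 }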
...@@ -380,6 +380,12 @@ describe ApplicationSetting do ...@@ -380,6 +380,12 @@ describe ApplicationSetting do
expect(subject).to be_invalid expect(subject).to be_invalid
end end
it 'does not prevent from saving when gitaly timeouts were previously invalid' do
subject.update_column(:gitaly_timeout_default, Settings.gitlab.max_request_duration_seconds + 1)
expect(subject.reload).to be_valid
end
end end
describe 'enforcing terms' do describe 'enforcing terms' do
......
...@@ -475,43 +475,59 @@ describe WikiPage do ...@@ -475,43 +475,59 @@ describe WikiPage do
end end
end end
describe "#title" do describe '#title_changed?' do
it "replaces a hyphen to a space" do using RSpec::Parameterized::TableSyntax
subject.title = "Import-existing-repositories-into-GitLab"
expect(subject.title).to eq("Import existing repositories into GitLab") let(:untitled_page) { described_class.new(wiki) }
let(:directory_page) do
create_page('parent/child', 'test content')
wiki.find_page('parent/child')
end end
it 'unescapes html' do where(:page, :title, :changed) do
subject.title = 'foo &amp; bar' :untitled_page | nil | false
:untitled_page | 'new title' | true
expect(subject.title).to eq('foo & bar') :new_page | nil | true
:new_page | 'test page' | true
:new_page | 'new title' | true
:existing_page | nil | false
:existing_page | 'test page' | false
:existing_page | '/test page' | false
:existing_page | 'new title' | true
:directory_page | nil | false
:directory_page | 'parent/child' | false
:directory_page | 'child' | false
:directory_page | '/child' | true
:directory_page | 'parent/other' | true
:directory_page | 'other/child' | true
end
with_them do
it 'returns the expected value' do
subject = public_send(page)
subject.title = title if title
expect(subject.title_changed?).to be(changed)
end
end end
end end
describe '#path' do describe '#path' do
let(:path) { 'mypath.md' }
let(:git_page) { instance_double('Gitlab::Git::WikiPage', path: path).as_null_object }
it 'returns the path when persisted' do it 'returns the path when persisted' do
page = described_class.new(wiki, git_page, true) expect(existing_page.path).to eq('test-page.md')
expect(page.path).to eq(path)
end end
it 'returns nil when not persisted' do it 'returns nil when not persisted' do
page = described_class.new(wiki, git_page, false) expect(new_page.path).to be_nil
expect(page.path).to be_nil
end end
end end
describe '#directory' do describe '#directory' do
context 'when the page is at the root directory' do context 'when the page is at the root directory' do
subject do subject { existing_page }
create_page('file', 'content')
wiki.find_page('file')
end
it 'returns an empty string' do it 'returns an empty string' do
expect(subject.directory).to eq('') expect(subject.directory).to eq('')
......
# frozen_string_literal: true
require 'spec_helper'
describe API::Admin::Sidekiq do
let_it_be(:admin) { create(:admin) }
describe 'DELETE /admin/sidekiq/queues/:queue_name' do
context 'when the user is not an admin' do
it 'returns a 403' do
delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", create(:user))
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when the user is an admin' do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
context 'valid request' do
it 'returns info about the deleted jobs' do
add_job(admin)
add_job(admin)
add_job(create(:user))
delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to eq('completed' => true,
'deleted_jobs' => 2,
'queue_size' => 1)
end
end
context 'when no required params are provided' do
it 'returns a 400' do
delete api("/admin/sidekiq/queues/authorized_projects?user_2=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when the queue does not exist' do
it 'returns a 404' do
delete api("/admin/sidekiq/queues/authorized_projects_2?user=#{admin.username}", admin)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
end
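Outside the test suite, the endpoint exercised above maps to DELETE /api/v4/admin/sidekiq/queues/:queue_name. A hedged Ruby sketch of calling it with an admin personal access token (host, token, and username are placeholders):
require 'net/http'
require 'uri'
require 'json'
uri = URI('https://gitlab.example.com/api/v4/admin/sidekiq/queues/authorized_projects?user=some-username')
request = Net::HTTP::Delete.new(uri)
request['PRIVATE-TOKEN'] = 'admin-personal-access-token'
response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
  http.request(request)
end
# Mirrors the spec's JSON response, e.g. {"completed"=>true, "deleted_jobs"=>2, "queue_size"=>1}
puts JSON.parse(response.body)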
# frozen_string_literal: true
require 'spec_helper'
describe 'Deleting Sidekiq jobs' do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
let(:variables) { { user: admin.username, queue_name: 'authorized_projects' } }
let(:mutation) { graphql_mutation(:admin_sidekiq_queues_delete_jobs, variables) }
def mutation_response
graphql_mutation_response(:admin_sidekiq_queues_delete_jobs)
end
context 'when the user is not an admin' do
let(:current_user) { create(:user) }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['You must be an admin to use this mutation']
end
context 'when the user is an admin' do
let(:current_user) { admin }
context 'valid request' do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
def add_job(user)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
'args' => [user.id],
'meta.user' => user.username
)
end
it 'returns info about the deleted jobs' do
add_job(admin)
add_job(admin)
add_job(create(:user))
post_graphql_mutation(mutation, current_user: admin)
expect(mutation_response['errors']).to be_empty
expect(mutation_response['result']).to eq('completed' => true,
'deletedJobs' => 2,
'queueSize' => 1)
end
end
context 'when no required params are provided' do
let(:variables) { { queue_name: 'authorized_projects' } }
it_behaves_like 'a mutation that returns errors in the response',
errors: ['No metadata provided']
end
context 'when the queue does not exist' do
let(:variables) { { user: admin.username, queue_name: 'authorized_projects_2' } }
it_behaves_like 'a mutation that returns top-level errors',
errors: ['Queue authorized_projects_2 not found']
end
end
end
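The GraphQL spec above builds its request through the graphql_mutation helper. Spelled out, the document it posts should look roughly like the following (field names assume GitLab's usual snake_case to camelCase GraphQL conventions; the username is a placeholder):
mutation = <<~GRAPHQL
  mutation {
    adminSidekiqQueuesDeleteJobs(input: { user: "some-username", queueName: "authorized_projects" }) {
      errors
      result {
        completed
        deletedJobs
        queueSize
      }
    }
  }
GRAPHQL
Posting that as an admin should return the shape asserted above: an empty errors array and a result containing completed, deletedJobs and queueSize.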
...@@ -30,6 +30,14 @@ RSpec.shared_examples 'restores project successfully' do |**results| ...@@ -30,6 +30,14 @@ RSpec.shared_examples 'restores project successfully' do |**results|
expect(project.issues.size).to eq(results.fetch(:issues, 0)) expect(project.issues.size).to eq(results.fetch(:issues, 0))
end end
it 'has ci pipelines' do
expect(project.ci_pipelines.size).to eq(results.fetch(:ci_pipelines, 0))
end
it 'has external pull requests' do
expect(project.external_pull_requests.size).to eq(results.fetch(:external_pull_requests, 0))
end
# This test is quarantined because the use of magic number 999 causes failure on CI # This test is quarantined because the use of magic number 999 causes failure on CI
it 'does not set params that are excluded from import_export settings', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/207932#note_293724442' do it 'does not set params that are excluded from import_export settings', quarantine: 'https://gitlab.com/gitlab-org/gitlab/issues/207932#note_293724442' do
expect(project.import_type).to be_nil expect(project.import_type).to be_nil
......