Commit 9c83aadd authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 23bc19cb
@@ -22,7 +22,6 @@
     "style": "---"
   },
   "no-emphasis-as-heading": false,
-  "fenced-code-language": false,
   "first-line-h1": false,
   "code-block-style": {
     "style": "fenced"
...
@@ -513,14 +513,6 @@ module Ci
       success? && !deployment.try(:last?)
     end

-    def depends_on_builds
-      # Get builds of the same type
-      latest_builds = self.pipeline.builds.latest
-      # Return builds from previous stages
-      latest_builds.where('stage_idx < ?', stage_idx)
-    end
-
     def triggered_by?(current_user)
       user == current_user
     end
...
@@ -825,41 +817,15 @@ module Ci
     end

     def all_dependencies
-      (dependencies + cross_dependencies).uniq
+      dependencies.all
     end

-    def dependencies
-      return [] if empty_dependencies?
-
-      depended_jobs = depends_on_builds
-
-      # find all jobs that are needed
-      if Feature.enabled?(:ci_dag_support, project, default_enabled: true) && scheduling_type_dag?
-        depended_jobs = depended_jobs.where(name: needs.artifacts.select(:name))
-      end
-
-      # find all jobs that are dependent on
-      if options[:dependencies].present?
-        depended_jobs = depended_jobs.where(name: options[:dependencies])
-      end
-
-      # if both needs and dependencies are used,
-      # the end result will be an intersection between them
-      depended_jobs
-    end
-
-    def cross_dependencies
-      []
-    end
-
-    def empty_dependencies?
-      options[:dependencies]&.empty?
-    end
-
     def has_valid_build_dependencies?
-      return true if Feature.enabled?('ci_disable_validates_dependencies')
-
-      dependencies.all?(&:valid_dependency?)
+      dependencies.valid?
+    end
+
+    def invalid_dependencies
+      dependencies.invalid_local
     end

     def valid_dependency?
...
@@ -869,10 +835,6 @@ module Ci
       true
     end

-    def invalid_dependencies
-      dependencies.reject(&:valid_dependency?)
-    end
-
     def runner_required_feature_names
       strong_memoize(:runner_required_feature_names) do
         RUNNER_FEATURES.select do |feature, method|
...
@@ -950,6 +912,12 @@ module Ci
     private

+    def dependencies
+      strong_memoize(:dependencies) do
+        Ci::Processable::Dependencies.new(self)
+      end
+    end
+
     def build_data
       @build_data ||= Gitlab::DataBuilder::Build.build(self)
     end
...
# frozen_string_literal: true

module Ci
  class Processable
    class Dependencies
      attr_reader :processable

      def initialize(processable)
        @processable = processable
      end

      def all
        (local + cross_pipeline).uniq
      end

      # Dependencies local to the given pipeline
      def local
        return [] if no_local_dependencies_specified?

        deps = model_class.where(pipeline_id: processable.pipeline_id).latest
        deps = from_previous_stages(deps)
        deps = from_needs(deps)
        deps = from_dependencies(deps)
        deps
      end

      # Dependencies that are defined in other pipelines
      def cross_pipeline
        []
      end

      def invalid_local
        local.reject(&:valid_dependency?)
      end

      def valid?
        valid_local? && valid_cross_pipeline?
      end

      private

      # Dependencies can only be of Ci::Build type because only builds
      # can create artifacts
      def model_class
        ::Ci::Build
      end

      def valid_local?
        return true if Feature.enabled?('ci_disable_validates_dependencies')

        local.all?(&:valid_dependency?)
      end

      def valid_cross_pipeline?
        true
      end

      def project
        processable.project
      end

      def no_local_dependencies_specified?
        processable.options[:dependencies]&.empty?
      end

      def from_previous_stages(scope)
        scope.before_stage(processable.stage_idx)
      end

      def from_needs(scope)
        return scope unless Feature.enabled?(:ci_dag_support, project, default_enabled: true)
        return scope unless processable.scheduling_type_dag?

        needs_names = processable.needs.artifacts.select(:name)
        scope.where(name: needs_names)
      end

      def from_dependencies(scope)
        return scope unless processable.options[:dependencies].present?

        scope.where(name: processable.options[:dependencies])
      end
    end
  end
end

Ci::Processable::Dependencies.prepend_if_ee('EE::Ci::Processable::Dependencies')
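For readers following the refactor, here is a minimal, illustrative sketch (not part of the commit) of how the extracted class is exercised; `job` stands in for any `Ci::Build`/processable:

```ruby
# Sketch only: exercising the public API of Ci::Processable::Dependencies.
# `job` is a hypothetical Ci::Build instance.
deps = Ci::Processable::Dependencies.new(job)

deps.all            # local + cross-pipeline dependencies, de-duplicated
deps.valid?         # true when every local dependency's artifacts are usable
                    # (always true when ci_disable_validates_dependencies is enabled)
deps.invalid_local  # local dependencies that fail valid_dependency?
```

`Ci::Build` wraps exactly this object in `strong_memoize(:dependencies)`, as shown in the hunk above, so `all_dependencies`, `has_valid_build_dependencies?`, and `invalid_dependencies` become thin delegations.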
@@ -7,6 +7,7 @@ class Deployment < ApplicationRecord
   include UpdatedAtFilterable
   include Importable
   include Gitlab::Utils::StrongMemoize
+  include FastDestroyAll

   belongs_to :project, required: true
   belongs_to :environment, required: true
...
@@ -113,6 +114,26 @@ class Deployment < ApplicationRecord
     success.find_by!(iid: iid)
   end

+  class << self
+    ##
+    # FastDestroyAll concerns
+    def begin_fast_destroy
+      preload(:project).find_each.map do |deployment|
+        [deployment.project, deployment.ref_path]
+      end
+    end
+
+    ##
+    # FastDestroyAll concerns
+    def finalize_fast_destroy(params)
+      by_project = params.group_by(&:shift)
+
+      by_project.each do |project, ref_paths|
+        project.repository.delete_refs(*ref_paths.flatten)
+      end
+    end
+  end
+
   def commit
     project.commit(sha)
   end
...
@@ -280,12 +301,12 @@ class Deployment < ApplicationRecord
     errors.add(:ref, _('The branch or tag does not exist'))
   end

-  private
-
   def ref_path
     File.join(environment.ref_path, 'deployments', iid.to_s)
   end

+  private
+
   def legacy_finished_at
     self.created_at if success? && !read_attribute(:finished_at)
   end
...
@@ -3,6 +3,7 @@
 class Environment < ApplicationRecord
   include Gitlab::Utils::StrongMemoize
   include ReactiveCaching
+  include FastDestroyAll::Helpers

   self.reactive_cache_refresh_interval = 1.minute
   self.reactive_cache_lifetime = 55.seconds
...
@@ -10,7 +11,10 @@ class Environment < ApplicationRecord
   belongs_to :project, required: true

-  has_many :deployments, -> { visible }, dependent: :destroy # rubocop:disable Cop/ActiveRecordDependent
+  use_fast_destroy :all_deployments
+
+  has_many :all_deployments, class_name: 'Deployment'
+  has_many :deployments, -> { visible }
   has_many :successful_deployments, -> { success }, class_name: 'Deployment'
   has_many :active_deployments, -> { active }, class_name: 'Deployment'
   has_many :prometheus_alerts, inverse_of: :environment
...
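Taken together, the two hunks above split one fast-destroy pattern across the two models. A rough sketch of the flow, assuming the standard behaviour of the existing `FastDestroyAll` concern (which is not shown in this diff):

```ruby
# Sketch only, assuming FastDestroyAll's usual before/after-destroy hooks.
# `use_fast_destroy :all_deployments` on Environment roughly amounts to:

# 1. Before the environment is destroyed, collect the external state that
#    must be cleaned up, via the class method added to Deployment above:
params = environment.all_deployments.begin_fast_destroy
#        => [[project, ref_path], ...] pairs

# 2. The deployment rows are then deleted in bulk, without per-record callbacks.

# 3. After the environment row is gone, the Git refs are removed in bulk:
Deployment.finalize_fast_destroy(params)
```

This replaces the old `dependent: :destroy` on `has_many :deployments`, which instantiated and destroyed each deployment record individually.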
@@ -612,7 +612,7 @@ To check the configuration:
 If everything is working, you should see something like this:
-```
+```plaintext
 gitlabhq_geo_production=# SELECT * from information_schema.foreign_tables;
  foreign_table_catalog | foreign_table_schema | foreign_table_name | foreign_server_catalog | foreign_server_name
 -------------------------+----------------------+-------------------------------------------------+-------------------------+---------------------
...
@@ -920,7 +920,7 @@ after it has been restored to service.
 Once `repmgrd` and PostgreSQL are runnning, the node will need to follow the new
 as a standby node.
-```
+```shell
 gitlab-ctl repmgr standby follow NEW_MASTER
 ```
@@ -930,7 +930,7 @@ after it has been restored to service.
 Once the old master node has been unregistered from the cluster, it will need
 to be setup as a new standby:
-```
+```shell
 gitlab-ctl repmgr standby setup NEW_MASTER
 ```
...
@@ -2,6 +2,9 @@
 > Introduced in GitLab 9.4.
+NOTE: **Note:**
+We intend to [rename IP whitelist as `IP allowlist`](https://gitlab.com/gitlab-org/gitlab/-/issues/7554).
+
 GitLab provides some [monitoring endpoints] that provide health check information
 when probed.
...
@@ -93,7 +93,7 @@ open up the page for the project.
 The "Gitaly relative path" is shown there, for example:
-```
+```plaintext
 "@hashed/b1/7e/b17ef6d19c7a5b1ee83b907c595526dcb1eb06db8227d650d5dda0a9f4ce8cd9.git"
 ```
@@ -126,7 +126,7 @@ with `.git` from the end of the directory name removed.
 The output includes the project id and the project name:
-```
+```plaintext
 => #<Project id:16 it/supportteam/ticketsystem>
 ```
...
@@ -121,8 +121,8 @@ In this case, the guidance we had from development was to drop deadlock_timeout
 PostgresSQL defaults:
-- statement_timeout = 0 (never)
-- idle_in_transaction_session_timeout = 0 (never)
+- `statement_timeout = 0` (never)
+- `idle_in_transaction_session_timeout = 0` (never)
 Comments in issue [#1](https://gitlab.com/gitlab-org/gitlab/issues/30528) indicate that these should both be set to at least a number of minutes for all Omnibus installations (so they don't hang indefinitely). However, 15s for statement_timeout is very short, and will only be effective if the underlying infrastructure is very performant.
@@ -136,7 +136,7 @@ puts c.execute('SHOW idle_in_transaction_session_timeout').to_a ;"
 It may take a little while to respond.
-```
+```ruby
 {"statement_timeout"=>"1min"}
 {"lock_timeout"=>"0"}
 {"idle_in_transaction_session_timeout"=>"1min"}
...
@@ -82,7 +82,7 @@ Example response:
 Creates a new deploy token for a project.
-```
+```plaintext
 POST /projects/:id/deploy_tokens
 ```
@@ -119,7 +119,7 @@ Example response:
 Removes a deploy token from the project.
-```
+```plaintext
 DELETE /projects/:id/deploy_tokens/:token_id
 ```
@@ -144,7 +144,7 @@ These endpoints require group maintainer access or higher.
 Get a list of a group's deploy tokens
-```
+```plaintext
 GET /groups/:id/deploy_tokens
 ```
@@ -183,7 +183,7 @@ Example response:
 Creates a new deploy token for a group.
-```
+```plaintext
 POST /groups/:id/deploy_tokens
 ```
@@ -222,7 +222,7 @@ Example response:
 Removes a deploy token from the group.
-```
+```plaintext
 DELETE /groups/:id/deploy_tokens/:token_id
 ```
...
@@ -207,7 +207,7 @@ Consul is a tool for service discovery and configuration. Consul is distributed,
 #### Elasticsearch
-- [Project page](https://github.com/elastic/elasticsearch/blob/master/README.textile)
+- [Project page](https://github.com/elastic/elasticsearch/blob/master/README.asciidoc)
 - Configuration: [Omnibus][elasticsearch-omnibus], [Charts][elasticsearch-charts], [Source][elasticsearch-source], [GDK][elasticsearch-gdk]
 - Layer: Core Service (Data)
...
@@ -447,7 +447,7 @@ Where `MD_DOC_PATH` points to the file or directory you would like to run lint c
 If you omit it completely, it will default to the `doc/` directory.
 The output should be similar to:
-```
+```plaintext
 => Linting documents at path /path/to/gitlab as <user>...
 => Checking for cURL short options...
 => Checking for CHANGELOG.md duplicate entries...
...
@@ -521,7 +521,7 @@ For ordered lists, use three spaces for each level of indentation:
 You can nest full lists inside other lists using the same rules as above. If you wish
 to mix types, that is also possible, as long as you don't mix items at the same level:
-```
+```markdown
 1. Ordered list item one.
 1. Ordered list item two.
    - Nested unordered list item one.
...
@@ -184,7 +184,7 @@ If the current version is `v12p1`, and we need to create a new version for `v12p
 You might get an error such as
-```
+```plaintext
 [2018-10-31T15:54:19,762][WARN ][o.e.c.r.a.DiskThresholdMonitor] [pval5Ct]
 flood stage disk watermark [95%] exceeded on
 [pval5Ct7SieH90t5MykM5w][pval5Ct][/usr/local/var/lib/elasticsearch/nodes/0] free: 56.2gb[3%],
@@ -195,20 +195,20 @@ This is because you've exceeded the disk space threshold - it thinks you don't h
 In addition, the `read_only_allow_delete` setting will be set to `true`. It will block indexing, `forcemerge`, etc
-```
+```shell
 curl "http://localhost:9200/gitlab-development/_settings?pretty"
 ```
 Add this to your `elasticsearch.yml` file:
-```
+```yaml
 # turn off the disk allocator
 cluster.routing.allocation.disk.threshold_enabled: false
 ```
 _or_
-```
+```yaml
 # set your own limits
 cluster.routing.allocation.disk.threshold_enabled: true
 cluster.routing.allocation.disk.watermark.flood_stage: 5gb # ES 6.x only
...
@@ -58,7 +58,7 @@ The author then adds a comment to this piece of code and adds a link to the issu
 - Track necessary events. See the [telemetry guide](../../telemetry/index.md) for details.
 - After the merge request is merged, use [`chatops`](../../ci/chatops/README.md) to enable the feature flag and start the experiment. For visibility, please run the command in the `#s_growth` channel:
-```
+```shell
 /chatops run feature set --project=gitlab-org/gitlab experimental_sign_up_flow true
 ```
...
@@ -10,9 +10,9 @@ Copy the content over to your issue or merge request and if something doesn't ap
 This checklist is intended to help us during development of bigger features/refactorings, it's not a "use it always and every point always matches" list.
-Please use your best judgement when to use it and please contribute new points through merge requests if something comes to your mind.
+Please use your best judgment when to use it and please contribute new points through merge requests if something comes to your mind.
-```
+```markdown
 ### Frontend development
 #### Planning development
...
@@ -149,7 +149,7 @@ Using local Apollo Cache is handy when we have a need to mock some GraphQL API r
 For example, we have a [fragment](#fragments) on `DesignVersion` used in our queries:
-```
+```javascript
 fragment VersionListItem on DesignVersion {
   id
   sha
@@ -158,7 +158,7 @@ fragment VersionListItem on DesignVersion {
 We need to fetch also version author and the 'created at' property to display them in the versions dropdown but these changes are still not implemented in our API. We can change the existing fragment to get a mocked response for these new fields:
-```
+```javascript
 fragment VersionListItem on DesignVersion {
   id
   sha
...
@@ -294,7 +294,7 @@ Please check this [rules](https://github.com/vuejs/eslint-plugin-vue#bulb-rules)
 1. Tag order in `.vue` file
-```
+```html
 <script>
   // ...
 </script>
...
@@ -64,36 +64,36 @@ The easiest way to include prettier in your workflow is by setting up your prefe
 Please take care that you only let Prettier format the same file types as the global Yarn script does (.js, .vue, and .scss). In VSCode by example you can easily exclude file formats in your settings file:
-```
+```json
 "prettier.disableLanguages": [
     "json",
     "markdown"
-],
+]
 ```
 ### Yarn Script
 The following yarn scripts are available to do global formatting:
-```
+```shell
 yarn prettier-staged-save
 ```
 Updates all currently staged files (based on `git diff`) with Prettier and saves the needed changes.
-```
+```shell
 yarn prettier-staged
 ```
 Checks all currently staged files (based on `git diff`) with Prettier and log which files would need manual updating to the console.
-```
+```shell
 yarn prettier-all
 ```
 Checks all files with Prettier and logs which files need manual updating to the console.
-```
+```shell
 yarn prettier-all-save
 ```
@@ -103,13 +103,13 @@ The source of these Yarn scripts can be found in `/scripts/frontend/prettier.js`
 #### Scripts during Conversion period
-```
+```shell
 node ./scripts/frontend/prettier.js check-all ./vendor/
 ```
 This will go over all files in a specific folder check it.
-```
+```shell
 node ./scripts/frontend/prettier.js save-all ./vendor/
 ```
...
@@ -25,7 +25,7 @@ In some features implemented with Vue.js, like the [issue board][issue-boards]
 or [environments table][environments-table]
 you can find a clear separation of concerns:
-```
+```plaintext
 new_feature
 ├── components
 │   └── component.vue
...
@@ -22,7 +22,7 @@ _Note:_ The action itself will not update the state, only a mutation should upda
 When using Vuex at GitLab, separate these concerns into different files to improve readability:
-```
+```plaintext
 └── store
     ├── index.js          # where we assemble modules and export the store
     ├── actions.js        # actions
...
@@ -12,7 +12,7 @@ Follow the Chatops document to [request access](../chatops_on_gitlabcom.md#reque
 Once you are added to the project test if your access propagated,
 run:
-```
+```shell
 /chatops run feature --help
 ```
@@ -70,7 +70,7 @@ and <https://dev.gitlab.org>.
 For example, to enable a feature for 25% of all users, run the following in
 Slack:
-```
+```shell
 /chatops run feature set new_navigation_bar 25 --dev
 /chatops run feature set new_navigation_bar 25 --staging
 ```
@@ -92,7 +92,7 @@ feature enabled, you can roll out the change to GitLab.com.
 Similar to above, to enable a feature for 25% of all users, run the following in
 Slack:
-```
+```shell
 /chatops run feature set new_navigation_bar 25
 ```
@@ -117,13 +117,13 @@ Feature gates can also be actor based, for example a feature could first be
 enabled for only the `gitlab` project. The project is passed by supplying a
 `--project` flag:
-```
+```shell
 /chatops run feature set --project=gitlab-org/gitlab some_feature true
 ```
 For groups the `--group` flag is available:
-```
+```shell
 /chatops run feature set --group=gitlab-org some_feature true
 ```
@@ -131,7 +131,7 @@ Note that actor-based gates are applied before percentages. For example, conside
 `group/project` as `gitlab-org/gitlab` and a given example feature as `some_feature`, if
 you run these 2 commands:
-```
+```shell
 /chatops run feature set --project=gitlab-org/gitlab some_feature true
 /chatops run feature set some_feature 25
 ```
...
@@ -60,7 +60,7 @@ hash of the project ID instead, if project migrates to the new approach (introdu
 Files are stored at multiple locations and use different path schemes.
 All the `GitlabUploader` derived classes should comply with this path segment schema:
-```
+```plaintext
 | GitlabUploader
 | ----------------------- + ------------------------- + --------------------------------- + -------------------------------- |
 | `<gitlab_root>/public/` | `uploads/-/system/` | `user/avatar/:id/` | `:filename` |
...
@@ -134,7 +134,7 @@ The **secondary** node authenticates itself via a [JWT request](https://jwt.io/)
 When the **secondary** node wishes to download a file, it sends an
 HTTP request with the `Authorization` header:
-```
+```plaintext
 Authorization: GL-Geo <access_key>:<JWT payload>
 ```
@@ -146,7 +146,7 @@ file for the right database ID. For example, for an LFS object, the
 request must also include the SHA256 sum of the file. An example JWT
 payload looks like:
-```
+```yaml
 { "data": { sha256: "31806bb23580caab78040f8c45d329f5016b0115" }, iat: "1234567890" }
 ```
@@ -199,13 +199,13 @@ contains the schema and migrations for this database.
 To write a migration for the database, use the `GeoMigrationGenerator`:
-```
+```shell
 rails g geo_migration [args] [options]
 ```
 To migrate the tracking database, run:
-```
+```shell
 bundle exec rake geo:db:migrate
 ```
@@ -252,7 +252,7 @@ Failure to do this will prevent the **secondary** node from
 functioning properly. The **secondary** node will generate error
 messages, as the following PostgreSQL error:
-```
+```sql
 ERROR: relation "gitlab_secondary.ci_job_artifacts" does not exist at character 323
 STATEMENT: SELECT a.attname, format_type(a.atttypid, a.atttypmod),
 pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod
...
@@ -101,7 +101,7 @@ end
 in a prepended module, which is very likely the case in EE. We could see
 error like this:
-```
+```plaintext
 1.1) Failure/Error: expect_any_instance_of(ApplicationSetting).to receive_messages(messages)
 Using `any_instance` to stub a method (elasticsearch_indexing) that has been defined on a prepended module (EE::ApplicationSetting) is not supported.
 ```
...
@@ -488,7 +488,7 @@ The linter will take the following into account:
 The errors are grouped per file, and per message ID:
-```
+```plaintext
 Errors in `locale/zh_HK/gitlab.po`:
 PO-syntax errors
 SimplePoParser::ParserErrorSyntax error in lines
...
@@ -63,7 +63,7 @@ class StuckImportJobsWorker
 Marked stuck import jobs as failed. JIDs: xyz
 ```
-```
+```plaintext
 +-----------+ +-----------------------------------+
 |Export Job |--->| Calls ActiveRecord `as_json` and |
 +-----------+ | `to_json` on all project models |
...
@@ -148,7 +148,7 @@ In this section we'll detail any known issues we've seen when trying to import a
 If you're attempting to import a large project into a development environment, you may see Gitaly throw an error about too many calls or invocations, for example:
-```
+```plaintext
 Error importing repository into qa-perf-testing/gitlabhq - GitalyClient#call called 31 times from single request. Potential n+1?
 ```
...
@@ -87,7 +87,7 @@ Ruby code. In case of the above snippet you'd run the following:
 This will print out something along the lines of:
-```
+```plaintext
 From: /path/to/your/gitlab/lib/gitlab/metrics/instrumentation.rb @ line 148:
 Owner: #<Module:0x0055f0865c6d50>
 Visibility: public
...
@@ -31,7 +31,7 @@ The following are required to install and test the app:
 For example:
-```
+```plaintext
 https://xxxx.serveo.net/-/jira_connect/app_descriptor.json
 ```
...
@@ -34,7 +34,7 @@ Gitaly.
 When called from Gitaly in a `pre-receive` hook the changes are passed
 and those are validated to determine if the push is allowed.
-```
+```plaintext
 POST /internal/allowed
 ```
@@ -57,7 +57,7 @@ curl --request POST --header "Gitlab-Shared-Secret: <Base64 encoded token>" --da
 Example response:
-```
+```json
 {
   "status": true,
   "gl_repository": "project-3",
@@ -103,7 +103,7 @@ Example request:
 curl --request POST --header "Gitlab-Shared-Secret: <Base64 encoded token>" --data "key_id=11&project=gnuwget/wget2" http://localhost:3001/api/v4/internal/lfs_authenticate
 ```
-```
+```json
 {
   "username": "root",
   "lfs_token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJkYXRhIjp7ImFjdG9yIjoicm9vdCJ9LCJqdGkiOiIyYWJhZDcxZC0xNDFlLTQ2NGUtOTZlMi1mODllYWRiMGVmZTYiLCJpYXQiOjE1NzAxMTc2NzYsIm5iZiI6MTU3MDExNzY3MSwiZXhwIjoxNTcwMTE5NDc2fQ.g7atlBw1QMY7QEBVPE0LZ8ZlKtaRzaMRmNn41r2YITM",
@@ -126,7 +126,7 @@ lookup](../administration/operations/fast_ssh_key_lookup.md).
 |:----------|:-------|:---------|:------------|
 | `key` | string | yes | SSH key as passed by OpenSSH to GitLab-shell |
-```
+```plaintext
 GET /internal/authorized_keys
 ```
@@ -138,7 +138,7 @@ curl --request GET --header "Gitlab-Shared-Secret: <Base64 encoded secret>""http
 Example response:
-```
+```json
 {
   "id": 11,
   "title": "admin@example.com",
@@ -161,7 +161,7 @@ discovers the user associated with an SSH key.
 | `key_id` | integer | no | The id of the SSH key used as found in the authorized-keys file or through the `/authorized_keys` check |
 | `username` | string | no | Username of the user being looked up, used by GitLab-shell when authenticating using a certificate |
-```
+```plaintext
 GET /internal/discover
 ```
@@ -173,7 +173,7 @@ curl --request GET --header "Gitlab-Shared-Secret: <Base64 encoded secret>" "htt
 Example response:
-```
+```json
 {
   "id": 7,
   "name": "Dede Eichmann",
@@ -190,7 +190,7 @@ Example response:
 This get's some generic information about the instance. This is used
 by Geo nodes to get information about eachother
-```
+```plaintext
 GET /internal/check
 ```
@@ -202,7 +202,7 @@ curl --request GET --header "Gitlab-Shared-Secret: <Base64 encoded secret>" "htt
 Example response:
-```
+```json
 {
   "api_version": "v4",
   "gitlab_version": "12.3.0-pre",
@@ -226,7 +226,7 @@ recovery codes based on their SSH key
 | `key_id` | integer | no | The id of the SSH key used as found in the authorized-keys file or through the `/authorized_keys` check |
 | `user_id` | integer | no | **Deprecated** User_id for which to generate new recovery codes |
-```
+```plaintext
 GET /internal/two_factor_recovery_codes
 ```
@@ -238,7 +238,7 @@ curl --request POST --header "Gitlab-Shared-Secret: <Base64 encoded secret>" --d
 Example response:
-```
+```json
 {
   "success": true,
   "recovery_codes": [
@@ -269,7 +269,7 @@ for a push that might be accepted.
 |:----------|:-------|:---------|:------------|
 | `gl_repository` | string | yes | repository identifier for the repository receiving the push |
-```
+```plaintext
 POST /internal/pre_receive
 ```
@@ -281,7 +281,7 @@ curl --request POST --header "Gitlab-Shared-Secret: <Base64 encoded secret>" --d
 Example response:
-```
+```json
 {
   "reference_counter_increased": true
 }
@@ -301,7 +301,7 @@ the user.
 | `push_options` | string array | no | array of push options |
 | `changes` | string | no | refs to be updated in the push in the format `oldrev newrev refname\n`. |
-```
+```plaintext
 POST /internal/post_receive
 ```
@@ -313,7 +313,7 @@ curl --request POST --header "Gitlab-Shared-Secret: <Base64 encoded secret>" --d
 Example response:
-```
+```json
 {
   "messages": [
     {
...
@@ -27,7 +27,7 @@ On the other hand, if an icon is crucial to understand the context we should do
 In forms we should use the `for` attribute in the label statement:
-```
+```html
 <div>
   <label for="name">Fill in your name:</label>
   <input type="text" id="name" name="name">
...
@@ -4,7 +4,7 @@
 To clear production compiled assets created with `yarn webpack-prod` you can run:
-```
+```shell
 yarn clean
 ```
...
@@ -7,13 +7,13 @@ support subgroups, GitLab project and group routes use the wildcard
 character to match project and group routes. For example, we might have
 a path such as:
-```
+```plaintext
 /gitlab-com/customer-success/north-america/west/customerA
 ```
 However, paths can be ambiguous. Consider the following example:
-```
+```plaintext
 /gitlab-com/edit
 ```
@@ -29,7 +29,7 @@ number of [reserved names](../user/reserved_names.md).
 We have a number of global routes. For example:
-```
+```plaintext
 /-/health
 /-/metrics
 ```
@@ -40,7 +40,7 @@ Every group route must be under the `/-/` scope.
 Examples:
-```
+```plaintext
 gitlab-org/-/edit
 gitlab-org/-/activity
 gitlab-org/-/security/dashboard
@@ -56,7 +56,7 @@ client or other software requires something different.
 Examples:
-```
+```plaintext
 gitlab-org/gitlab/-/activity
 gitlab-org/gitlab/-/jobs/123
 gitlab-org/gitlab/-/settings/repository
...
@@ -71,19 +71,21 @@ Make the difference between options and arguments clear to the argument parsers
 To understand what `--` does, consider the problem below.
-```
+```shell
 # Example
 $ echo hello > -l
 $ cat -l
 cat: illegal option -- l
 usage: cat [-benstuv] [file ...]
 ```
 In the example above, the argument parser of `cat` assumes that `-l` is an option. The solution in the example above is to make it clear to `cat` that `-l` is really an argument, not an option. Many Unix command line tools follow the convention of separating options from arguments with `--`.
-```
+```shell
 # Example (continued)
 $ cat -- -l
 hello
 ```
@@ -203,7 +205,7 @@ validates :import_url, format: { with: URI.regexp(%w(ssh git http https)) }
 Suppose the user submits the following as their import URL:
-```
+```plaintext
 file://git:/tmp/lol
 ```
...
@@ -329,7 +329,7 @@ use of extensions and concurrent index removal, you need at least PostgreSQL 9.2
 1. Check if the `pg_trgm` extension is enabled:
-```shell
+```sql
 SELECT true AS enabled
 FROM pg_available_extensions
 WHERE name = 'pg_trgm'
@@ -338,7 +338,7 @@ use of extensions and concurrent index removal, you need at least PostgreSQL 9.2
 If the extension is enabled this will produce the following output:
-```
+```plaintext
 enabled
 ---------
 t
...
@@ -15,7 +15,7 @@ GitLab.com will generate an application ID and secret key for you to use.
 - Name: This can be anything. Consider something like `<Organization>'s GitLab` or `<Your Name>'s GitLab` or something else descriptive.
 - Redirect URI:
-```
+```plaintext
 http://your-gitlab.example.com/import/gitlab/callback
 http://your-gitlab.example.com/users/auth/gitlab/callback
 ```
@@ -63,7 +63,7 @@ GitLab.com will generate an application ID and secret key for you to use.
 For installations from source:
-```
+```yaml
 - { name: 'gitlab', app_id: 'YOUR_APP_ID',
     app_secret: 'YOUR_APP_SECRET',
     args: { scope: 'api' } }
...
@@ -28,7 +28,7 @@ In Google's side:
 - **Authorized redirect URIs** - Enter your domain name followed by the
   callback URIs one at a time:
-```
+```plaintext
 https://gitlab.example.com/users/auth/google_oauth2/callback
 https://gitlab.example.com/-/google_api/auth/callback
 ```
...
@@ -160,7 +160,7 @@ in the `gitlab.rb` config file, followed by the [`gitlab-ctl reconfigure` comman
 If you don't find the errors above, but do find *duplicate* entries like below (in `/var/log/gitlab/gitlab-rail`), this
 could also indicate that [webhook requests are timing out](../user/project/integrations/webhooks.md#receiving-duplicate-or-multiple-webhook-requests-triggered-by-one-event):
-```
+```plaintext
 2019-10-25_04:22:41.25630 2019-10-25T04:22:41.256Z 1584 TID-ovowh4tek WebHookWorker JID-941fb7f40b69dff3d833c99b INFO: start
 2019-10-25_04:22:41.25630 2019-10-25T04:22:41.256Z 1584 TID-ovowh4tek WebHookWorker JID-941fb7f40b69dff3d833c99b INFO: start
 ```
@@ -33,7 +33,7 @@ system's Kerberos settings.
 The keytab is a sensitive file and must be readable by the GitLab user. Set
 ownership and protect the file appropriately:
-```
+```shell
 sudo chown git /etc/http.keytab
 sudo chmod 0600 /etc/http.keytab
 ```
@@ -243,7 +243,7 @@ With Kerberos SPNEGO authentication, the browser is expected to send a list of
 mechanisms it supports to GitLab. If it doesn't support any of the mechanisms
 GitLab supports, authentication will fail with a message like this in the log:
-```
+```plaintext
 OmniauthKerberosSpnegoController: failed to process Negotiate/Kerberos authentication: gss_accept_sec_context did not return GSS_S_COMPLETE: An unsupported mechanism was requested Unknown error
 ```
@@ -282,7 +282,7 @@ fatal: Authentication failed for '<KRB5 path>'
 If you are using Git v2.11 or newer and see the above error when cloning, you can
 set the `http.emptyAuth` Git option to `true` to fix this:
-```
+```shell
 git config --global http.emptyAuth true
 ```
...
@@ -26,7 +26,7 @@ This strategy is designed to allow configuration of the simple OmniAuth SSO proc
 The redirect URI you provide when registering the application should be:
-```
+```plaintext
 http://your-gitlab.host.com/users/auth/oauth2_generic/callback
 ```
...
@@ -23,7 +23,7 @@ user logins via passwords, the `X-GitLab-Show-Login-Captcha` HTTP header must
 be set. For example, in NGINX, this can be done via the `proxy_set_header`
 configuration variable:
-```
+```nginx
 proxy_set_header X-GitLab-Show-Login-Captcha 1;
 ```
...
@@ -60,7 +60,7 @@ To get the credentials (a pair of Client ID and Client Secret), you must [create
 For installation from source:
-```
+```yaml
 - { name: 'salesforce',
     app_id: 'SALESFORCE_CLIENT_ID',
     app_secret: 'SALESFORCE_CLIENT_SECRET'
...
@@ -129,7 +129,7 @@ To ease configuration, most IdP accept a metadata URL for the application to pro
 configuration information to the IdP. To build the metadata URL for GitLab, append
 `users/auth/saml/metadata` to the HTTPS URL of your GitLab installation, for instance:
-```
+```plaintext
 https://gitlab.example.com/users/auth/saml/metadata
 ```
...
@@ -66,7 +66,7 @@ The following changes are needed to enable Shibboleth:
 The file should look like this:
-```
+```ruby
 external_url 'https://gitlab.example.com'
 gitlab_rails['internal_api_url'] = 'https://gitlab.example.com'
@@ -101,7 +101,7 @@ On the sign in page, there should now be a "Sign in with: Shibboleth" icon below
 The order of the first 2 Location directives is important. If they are reversed,
 you will not get a Shibboleth session!
-```
+```plaintext
 <Location />
   Require all granted
   ProxyPassReverse http://127.0.0.1:8181
...
@@ -52,7 +52,7 @@ bundle exec rake gitlab:import:all_users_to_all_groups RAILS_ENV=production
 - Enable this setting to keep new users blocked until they have been cleared by the admin (default: false).
-```
+```plaintext
 block_auto_created_users: false
 ```
...
@@ -177,13 +177,13 @@ In case you want to remove a blocked IP, follow these steps:
 1. You can remove the block using the following syntax, replacing `<ip>` with
    the actual IP that is blacklisted:
-```
+```plaintext
 del cache:gitlab:rack::attack:allow2ban:ban:<ip>
 ```
 1. Confirm that the key with the IP no longer shows up:
-```
+```plaintext
 keys *rack::attack*
 ```
...
...@@ -248,12 +248,15 @@ Seat Link sends to GitLab daily a count of all users in connected self-managed i ...@@ -248,12 +248,15 @@ Seat Link sends to GitLab daily a count of all users in connected self-managed i
Seat Link is mandatory because we need the user count data to enable prorated billing. Seat Link provides **only** the following information to GitLab: Seat Link is mandatory because we need the user count data to enable prorated billing. Seat Link provides **only** the following information to GitLab:
- Date - Date
- Historical maximum user count
- License key - License key
- Historical maximum user count
Here is an example of the POST request: For air-gapped or closed network customers, the existing [true-up model](#users-over-license) will be used. Prorated charges are not possible without user count data.
```plaintext <details>
<summary>Click here to view example content of a Seat Link POST request.</summary>
<pre><code>
{ {
date: '2020-01-29', date: '2020-01-29',
license_key: 'ZXlKa1lYUmhJam9pWm5WNmVsTjVZekZ2YTJoV2NucDBh license_key: 'ZXlKa1lYUmhJam9pWm5WNmVsTjVZekZ2YTJoV2NucDBh
...@@ -292,9 +295,9 @@ SlRORE4xUjFaYVJGb3JlWGM5UFZ4dUlpd2lhWFlpt2lKV00yRnNVbk5RTjJk ...@@ -292,9 +295,9 @@ SlRORE4xUjFaYVJGb3JlWGM5UFZ4dUlpd2lhWFlpt2lKV00yRnNVbk5RTjJk
Sg0KU1hNMGExaE9SVGR2V2pKQlBUMWNiaUo5DQo=', Sg0KU1hNMGExaE9SVGR2V2pKQlBUMWNiaUo5DQo=',
max_historical_user_count: 10 max_historical_user_count: 10
} }
``` </code></pre>
For air-gapped or closed network customers, the existing [true-up model](#users-over-license) will be used. Prorated charges are not possible without user count data. </details>
### Renew or change a GitLab.com subscription ### Renew or change a GitLab.com subscription
......
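The Seat Link payload shown above is plain JSON, so the request is easy to reproduce when testing connectivity. The sketch below is a minimal illustration only; the endpoint URL and the license key value are placeholders, not the documented customers portal address.

```ruby
require 'net/http'
require 'json'
require 'uri'

# Shape of the daily Seat Link payload described above: date, license key,
# and the historical maximum user count.
payload = {
  date: '2020-01-29',
  license_key: '<base64-encoded license key>', # placeholder for the long encoded value
  max_historical_user_count: 10
}

# Hypothetical endpoint used only for illustration.
uri = URI('https://customers.example.com/api/v1/seat_links')
response = Net::HTTP.post(uri, payload.to_json, 'Content-Type' => 'application/json')
puts response.code
```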
...@@ -150,7 +150,7 @@ sudo update-ca-certificates ...@@ -150,7 +150,7 @@ sudo update-ca-certificates
If all goes well, this is what you should see: If all goes well, this is what you should see:
``` ```plaintext
1 added, 0 removed; done. 1 added, 0 removed; done.
Running hooks in /etc/ca-certificates/update.d... Running hooks in /etc/ca-certificates/update.d...
done. done.
......
...@@ -239,7 +239,7 @@ After a few minutes you'll notice that there was a failure in a test. ...@@ -239,7 +239,7 @@ After a few minutes you'll notice that there was a failure in a test.
This means there's a test that was 'broken' by our change. This means there's a test that was 'broken' by our change.
Navigating into the `test` job that failed, you can see what the broken test is: Navigating into the `test` job that failed, you can see what the broken test is:
``` ```plaintext
Failure: Failure:
WelcomeControllerTest#test_should_get_index [/app/test/controllers/welcome_controller_test.rb:7]: WelcomeControllerTest#test_should_get_index [/app/test/controllers/welcome_controller_test.rb:7]:
<You're on Rails!> expected but was <You're on Rails!> expected but was
......
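For context, the failure above comes from a standard Rails controller test. A minimal sketch of such a test follows; the exact assertion in the tutorial's repository may differ.

```ruby
# test/controllers/welcome_controller_test.rb (illustrative sketch)
require 'test_helper'

class WelcomeControllerTest < ActionDispatch::IntegrationTest
  test 'should get index' do
    get welcome_index_url
    assert_response :success

    # Fails once the change removes the default heading from the view,
    # producing the "<You're on Rails!> expected but was ..." message above.
    assert_select 'h1', "You're on Rails!"
  end
end
```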
...@@ -294,7 +294,7 @@ git push origin <branch> -f ...@@ -294,7 +294,7 @@ git push origin <branch> -f
You may end up with a commit log that looks like this: You may end up with a commit log that looks like this:
``` ```plaintext
Fix issue #13 Fix issue #13
Test Test
Fix Fix
......
...@@ -217,7 +217,7 @@ It's also possible to add the `Gitlab-DAST-Permission` header via a proxy. ...@@ -217,7 +217,7 @@ It's also possible to add the `Gitlab-DAST-Permission` header via a proxy.
The following config allows NGINX to act as a reverse proxy and add the `Gitlab-DAST-Permission` [header](http://nginx.org/en/docs/http/ngx_http_headers_module.html#add_header): The following config allows NGINX to act as a reverse proxy and add the `Gitlab-DAST-Permission` [header](http://nginx.org/en/docs/http/ngx_http_headers_module.html#add_header):
``` ```nginx
# default.conf # default.conf
server { server {
listen 80; listen 80;
...@@ -237,7 +237,7 @@ to add the `Gitlab-DAST-Permission` [header](https://httpd.apache.org/docs/curre ...@@ -237,7 +237,7 @@ to add the `Gitlab-DAST-Permission` [header](https://httpd.apache.org/docs/curre
To do so, add the following lines to `httpd.conf`: To do so, add the following lines to `httpd.conf`:
``` ```plaintext
# httpd.conf # httpd.conf
LoadModule proxy_module modules/mod_proxy.so LoadModule proxy_module modules/mod_proxy.so
LoadModule proxy_connect_module modules/mod_proxy_connect.so LoadModule proxy_connect_module modules/mod_proxy_connect.so
...@@ -472,7 +472,7 @@ Since it keeps most of its information in memory during a scan, ...@@ -472,7 +472,7 @@ Since it keeps most of its information in memory during a scan,
it is possible for DAST to run out of memory while scanning large applications. it is possible for DAST to run out of memory while scanning large applications.
This results in the following error: This results in the following error:
``` ```plaintext
[zap.out] java.lang.OutOfMemoryError: Java heap space [zap.out] java.lang.OutOfMemoryError: Java heap space
``` ```
......
...@@ -518,7 +518,7 @@ For SAST with all [supported languages and frameworks](#supported-languages-and- ...@@ -518,7 +518,7 @@ For SAST with all [supported languages and frameworks](#supported-languages-and-
import the following default SAST analyzer images from `registry.gitlab.com` to your local "offline" import the following default SAST analyzer images from `registry.gitlab.com` to your local "offline"
registry: registry:
``` ```plaintext
registry.gitlab.com/gitlab-org/security-products/analyzers/bandit:2 registry.gitlab.com/gitlab-org/security-products/analyzers/bandit:2
registry.gitlab.com/gitlab-org/security-products/analyzers/brakeman:2 registry.gitlab.com/gitlab-org/security-products/analyzers/brakeman:2
registry.gitlab.com/gitlab-org/security-products/analyzers/eslint:2 registry.gitlab.com/gitlab-org/security-products/analyzers/eslint:2
......
...@@ -17,7 +17,7 @@ Line breaks are not preserved. ...@@ -17,7 +17,7 @@ Line breaks are not preserved.
Line comments, which are lines that start with `//`, are skipped: Line comments, which are lines that start with `//`, are skipped:
``` ```asciidoc
// this is a comment // this is a comment
``` ```
......
...@@ -224,7 +224,7 @@ it is not possible due to a naming collision. For example: ...@@ -224,7 +224,7 @@ it is not possible due to a naming collision. For example:
The regex that is used for naming validates all package names from all package managers: The regex that is used for naming validates all package names from all package managers:
``` ```plaintext
/\A\@?(([\w\-\.\+]*)\/)*([\w\-\.]+)@?(([\w\-\.\+]*)\/)*([\w\-\.]*)\z/ /\A\@?(([\w\-\.\+]*)\/)*([\w\-\.]+)@?(([\w\-\.\+]*)\/)*([\w\-\.]*)\z/
``` ```
......
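A quick worked example of that regex against a few package names; the sample names are made up for illustration.

```ruby
PACKAGE_NAME_REGEX = /\A\@?(([\w\-\.\+]*)\/)*([\w\-\.]+)@?(([\w\-\.\+]*)\/)*([\w\-\.]*)\z/

puts PACKAGE_NAME_REGEX.match?('@my-scope/my-package') # => true (scoped npm-style name)
puts PACKAGE_NAME_REGEX.match?('my_package')           # => true
puts PACKAGE_NAME_REGEX.match?('my package')           # => false (spaces are not allowed)
```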
...@@ -140,7 +140,7 @@ NOTE: **Note:** ...@@ -140,7 +140,7 @@ NOTE: **Note:**
In your GitLab deploy stage log, there will be output containing your AWS Lambda endpoint URL. In your GitLab deploy stage log, there will be output containing your AWS Lambda endpoint URL.
The log line will look similar to this: The log line will look similar to this:
``` ```plaintext
endpoints: endpoints:
GET - https://u768nzby1j.execute-api.us-east-1.amazonaws.com/production/hello GET - https://u768nzby1j.execute-api.us-east-1.amazonaws.com/production/hello
``` ```
......
...@@ -29,7 +29,7 @@ the users from the last pattern matching the file are displayed on the ...@@ -29,7 +29,7 @@ the users from the last pattern matching the file are displayed on the
blob page of the given file. For example, you have the following blob page of the given file. For example, you have the following
`CODEOWNERS` file: `CODEOWNERS` file:
``` ```plaintext
README.md @user1 README.md @user1
# This line would also match the file README.md # This line would also match the file README.md
...@@ -68,7 +68,7 @@ escaped using `\#` to address files for which the name starts with a ...@@ -68,7 +68,7 @@ escaped using `\#` to address files for which the name starts with a
Example `CODEOWNERS` file: Example `CODEOWNERS` file:
``` ```plaintext
# This is an example code owners file, lines starting with a `#` will # This is an example code owners file, lines starting with a `#` will
# be ignored. # be ignored.
......
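The "last matching pattern wins" rule described above can be illustrated with a small sketch. This is not GitLab's implementation, only a glob-based approximation; the second pattern and its owners are hypothetical.

```ruby
# [pattern, owners] pairs in the order they appear in CODEOWNERS.
rules = [
  ['README.md', ['@user1']],
  ['*.md',      ['@user2']] # hypothetical later pattern that also matches README.md
]

def owners_for(path, rules)
  matching = rules.select { |pattern, _| File.fnmatch?(pattern, path, File::FNM_PATHNAME) }
  matching.empty? ? [] : matching.last.last
end

puts owners_for('README.md', rules).inspect # => ["@user2"] -- the last matching pattern wins
```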
...@@ -213,7 +213,7 @@ project for which the job is triggered. ...@@ -213,7 +213,7 @@ project for which the job is triggered.
This is what an example usage can look like: This is what an example usage can look like:
``` ```yaml
test: test:
script: script:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
......
...@@ -53,6 +53,7 @@ exports[`Dashboard template matches the default snapshot 1`] = ` ...@@ -53,6 +53,7 @@ exports[`Dashboard template matches the default snapshot 1`] = `
<gl-search-box-by-type-stub <gl-search-box-by-type-stub
class="m-2" class="m-2"
clearbuttontitle="Clear"
value="" value=""
/> />
......
...@@ -730,147 +730,6 @@ describe Ci::Build do ...@@ -730,147 +730,6 @@ describe Ci::Build do
end end
end end
describe '#depends_on_builds' do
let!(:build) { create(:ci_build, pipeline: pipeline, name: 'build', stage_idx: 0, stage: 'build') }
let!(:rspec_test) { create(:ci_build, pipeline: pipeline, name: 'rspec', stage_idx: 1, stage: 'test') }
let!(:rubocop_test) { create(:ci_build, pipeline: pipeline, name: 'rubocop', stage_idx: 1, stage: 'test') }
let!(:staging) { create(:ci_build, pipeline: pipeline, name: 'staging', stage_idx: 2, stage: 'deploy') }
it 'expects to have no dependents if this is first build' do
expect(build.depends_on_builds).to be_empty
end
it 'expects to have one dependent if this is test' do
expect(rspec_test.depends_on_builds.map(&:id)).to contain_exactly(build.id)
end
it 'expects to have all builds from build and test stage if this is last' do
expect(staging.depends_on_builds.map(&:id)).to contain_exactly(build.id, rspec_test.id, rubocop_test.id)
end
it 'expects to have retried builds instead the original ones' do
project.add_developer(user)
retried_rspec = described_class.retry(rspec_test, user)
expect(staging.depends_on_builds.map(&:id))
.to contain_exactly(build.id, retried_rspec.id, rubocop_test.id)
end
describe '#dependencies' do
let(:dependencies) { }
let(:needs) { }
let!(:final) do
scheduling_type = needs.present? ? :dag : :stage
create(:ci_build,
pipeline: pipeline, name: 'final', scheduling_type: scheduling_type,
stage_idx: 3, stage: 'deploy', options: {
dependencies: dependencies
}
)
end
before do
needs.to_a.each do |need|
create(:ci_build_need, build: final, **need)
end
end
subject { final.dependencies }
context 'when dependencies are defined' do
let(:dependencies) { %w(rspec staging) }
it { is_expected.to contain_exactly(rspec_test, staging) }
end
context 'when needs are defined' do
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: true },
{ name: 'staging', artifacts: true }
]
end
it { is_expected.to contain_exactly(build, rspec_test, staging) }
context 'when ci_dag_support is disabled' do
before do
stub_feature_flags(ci_dag_support: false)
end
it { is_expected.to contain_exactly(build, rspec_test, rubocop_test, staging) }
end
end
context 'when need artifacts are defined' do
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: false },
{ name: 'staging', artifacts: true }
]
end
it { is_expected.to contain_exactly(build, staging) }
end
context 'when needs and dependencies are defined' do
let(:dependencies) { %w(rspec staging) }
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: true },
{ name: 'staging', artifacts: true }
]
end
it { is_expected.to contain_exactly(rspec_test, staging) }
end
context 'when needs and dependencies contradict' do
let(:dependencies) { %w(rspec staging) }
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: false },
{ name: 'staging', artifacts: true }
]
end
it { is_expected.to contain_exactly(staging) }
end
context 'when neither dependencies nor needs are defined' do
it { is_expected.to contain_exactly(build, rspec_test, rubocop_test, staging) }
end
end
describe '#all_dependencies' do
let!(:final_build) do
create(:ci_build,
pipeline: pipeline, name: 'deploy',
stage_idx: 3, stage: 'deploy'
)
end
subject { final_build.all_dependencies }
it 'returns dependencies and cross_dependencies' do
dependencies = [1, 2, 3]
cross_dependencies = [3, 4]
allow(final_build).to receive(:dependencies).and_return(dependencies)
allow(final_build).to receive(:cross_dependencies).and_return(cross_dependencies)
is_expected.to match(a_collection_containing_exactly(1, 2, 3, 4))
end
end
end
describe '#triggered_by?' do describe '#triggered_by?' do
subject { build.triggered_by?(user) } subject { build.triggered_by?(user) }
......
# frozen_string_literal: true
require 'spec_helper'
describe Ci::Processable::Dependencies do
let_it_be(:user) { create(:user) }
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:pipeline, reload: true) do
create(:ci_pipeline, project: project,
sha: project.commit.id,
ref: project.default_branch,
status: 'success')
end
let!(:build) { create(:ci_build, pipeline: pipeline, name: 'build', stage_idx: 0, stage: 'build') }
let!(:rspec_test) { create(:ci_build, pipeline: pipeline, name: 'rspec', stage_idx: 1, stage: 'test') }
let!(:rubocop_test) { create(:ci_build, pipeline: pipeline, name: 'rubocop', stage_idx: 1, stage: 'test') }
let!(:staging) { create(:ci_build, pipeline: pipeline, name: 'staging', stage_idx: 2, stage: 'deploy') }
describe '#local' do
subject { described_class.new(job).local }
describe 'jobs from previous stages' do
context 'when job is in the first stage' do
let(:job) { build }
it { is_expected.to be_empty }
end
context 'when job is in the second stage' do
let(:job) { rspec_test }
it 'contains all jobs from the first stage' do
is_expected.to contain_exactly(build)
end
end
context 'when job is in the last stage' do
let(:job) { staging }
it 'contains all jobs from all previous stages' do
is_expected.to contain_exactly(build, rspec_test, rubocop_test)
end
context 'when a job is retried' do
before do
project.add_developer(user)
end
let(:retried_job) { Ci::Build.retry(rspec_test, user) }
it 'contains the retried job instead of the original one' do
is_expected.to contain_exactly(build, retried_job, rubocop_test)
end
end
end
end
describe 'jobs from specified dependencies' do
let(:dependencies) { }
let(:needs) { }
let!(:job) do
scheduling_type = needs.present? ? :dag : :stage
create(:ci_build,
pipeline: pipeline,
name: 'final',
scheduling_type: scheduling_type,
stage_idx: 3,
stage: 'deploy',
options: { dependencies: dependencies }
)
end
before do
needs.to_a.each do |need|
create(:ci_build_need, build: job, **need)
end
end
context 'when dependencies are defined' do
let(:dependencies) { %w(rspec staging) }
it { is_expected.to contain_exactly(rspec_test, staging) }
end
context 'when needs are defined' do
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: true },
{ name: 'staging', artifacts: true }
]
end
it { is_expected.to contain_exactly(build, rspec_test, staging) }
context 'when ci_dag_support is disabled' do
before do
stub_feature_flags(ci_dag_support: false)
end
it { is_expected.to contain_exactly(build, rspec_test, rubocop_test, staging) }
end
end
context 'when need artifacts are defined' do
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: false },
{ name: 'staging', artifacts: true }
]
end
it { is_expected.to contain_exactly(build, staging) }
end
context 'when needs and dependencies are defined' do
let(:dependencies) { %w(rspec staging) }
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: true },
{ name: 'staging', artifacts: true }
]
end
it { is_expected.to contain_exactly(rspec_test, staging) }
end
context 'when needs and dependencies contradict' do
let(:dependencies) { %w(rspec staging) }
let(:needs) do
[
{ name: 'build', artifacts: true },
{ name: 'rspec', artifacts: false },
{ name: 'staging', artifacts: true }
]
end
it 'returns only the intersection' do
is_expected.to contain_exactly(staging)
end
end
context 'when neither dependencies nor needs are defined' do
it 'returns the jobs from previous stages' do
is_expected.to contain_exactly(build, rspec_test, rubocop_test, staging)
end
end
end
end
describe '#all' do
let!(:job) do
create(:ci_build, pipeline: pipeline, name: 'deploy', stage_idx: 3, stage: 'deploy')
end
let(:dependencies) { described_class.new(job) }
subject { dependencies.all }
it 'returns the union of all local dependencies and any cross pipeline dependencies' do
expect(dependencies).to receive(:local).and_return([1, 2, 3])
expect(dependencies).to receive(:cross_pipeline).and_return([3, 4])
expect(subject).to contain_exactly(1, 2, 3, 4)
end
end
end
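Based on the spec above, here is a console-style sketch of how the extracted class is used; the build lookup is hypothetical.

```ruby
# Pick any processable job from a pipeline (lookup is illustrative only).
build = Ci::Build.find_by!(name: 'final')

deps = Ci::Processable::Dependencies.new(build)

# Jobs from previous stages of the same pipeline, narrowed by `needs:` and
# `dependencies:` (their intersection when both are defined, per the spec above).
deps.local.map(&:name)

# Local dependencies plus any cross-pipeline dependencies, with duplicates removed.
deps.all.map(&:name)
```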
...@@ -623,4 +623,26 @@ describe Deployment do ...@@ -623,4 +623,26 @@ describe Deployment do
expect(deploy.errors[:ref]).not_to be_empty expect(deploy.errors[:ref]).not_to be_empty
end end
end end
describe '.fast_destroy_all' do
it 'cleans up the ref paths of destroyed deployments' do
project = create(:project, :repository)
environment = create(:environment, project: project)
destroyed_deployments = create_list(:deployment, 2, :success, environment: environment, project: project)
other_deployments = create_list(:deployment, 2, :success, environment: environment, project: project)
(destroyed_deployments + other_deployments).each(&:create_ref)
described_class.where(id: destroyed_deployments.map(&:id)).fast_destroy_all
destroyed_deployments.each do |deployment|
expect(project.commit(deployment.ref_path)).to be_nil
end
other_deployments.each do |deployment|
expect(project.commit(deployment.ref_path)).not_to be_nil
end
end
end
end end
...@@ -1301,4 +1301,13 @@ describe Environment, :use_clean_rails_memory_store_caching do ...@@ -1301,4 +1301,13 @@ describe Environment, :use_clean_rails_memory_store_caching do
end end
end end
end end
describe '#destroy' do
it 'removes the deployment refs from Gitaly' do
deployment = create(:deployment, :success, environment: environment, project: project)
deployment.create_ref
expect { environment.destroy }.to change { project.commit(deployment.ref_path) }.to(nil)
end
end
end end
...@@ -781,15 +781,15 @@ ...@@ -781,15 +781,15 @@
eslint-plugin-vue "^6.2.1" eslint-plugin-vue "^6.2.1"
vue-eslint-parser "^7.0.0" vue-eslint-parser "^7.0.0"
"@gitlab/svgs@^1.114.0": "@gitlab/svgs@^1.115.0":
version "1.114.0" version "1.115.0"
resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.114.0.tgz#ffee243fa540016c8198596686a46e3c459adb32" resolved "https://registry.yarnpkg.com/@gitlab/svgs/-/svgs-1.115.0.tgz#2762ad045d5a2bd728f74fcb4c00caa9bd6dbc22"
integrity sha512-r2tizWDx1fX2QbeXzU0Z5Fi9YS7TLIxt7K+vAnZxOeddgd5KdipCvhT/H7n9Oa9jLU4bMwGSrHZ1usCmCoWOnw== integrity sha512-jlmNGqCTpSiPFrNbLaW6GGXNbvIShLdrpeYTtSEz/yFJMClQfPjHc8Zm9bl/PqAM5d/yGQqk8e+rBc4LeAhEfg==
"@gitlab/ui@^9.31.1": "@gitlab/ui@^10.0.0":
version "9.31.1" version "10.0.0"
resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-9.31.1.tgz#53206aac643d79f8eddbc5715131bad5ffc1e412" resolved "https://registry.yarnpkg.com/@gitlab/ui/-/ui-10.0.0.tgz#dced1119237f328367e8c4922cf4e1ae986fac54"
integrity sha512-MHBVIpVzUYPkr70ti21dTZbzzZttOidWZdN3TeZOdqYbjYg2ONujFBxOGSCNYcaMYr+1/wFtuVRL/JmMVGjJrg== integrity sha512-+qsojtfE5mhryjJyReXBY9C3J4s4jlRpHfEcaCFuhcebtq5Uhd6xgLwgxT+E7fMvtLQpGATMo1DiD80yhLb2pQ==
dependencies: dependencies:
"@babel/standalone" "^7.0.0" "@babel/standalone" "^7.0.0"
"@gitlab/vue-toasted" "^1.3.0" "@gitlab/vue-toasted" "^1.3.0"
......