Commit ae78b85a authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 38149afc
...@@ -21,6 +21,8 @@ lib/gitlab/database/ @gitlab-org/maintainers/database ...@@ -21,6 +21,8 @@ lib/gitlab/database/ @gitlab-org/maintainers/database
lib/gitlab/sql/ @gitlab-org/maintainers/database lib/gitlab/sql/ @gitlab-org/maintainers/database
lib/gitlab/github_import/ @gitlab-org/maintainers/database lib/gitlab/github_import/ @gitlab-org/maintainers/database
/ee/db/ @gitlab-org/maintainers/database /ee/db/ @gitlab-org/maintainers/database
/app/finders/ @gitlab-org/maintainers/database
/ee/app/finders/ @gitlab-org/maintainers/database
# Feature specific owners # Feature specific owners
/ee/lib/gitlab/code_owners/ @reprazent /ee/lib/gitlab/code_owners/ @reprazent
......
...@@ -375,7 +375,7 @@ group :development, :test do ...@@ -375,7 +375,7 @@ group :development, :test do
gem 'scss_lint', '~> 0.56.0', require: false gem 'scss_lint', '~> 0.56.0', require: false
gem 'haml_lint', '~> 0.34.0', require: false gem 'haml_lint', '~> 0.34.0', require: false
gem 'simplecov', '~> 0.16.1', require: false gem 'simplecov', '~> 0.16.1', require: false
gem 'bundler-audit', '~> 0.5.0', require: false gem 'bundler-audit', '~> 0.6.1', require: false
gem 'benchmark-ips', '~> 2.3.0', require: false gem 'benchmark-ips', '~> 2.3.0', require: false
......
...@@ -134,8 +134,8 @@ GEM ...@@ -134,8 +134,8 @@ GEM
bullet (6.0.2) bullet (6.0.2)
activesupport (>= 3.0.0) activesupport (>= 3.0.0)
uniform_notifier (~> 1.11) uniform_notifier (~> 1.11)
bundler-audit (0.5.0) bundler-audit (0.6.1)
bundler (~> 1.2) bundler (>= 1.2.0, < 3)
thor (~> 0.18) thor (~> 0.18)
byebug (9.1.0) byebug (9.1.0)
capybara (3.22.0) capybara (3.22.0)
...@@ -1176,7 +1176,7 @@ DEPENDENCIES ...@@ -1176,7 +1176,7 @@ DEPENDENCIES
brakeman (~> 4.2) brakeman (~> 4.2)
browser (~> 2.5) browser (~> 2.5)
bullet (~> 6.0.2) bullet (~> 6.0.2)
bundler-audit (~> 0.5.0) bundler-audit (~> 0.6.1)
capybara (~> 3.22.0) capybara (~> 3.22.0)
capybara-screenshot (~> 1.0.22) capybara-screenshot (~> 1.0.22)
carrierwave (~> 1.3) carrierwave (~> 1.3)
......
/* eslint-disable no-else-return */
/* global CommentsStore */
/* global ResolveService */
import Vue from 'vue';
import { __ } from '~/locale';
const ResolveDiscussionBtn = Vue.extend({
props: {
discussionId: {
type: String,
required: true,
},
mergeRequestId: {
type: Number,
required: true,
},
canResolve: {
type: Boolean,
required: true,
},
},
data() {
return {
discussion: {},
};
},
computed: {
showButton() {
if (this.discussion) {
return this.discussion.isResolvable();
} else {
return false;
}
},
isDiscussionResolved() {
if (this.discussion) {
return this.discussion.isResolved();
} else {
return false;
}
},
buttonText() {
if (this.isDiscussionResolved) {
return __('Unresolve discussion');
} else {
return __('Resolve discussion');
}
},
loading() {
if (this.discussion) {
return this.discussion.loading;
} else {
return false;
}
},
},
created() {
CommentsStore.createDiscussion(this.discussionId, this.canResolve);
this.discussion = CommentsStore.state[this.discussionId];
},
methods: {
resolve() {
ResolveService.toggleResolveForDiscussion(this.mergeRequestId, this.discussionId);
},
},
});
Vue.component('resolve-discussion-btn', ResolveDiscussionBtn);
...@@ -11,7 +11,6 @@ import './components/comment_resolve_btn'; ...@@ -11,7 +11,6 @@ import './components/comment_resolve_btn';
import './components/jump_to_discussion'; import './components/jump_to_discussion';
import './components/resolve_btn'; import './components/resolve_btn';
import './components/resolve_count'; import './components/resolve_count';
import './components/resolve_discussion_btn';
import './components/diff_note_avatars'; import './components/diff_note_avatars';
import './components/new_issue_for_discussion'; import './components/new_issue_for_discussion';
...@@ -20,7 +19,7 @@ export default () => { ...@@ -20,7 +19,7 @@ export default () => {
document.querySelector('.merge-request') || document.querySelector('.commit-box'); document.querySelector('.merge-request') || document.querySelector('.commit-box');
const { projectPath } = projectPathHolder.dataset; const { projectPath } = projectPathHolder.dataset;
const COMPONENT_SELECTOR = const COMPONENT_SELECTOR =
'resolve-btn, resolve-discussion-btn, jump-to-discussion, comment-and-resolve-btn, new-issue-for-discussion-btn'; 'resolve-btn, jump-to-discussion, comment-and-resolve-btn, new-issue-for-discussion-btn';
window.gl = window.gl || {}; window.gl = window.gl || {};
window.gl.diffNoteApps = {}; window.gl.diffNoteApps = {};
......
...@@ -9,7 +9,7 @@ import DropdownUtils from './dropdown_utils'; ...@@ -9,7 +9,7 @@ import DropdownUtils from './dropdown_utils';
import { mergeUrlParams } from '../lib/utils/url_utility'; import { mergeUrlParams } from '../lib/utils/url_utility';
export default class AvailableDropdownMappings { export default class AvailableDropdownMappings {
constructor( constructor({
container, container,
runnerTagsEndpoint, runnerTagsEndpoint,
labelsEndpoint, labelsEndpoint,
...@@ -18,7 +18,7 @@ export default class AvailableDropdownMappings { ...@@ -18,7 +18,7 @@ export default class AvailableDropdownMappings {
groupsOnly, groupsOnly,
includeAncestorGroups, includeAncestorGroups,
includeDescendantGroups, includeDescendantGroups,
) { }) {
this.container = container; this.container = container;
this.runnerTagsEndpoint = runnerTagsEndpoint; this.runnerTagsEndpoint = runnerTagsEndpoint;
this.labelsEndpoint = labelsEndpoint; this.labelsEndpoint = labelsEndpoint;
......
...@@ -13,6 +13,7 @@ export default class FilteredSearchDropdownManager { ...@@ -13,6 +13,7 @@ export default class FilteredSearchDropdownManager {
labelsEndpoint = '', labelsEndpoint = '',
milestonesEndpoint = '', milestonesEndpoint = '',
releasesEndpoint = '', releasesEndpoint = '',
epicsEndpoint = '',
tokenizer, tokenizer,
page, page,
isGroup, isGroup,
...@@ -27,6 +28,7 @@ export default class FilteredSearchDropdownManager { ...@@ -27,6 +28,7 @@ export default class FilteredSearchDropdownManager {
this.labelsEndpoint = removeTrailingSlash(labelsEndpoint); this.labelsEndpoint = removeTrailingSlash(labelsEndpoint);
this.milestonesEndpoint = removeTrailingSlash(milestonesEndpoint); this.milestonesEndpoint = removeTrailingSlash(milestonesEndpoint);
this.releasesEndpoint = removeTrailingSlash(releasesEndpoint); this.releasesEndpoint = removeTrailingSlash(releasesEndpoint);
this.epicsEndpoint = removeTrailingSlash(epicsEndpoint);
this.tokenizer = tokenizer; this.tokenizer = tokenizer;
this.filteredSearchTokenKeys = filteredSearchTokenKeys || FilteredSearchTokenKeys; this.filteredSearchTokenKeys = filteredSearchTokenKeys || FilteredSearchTokenKeys;
this.filteredSearchInput = this.container.querySelector('.filtered-search'); this.filteredSearchInput = this.container.querySelector('.filtered-search');
...@@ -54,16 +56,8 @@ export default class FilteredSearchDropdownManager { ...@@ -54,16 +56,8 @@ export default class FilteredSearchDropdownManager {
setupMapping() { setupMapping() {
const supportedTokens = this.filteredSearchTokenKeys.getKeys(); const supportedTokens = this.filteredSearchTokenKeys.getKeys();
const availableMappings = new AvailableDropdownMappings(
this.container, const availableMappings = new AvailableDropdownMappings({ ...this });
this.runnerTagsEndpoint,
this.labelsEndpoint,
this.milestonesEndpoint,
this.releasesEndpoint,
this.groupsOnly,
this.includeAncestorGroups,
this.includeDescendantGroups,
);
this.mapping = availableMappings.getAllowedMappings(supportedTokens); this.mapping = availableMappings.getAllowedMappings(supportedTokens);
} }
......
...@@ -45,6 +45,11 @@ export default class FilteredSearchManager { ...@@ -45,6 +45,11 @@ export default class FilteredSearchManager {
this.filteredSearchTokenKeys.enableMultipleAssignees(); this.filteredSearchTokenKeys.enableMultipleAssignees();
} }
const { epicsEndpoint } = this.filteredSearchInput.dataset;
if (!epicsEndpoint && this.filteredSearchTokenKeys.removeEpicToken) {
this.filteredSearchTokenKeys.removeEpicToken();
}
this.recentSearchesStore = new RecentSearchesStore({ this.recentSearchesStore = new RecentSearchesStore({
isLocalStorageAvailable: RecentSearchesService.isAvailable(), isLocalStorageAvailable: RecentSearchesService.isAvailable(),
allowedKeys: this.filteredSearchTokenKeys.getKeys(), allowedKeys: this.filteredSearchTokenKeys.getKeys(),
...@@ -88,12 +93,20 @@ export default class FilteredSearchManager { ...@@ -88,12 +93,20 @@ export default class FilteredSearchManager {
if (this.filteredSearchInput) { if (this.filteredSearchInput) {
this.tokenizer = FilteredSearchTokenizer; this.tokenizer = FilteredSearchTokenizer;
const {
runnerTagsEndpoint = '',
labelsEndpoint = '',
milestonesEndpoint = '',
releasesEndpoint = '',
epicsEndpoint = '',
} = this.filteredSearchInput.dataset;
this.dropdownManager = new FilteredSearchDropdownManager({ this.dropdownManager = new FilteredSearchDropdownManager({
runnerTagsEndpoint: runnerTagsEndpoint,
this.filteredSearchInput.getAttribute('data-runner-tags-endpoint') || '', labelsEndpoint,
labelsEndpoint: this.filteredSearchInput.getAttribute('data-labels-endpoint') || '', milestonesEndpoint,
milestonesEndpoint: this.filteredSearchInput.getAttribute('data-milestones-endpoint') || '', releasesEndpoint,
releasesEndpoint: this.filteredSearchInput.getAttribute('data-releases-endpoint') || '', epicsEndpoint,
tokenizer: this.tokenizer, tokenizer: this.tokenizer,
page: this.page, page: this.page,
isGroup: this.isGroup, isGroup: this.isGroup,
......
...@@ -28,6 +28,8 @@ export default class VisualTokenValue { ...@@ -28,6 +28,8 @@ export default class VisualTokenValue {
this.updateUserTokenAppearance(tokenValueContainer, tokenValueElement); this.updateUserTokenAppearance(tokenValueContainer, tokenValueElement);
} else if (tokenType === 'my-reaction') { } else if (tokenType === 'my-reaction') {
this.updateEmojiTokenAppearance(tokenValueContainer, tokenValueElement); this.updateEmojiTokenAppearance(tokenValueContainer, tokenValueElement);
} else if (tokenType === 'epic') {
this.updateEpicLabel(tokenValueContainer, tokenValueElement);
} }
} }
...@@ -83,6 +85,39 @@ export default class VisualTokenValue { ...@@ -83,6 +85,39 @@ export default class VisualTokenValue {
.catch(() => new Flash(__('An error occurred while fetching label colors.'))); .catch(() => new Flash(__('An error occurred while fetching label colors.')));
} }
updateEpicLabel(tokenValueContainer) {
const tokenValue = this.tokenValue.replace(/^&/, '');
const filteredSearchInput = FilteredSearchContainer.container.querySelector('.filtered-search');
const { epicsEndpoint } = filteredSearchInput.dataset;
const epicsEndpointWithParams = FilteredSearchVisualTokens.getEndpointWithQueryParams(
`${epicsEndpoint}.json`,
filteredSearchInput.dataset.endpointQueryParams,
);
return AjaxCache.retrieve(epicsEndpointWithParams)
.then(epics => {
const matchingEpic = (epics || []).find(epic => epic.id === Number(tokenValue));
if (!matchingEpic) {
return;
}
VisualTokenValue.replaceEpicTitle(tokenValueContainer, matchingEpic.title, matchingEpic.id);
})
.catch(() => new Flash(__('An error occurred while adding formatted title for epic')));
}
static replaceEpicTitle(tokenValueContainer, epicTitle, epicId) {
const tokenContainer = tokenValueContainer;
const valueContainer = tokenContainer.querySelector('.value');
if (valueContainer) {
tokenContainer.dataset.originalValue = valueContainer.innerText;
valueContainer.innerText = `"${epicTitle}"::&${epicId}`;
}
}
static setTokenStyle(tokenValueContainer, backgroundColor, textColor) { static setTokenStyle(tokenValueContainer, backgroundColor, textColor) {
const token = tokenValueContainer; const token = tokenValueContainer;
......
...@@ -129,7 +129,7 @@ export default { ...@@ -129,7 +129,7 @@ export default {
</div> </div>
<div class="settings-content"> <div class="settings-content">
<form name="self-monitoring-form"> <form name="self-monitoring-form">
<p v-html="selfMonitoringFormText"></p> <p ref="selfMonitoringFormText" v-html="selfMonitoringFormText"></p>
<gl-form-group :label="$options.formLabels.createProject" label-for="self-monitor-toggle"> <gl-form-group :label="$options.formLabels.createProject" label-for="self-monitor-toggle">
<gl-toggle <gl-toggle
v-model="selfMonitorEnabled" v-model="selfMonitorEnabled"
......
...@@ -9,7 +9,7 @@ export default (initialState = {}) => ({ ...@@ -9,7 +9,7 @@ export default (initialState = {}) => ({
deleteProjectStatusEndpoint: initialState.statusDeleteSelfMonitoringProjectPath || '', deleteProjectStatusEndpoint: initialState.statusDeleteSelfMonitoringProjectPath || '',
selfMonitorProjectPath: initialState.selfMonitoringProjectFullPath || '', selfMonitorProjectPath: initialState.selfMonitoringProjectFullPath || '',
showAlert: false, showAlert: false,
projectPath: '', projectPath: initialState.selfMonitoringProjectFullPath || '',
loading: false, loading: false,
alertContent: {}, alertContent: {},
}); });
...@@ -410,6 +410,15 @@ ...@@ -410,6 +410,15 @@
} }
} }
> button.dropdown-epic-button {
flex-direction: column;
.reference {
color: $gl-gray-400;
margin-top: $gl-padding-4;
}
}
&.droplab-item-selected i { &.droplab-item-selected i {
visibility: visible; visibility: visible;
} }
......
...@@ -11,7 +11,7 @@ class ServerlessDomainFinder ...@@ -11,7 +11,7 @@ class ServerlessDomainFinder
return unless serverless? return unless serverless?
@serverless_domain_cluster = ::Serverless::DomainCluster.for_uuid(serverless_domain_cluster_uuid) @serverless_domain_cluster = ::Serverless::DomainCluster.for_uuid(serverless_domain_cluster_uuid)
return unless serverless_domain_cluster return unless serverless_domain_cluster&.knative&.external_ip
@environment = ::Environment.for_id_and_slug(match[:environment_id].to_i(16), match[:environment_slug]) @environment = ::Environment.for_id_and_slug(match[:environment_id].to_i(16), match[:environment_slug])
return unless environment return unless environment
......
# frozen_string_literal: true
module Serverless
class LookupPath
attr_reader :serverless_domain
delegate :serverless_domain_cluster, to: :serverless_domain
delegate :knative, to: :serverless_domain_cluster
delegate :certificate, to: :serverless_domain_cluster
delegate :key, to: :serverless_domain_cluster
def initialize(serverless_domain)
@serverless_domain = serverless_domain
end
def source
{
type: 'serverless',
service: serverless_domain.knative_uri.host,
cluster: {
hostname: knative.hostname,
address: knative.external_ip,
port: 443,
cert: certificate,
key: key
}
}
end
end
end
# frozen_string_literal: true
module Serverless
class VirtualDomain
attr_reader :serverless_domain
delegate :serverless_domain_cluster, to: :serverless_domain
delegate :pages_domain, to: :serverless_domain_cluster
delegate :certificate, to: :pages_domain
delegate :key, to: :pages_domain
def initialize(serverless_domain)
@serverless_domain = serverless_domain
end
def lookup_paths
[
::Serverless::LookupPath.new(serverless_domain)
]
end
end
end
...@@ -8,6 +8,8 @@ module Ci ...@@ -8,6 +8,8 @@ module Ci
JOB_QUEUE_DURATION_SECONDS_BUCKETS = [1, 3, 10, 30, 60, 300, 900, 1800, 3600].freeze JOB_QUEUE_DURATION_SECONDS_BUCKETS = [1, 3, 10, 30, 60, 300, 900, 1800, 3600].freeze
JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET = 5.freeze JOBS_RUNNING_FOR_PROJECT_MAX_BUCKET = 5.freeze
METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'.freeze
DEFAULT_METRICS_SHARD = 'default'.freeze
Result = Struct.new(:build, :valid?) Result = Struct.new(:build, :valid?)
...@@ -193,7 +195,13 @@ module Ci ...@@ -193,7 +195,13 @@ module Ci
def register_success(job) def register_success(job)
labels = { shared_runner: runner.instance_type?, labels = { shared_runner: runner.instance_type?,
jobs_running_for_project: jobs_running_for_project(job) } jobs_running_for_project: jobs_running_for_project(job),
shard: DEFAULT_METRICS_SHARD }
if runner.instance_type?
shard = runner.tag_list.sort.find { |name| name.starts_with?(METRICS_SHARD_TAG_PREFIX) }
labels[:shard] = shard.gsub(METRICS_SHARD_TAG_PREFIX, '') if shard
end
job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil? job_queue_duration_seconds.observe(labels, Time.now - job.queued_at) unless job.queued_at.nil?
attempt_counter.increment attempt_counter.increment
......
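The register_success change above derives a new `shard` label for the `job_queue_duration_seconds` metric from the runner's tag list: an instance-type runner tagged `metrics_shard::<name>` reports that name, everything else reports `default`. A minimal standalone sketch of that derivation, using plain Ruby (`start_with?`/`sub`) instead of the ActiveSupport helpers in the source, with an illustrative tag list:

METRICS_SHARD_TAG_PREFIX = 'metrics_shard::'
DEFAULT_METRICS_SHARD = 'default'

# Mirrors the shard lookup in Ci::RegisterJobService#register_success.
def metrics_shard_for(tag_list)
  shard_tag = tag_list.sort.find { |name| name.start_with?(METRICS_SHARD_TAG_PREFIX) }
  shard_tag ? shard_tag.sub(METRICS_SHARD_TAG_PREFIX, '') : DEFAULT_METRICS_SHARD
end

metrics_shard_for(%w[docker metrics_shard::shared]) # => "shared"
metrics_shard_for(%w[docker linux])                 # => "default"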
%resolve-discussion-btn{ ":discussion-id" => "'#{discussion.id}'",
":merge-request-id" => discussion.noteable.iid,
":can-resolve" => discussion.can_resolve?(current_user),
"inline-template" => true }
.btn-group{ role: "group", "v-if" => "showButton" }
%button.btn.btn-default{ type: "button", "@click" => "resolve", ":disabled" => "loading", "v-cloak" => "true" }
= icon("spinner spin", "v-show" => "loading")
{{ buttonText }}
...@@ -159,6 +159,8 @@ ...@@ -159,6 +159,8 @@
= render_if_exists 'shared/issuable/filter_weight', type: type = render_if_exists 'shared/issuable/filter_weight', type: type
= render_if_exists 'shared/issuable/filter_epic', type: type
%button.clear-search.hidden{ type: 'button' } %button.clear-search.hidden{ type: 'button' }
= icon('times') = icon('times')
.filter-dropdown-container.d-flex.flex-column.flex-md-row .filter-dropdown-container.d-flex.flex-column.flex-md-row
......
---
title: Fix self monitoring project link
merge_request: 25516
author:
type: fixed
---
title: Migrate .fa-spinner to .spinner for ee/app/views/shared/members
merge_request: 25019
author: nuwe1
type: other
---
title: Add 'shard' label for 'job_queue_duration_seconds' metric
merge_request: 23536
author:
type: changed
---
title: Fix an issue where Group Import members with the Owner access level were imported with the Maintainer access level. The Owner access level is now preserved
merge_request: 25595
author:
type: fixed
# frozen_string_literal: true
class UpdateApplicationSettingNpmPackageRequestsForwardingDefault < ActiveRecord::Migration[6.0]
DOWNTIME = false
def up
change_column_default :application_settings, :npm_package_requests_forwarding, true
execute('UPDATE application_settings SET npm_package_requests_forwarding = TRUE')
end
def down
change_column_default :application_settings, :npm_package_requests_forwarding, false
execute('UPDATE application_settings SET npm_package_requests_forwarding = FALSE')
end
end
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_02_20_180944) do ActiveRecord::Schema.define(version: 2020_02_21_105436) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm" enable_extension "pg_trgm"
...@@ -351,7 +351,7 @@ ActiveRecord::Schema.define(version: 2020_02_20_180944) do ...@@ -351,7 +351,7 @@ ActiveRecord::Schema.define(version: 2020_02_20_180944) do
t.boolean "prevent_merge_requests_committers_approval", default: false, null: false t.boolean "prevent_merge_requests_committers_approval", default: false, null: false
t.boolean "email_restrictions_enabled", default: false, null: false t.boolean "email_restrictions_enabled", default: false, null: false
t.text "email_restrictions" t.text "email_restrictions"
t.boolean "npm_package_requests_forwarding", default: false, null: false t.boolean "npm_package_requests_forwarding", default: true, null: false
t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id" t.index ["custom_project_templates_group_id"], name: "index_application_settings_on_custom_project_templates_group_id"
t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id" t.index ["file_template_project_id"], name: "index_application_settings_on_file_template_project_id"
t.index ["instance_administration_project_id"], name: "index_applicationsettings_on_instance_administration_project_id" t.index ["instance_administration_project_id"], name: "index_applicationsettings_on_instance_administration_project_id"
......
...@@ -3,9 +3,9 @@ ...@@ -3,9 +3,9 @@
NOTE: **Note:** NOTE: **Note:**
The GitLab Slack application is only configurable for GitLab.com. It will **not** The GitLab Slack application is only configurable for GitLab.com. It will **not**
work for on-premises installations where you can configure the work for on-premises installations where you can configure the
[Slack slash commands](slack_slash_commands.md) service instead. We're working [Slack slash commands](slack_slash_commands.md) service instead. We're planning
with Slack on making this configurable for all GitLab installations, but there's to make this configurable for all GitLab installations, but there's
no ETA. no ETA - see [#28164](https://gitlab.com/gitlab-org/gitlab/issues/28164).
It was first introduced in GitLab 9.4 and distributed to Slack App Directory in It was first introduced in GitLab 9.4 and distributed to Slack App Directory in
GitLab 10.2. GitLab 10.2.
......
...@@ -174,7 +174,8 @@ X-Gitlab-Event: Push Hook ...@@ -174,7 +174,8 @@ X-Gitlab-Event: Push Hook
"commits": [ "commits": [
{ {
"id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "id": "b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327",
"message": "Update Catalan translation to e38cb41.", "message": "Update Catalan translation to e38cb41.\n\nSee https://gitlab.com/gitlab-org/gitlab for more information",
"title": "Update Catalan translation to e38cb41.",
"timestamp": "2011-12-12T14:27:31+02:00", "timestamp": "2011-12-12T14:27:31+02:00",
"url": "http://example.com/mike/diaspora/commit/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327", "url": "http://example.com/mike/diaspora/commit/b6568db1bc1dcd7f8b4d5a946b0b91f9dacd7327",
"author": { "author": {
...@@ -188,6 +189,7 @@ X-Gitlab-Event: Push Hook ...@@ -188,6 +189,7 @@ X-Gitlab-Event: Push Hook
{ {
"id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
"message": "fixed readme", "message": "fixed readme",
"title": "fixed readme",
"timestamp": "2012-01-03T23:36:29+02:00", "timestamp": "2012-01-03T23:36:29+02:00",
"url": "http://example.com/mike/diaspora/commit/da1560886d4f094c3e6c9ef40349f7d38b5d27d7", "url": "http://example.com/mike/diaspora/commit/da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
"author": { "author": {
......
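As the payload excerpt above shows, each entry in `commits` now carries a `title` in addition to `message`; in the documented example the title is simply the first line of the message. A minimal consumer-side sketch (the `request_body` variable is a hypothetical stand-in for the received webhook body):

require 'json'

payload = JSON.parse(request_body) # hypothetical: raw JSON body of the Push Hook request
payload.fetch('commits', []).each do |commit|
  # Prefer the new `title` field; fall back to the first line of `message`
  # for payloads produced before this change.
  title = commit['title'] || commit['message'].to_s.lines.first.to_s.chomp
  puts "#{commit['id'][0, 8]} #{title}"
end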
...@@ -41,6 +41,7 @@ groups: ...@@ -41,6 +41,7 @@ groups:
- [Label](../project/labels.md) - [Label](../project/labels.md)
- My-reaction - My-reaction
- Confidential - Confidential
- Epic ([Introduced](https://gitlab.com/gitlab-org/gitlab/issues/195704) in GitLab 12.8)
- Search for this text - Search for this text
1. Select or type the operator to use for filtering the attribute. The following operators are 1. Select or type the operator to use for filtering the attribute. The following operators are
available: available:
......
...@@ -8,11 +8,6 @@ module API ...@@ -8,11 +8,6 @@ module API
expose :project_id, :access_control, expose :project_id, :access_control,
:source, :https_only, :prefix :source, :https_only, :prefix
end end
class VirtualDomain < Grape::Entity
expose :certificate, :key
expose :lookup_paths, using: LookupPath
end
end end
end end
end end
......
# frozen_string_literal: true
module API
module Entities
module Internal
module Pages
class VirtualDomain < Grape::Entity
expose :certificate, :key
expose :lookup_paths, using: LookupPath
end
end
end
end
end
# frozen_string_literal: true
module API
module Entities
module Internal
module Serverless
class LookupPath < Grape::Entity
expose :source
end
end
end
end
end
# frozen_string_literal: true
module API
module Entities
module Internal
module Serverless
class VirtualDomain < Grape::Entity
expose :certificate, :key
expose :lookup_paths, using: LookupPath
end
end
end
end
end
...@@ -24,6 +24,18 @@ module API ...@@ -24,6 +24,18 @@ module API
requires :host, type: String, desc: 'The host to query for' requires :host, type: String, desc: 'The host to query for'
end end
get "/" do get "/" do
serverless_domain_finder = ServerlessDomainFinder.new(params[:host])
if serverless_domain_finder.serverless?
# Handle Serverless domains
serverless_domain = serverless_domain_finder.execute
no_content! unless serverless_domain
virtual_domain = Serverless::VirtualDomain.new(serverless_domain)
no_content! unless virtual_domain
present virtual_domain, with: Entities::Internal::Serverless::VirtualDomain
else
# Handle Pages domains
host = Namespace.find_by_pages_host(params[:host]) || PagesDomain.find_by_domain(params[:host]) host = Namespace.find_by_pages_host(params[:host]) || PagesDomain.find_by_domain(params[:host])
no_content! unless host no_content! unless host
...@@ -36,4 +48,5 @@ module API ...@@ -36,4 +48,5 @@ module API
end end
end end
end end
end
end end
...@@ -9,34 +9,21 @@ module Gitlab ...@@ -9,34 +9,21 @@ module Gitlab
# defining a downstream project trigger. # defining a downstream project trigger.
# #
class Bridge < ::Gitlab::Config::Entry::Node class Bridge < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable include ::Gitlab::Ci::Config::Entry::Processable
include ::Gitlab::Config::Entry::Attributable
include ::Gitlab::Config::Entry::Inheritable
ALLOWED_KEYS = %i[trigger stage allow_failure only except ALLOWED_KEYS = %i[trigger allow_failure when variables needs].freeze
when extends variables needs rules].freeze
validations do validations do
validates :config, allowed_keys: ALLOWED_KEYS validates :config, allowed_keys: ALLOWED_KEYS + PROCESSABLE_ALLOWED_KEYS
validates :config, presence: true
validates :name, presence: true
validates :name, type: Symbol
validates :config, disallowed_keys: {
in: %i[only except when start_in],
message: 'key may not be used with `rules`'
},
if: :has_rules?
with_options allow_nil: true do with_options allow_nil: true do
validates :when, validates :when,
inclusion: { in: %w[on_success on_failure always], inclusion: { in: %w[on_success on_failure always],
message: 'should be on_success, on_failure or always' } message: 'should be on_success, on_failure or always' }
validates :extends, type: String
validates :rules, array_of_hashes: true
end end
validate on: :composed do validate on: :composed do
unless trigger.present? || bridge_needs.present? unless trigger_defined? || bridge_needs.present?
errors.add(:config, 'should contain either a trigger or a needs:pipeline') errors.add(:config, 'should contain either a trigger or a needs:pipeline')
end end
end end
...@@ -58,32 +45,13 @@ module Gitlab ...@@ -58,32 +45,13 @@ module Gitlab
inherit: false, inherit: false,
metadata: { allowed_needs: %i[job bridge] } metadata: { allowed_needs: %i[job bridge] }
entry :stage, ::Gitlab::Ci::Config::Entry::Stage,
description: 'Pipeline stage this job will be executed into.',
inherit: false
entry :only, ::Gitlab::Ci::Config::Entry::Policy,
description: 'Refs policy this job will be executed for.',
default: ::Gitlab::Ci::Config::Entry::Policy::DEFAULT_ONLY,
inherit: false
entry :except, ::Gitlab::Ci::Config::Entry::Policy,
description: 'Refs policy this job will be executed for.',
inherit: false
entry :rules, ::Gitlab::Ci::Config::Entry::Rules,
description: 'List of evaluable Rules to determine job inclusion.',
inherit: false,
metadata: {
allowed_when: %w[on_success on_failure always never manual delayed].freeze
}
entry :variables, ::Gitlab::Ci::Config::Entry::Variables, entry :variables, ::Gitlab::Ci::Config::Entry::Variables,
description: 'Environment variables available for this job.', description: 'Environment variables available for this job.',
inherit: false inherit: false
helpers(*ALLOWED_KEYS) helpers :trigger, :needs, :variables
attributes(*ALLOWED_KEYS)
attributes :when, :allow_failure
def self.matching?(name, config) def self.matching?(name, config)
!name.to_s.start_with?('.') && !name.to_s.start_with?('.') &&
...@@ -95,56 +63,20 @@ module Gitlab ...@@ -95,56 +63,20 @@ module Gitlab
true true
end end
def compose!(deps = nil)
super do
has_workflow_rules = deps&.workflow&.has_rules?
# If workflow:rules: or rules: are used
# they are considered not compatible
# with `only/except` defaults
#
# Context: https://gitlab.com/gitlab-org/gitlab/merge_requests/21742
if has_rules? || has_workflow_rules
# Remove only/except defaults
# defaults are not considered as defined
@entries.delete(:only) unless only_defined?
@entries.delete(:except) unless except_defined?
end
end
end
def has_rules?
@config&.key?(:rules)
end
def name
@metadata[:name]
end
def value def value
{ name: name, super.merge(
trigger: (trigger_value if trigger_defined?), trigger: (trigger_value if trigger_defined?),
needs: (needs_value if needs_defined?), needs: (needs_value if needs_defined?),
ignore: !!allow_failure, ignore: !!allow_failure,
stage: stage_value, when: self.when,
when: when_value,
extends: extends_value,
variables: (variables_value if variables_defined?), variables: (variables_value if variables_defined?),
rules: (rules_value if has_rules?), scheduling_type: needs_defined? && !bridge_needs ? :dag : :stage
only: only_value, ).compact
except: except_value,
scheduling_type: needs_defined? && !bridge_needs ? :dag : :stage }.compact
end end
def bridge_needs def bridge_needs
needs_value[:bridge] if needs_value needs_value[:bridge] if needs_value
end end
private
def overwrite_entry(deps, key, current_entry)
deps.default[key] unless current_entry.specified?
end
end end
end end
end end
......
...@@ -8,33 +8,21 @@ module Gitlab ...@@ -8,33 +8,21 @@ module Gitlab
# Entry that represents a concrete CI/CD job. # Entry that represents a concrete CI/CD job.
# #
class Job < ::Gitlab::Config::Entry::Node class Job < ::Gitlab::Config::Entry::Node
include ::Gitlab::Config::Entry::Configurable include ::Gitlab::Ci::Config::Entry::Processable
include ::Gitlab::Config::Entry::Attributable
include ::Gitlab::Config::Entry::Inheritable
ALLOWED_WHEN = %w[on_success on_failure always manual delayed].freeze ALLOWED_WHEN = %w[on_success on_failure always manual delayed].freeze
ALLOWED_KEYS = %i[tags script only except rules type image services ALLOWED_KEYS = %i[tags script type image services
allow_failure type stage when start_in artifacts cache allow_failure type when start_in artifacts cache
dependencies before_script needs after_script variables dependencies before_script needs after_script variables
environment coverage retry parallel extends interruptible timeout environment coverage retry parallel interruptible timeout
resource_group release].freeze resource_group release].freeze
REQUIRED_BY_NEEDS = %i[stage].freeze REQUIRED_BY_NEEDS = %i[stage].freeze
validations do validations do
validates :config, type: Hash validates :config, allowed_keys: ALLOWED_KEYS + PROCESSABLE_ALLOWED_KEYS
validates :config, allowed_keys: ALLOWED_KEYS
validates :config, required_keys: REQUIRED_BY_NEEDS, if: :has_needs? validates :config, required_keys: REQUIRED_BY_NEEDS, if: :has_needs?
validates :config, presence: true
validates :script, presence: true validates :script, presence: true
validates :name, presence: true
validates :name, type: Symbol
validates :config,
disallowed_keys: {
in: %i[only except when start_in],
message: 'key may not be used with `rules`'
},
if: :has_rules?
validates :config, validates :config,
disallowed_keys: { disallowed_keys: {
in: %i[release], in: %i[release],
...@@ -53,8 +41,6 @@ module Gitlab ...@@ -53,8 +41,6 @@ module Gitlab
} }
validates :dependencies, array_of_strings: true validates :dependencies, array_of_strings: true
validates :extends, array_of_strings_or_string: true
validates :rules, array_of_hashes: true
validates :resource_group, type: String validates :resource_group, type: String
end end
...@@ -81,10 +67,6 @@ module Gitlab ...@@ -81,10 +67,6 @@ module Gitlab
description: 'Commands that will be executed in this job.', description: 'Commands that will be executed in this job.',
inherit: false inherit: false
entry :stage, Entry::Stage,
description: 'Pipeline stage this job will be executed into.',
inherit: false
entry :type, Entry::Stage, entry :type, Entry::Stage,
description: 'Deprecated: stage this job will be executed into.', description: 'Deprecated: stage this job will be executed into.',
inherit: false inherit: false
...@@ -125,22 +107,6 @@ module Gitlab ...@@ -125,22 +107,6 @@ module Gitlab
description: 'Artifacts configuration for this job.', description: 'Artifacts configuration for this job.',
inherit: true inherit: true
entry :only, Entry::Policy,
description: 'Refs policy this job will be executed for.',
default: ::Gitlab::Ci::Config::Entry::Policy::DEFAULT_ONLY,
inherit: false
entry :except, Entry::Policy,
description: 'Refs policy this job will be executed for.',
inherit: false
entry :rules, Entry::Rules,
description: 'List of evaluable Rules to determine job inclusion.',
inherit: false,
metadata: {
allowed_when: %w[on_success on_failure always never manual delayed].freeze
}
entry :needs, Entry::Needs, entry :needs, Entry::Needs,
description: 'Needs configuration for this job.', description: 'Needs configuration for this job.',
metadata: { allowed_needs: %i[job cross_dependency] }, metadata: { allowed_needs: %i[job cross_dependency] },
...@@ -162,13 +128,13 @@ module Gitlab ...@@ -162,13 +128,13 @@ module Gitlab
description: 'This job will produce a release.', description: 'This job will produce a release.',
inherit: false inherit: false
helpers :before_script, :script, :stage, :type, :after_script, helpers :before_script, :script, :type, :after_script,
:cache, :image, :services, :only, :except, :variables, :cache, :image, :services, :variables,
:artifacts, :environment, :coverage, :retry, :rules, :artifacts, :environment, :coverage, :retry,
:parallel, :needs, :interruptible, :release, :tags :needs, :interruptible, :release, :tags
attributes :script, :tags, :allow_failure, :when, :dependencies, attributes :script, :tags, :allow_failure, :when, :dependencies,
:needs, :retry, :parallel, :extends, :start_in, :rules, :needs, :retry, :parallel, :start_in,
:interruptible, :timeout, :resource_group, :release :interruptible, :timeout, :resource_group, :release
def self.matching?(name, config) def self.matching?(name, config)
...@@ -187,31 +153,9 @@ module Gitlab ...@@ -187,31 +153,9 @@ module Gitlab
end end
@entries.delete(:type) @entries.delete(:type)
has_workflow_rules = deps&.workflow&.has_rules?
# If workflow:rules: or rules: are used
# they are considered not compatible
# with `only/except` defaults
#
# Context: https://gitlab.com/gitlab-org/gitlab/merge_requests/21742
if has_rules? || has_workflow_rules
# Remove only/except defaults
# defaults are not considered as defined
@entries.delete(:only) unless only_defined?
@entries.delete(:except) unless except_defined?
end
end end
end end
def name
@metadata[:name]
end
def value
@config.merge(to_hash.compact)
end
def manual_action? def manual_action?
self.when == 'manual' self.when == 'manual'
end end
...@@ -220,38 +164,27 @@ module Gitlab ...@@ -220,38 +164,27 @@ module Gitlab
self.when == 'delayed' self.when == 'delayed'
end end
def has_rules?
@config.try(:key?, :rules)
end
def ignored? def ignored?
allow_failure.nil? ? manual_action? : allow_failure allow_failure.nil? ? manual_action? : allow_failure
end end
private def value
super.merge(
def overwrite_entry(deps, key, current_entry)
deps.default[key] unless current_entry.specified?
end
def to_hash
{ name: name,
before_script: before_script_value, before_script: before_script_value,
script: script_value, script: script_value,
image: image_value, image: image_value,
services: services_value, services: services_value,
stage: stage_value,
cache: cache_value, cache: cache_value,
tags: tags_value, tags: tags_value,
only: only_value, when: self.when,
except: except_value, start_in: self.start_in,
rules: has_rules? ? rules_value : nil, dependencies: dependencies,
variables: variables_defined? ? variables_value : {}, variables: variables_defined? ? variables_value : {},
environment: environment_defined? ? environment_value : nil, environment: environment_defined? ? environment_value : nil,
environment_name: environment_defined? ? environment_value[:name] : nil, environment_name: environment_defined? ? environment_value[:name] : nil,
coverage: coverage_defined? ? coverage_value : nil, coverage: coverage_defined? ? coverage_value : nil,
retry: retry_defined? ? retry_value : nil, retry: retry_defined? ? retry_value : nil,
parallel: parallel_defined? ? parallel_value.to_i : nil, parallel: has_parallel? ? parallel.to_i : nil,
interruptible: interruptible_defined? ? interruptible_value : nil, interruptible: interruptible_defined? ? interruptible_value : nil,
timeout: has_timeout? ? ChronicDuration.parse(timeout.to_s) : nil, timeout: has_timeout? ? ChronicDuration.parse(timeout.to_s) : nil,
artifacts: artifacts_value, artifacts: artifacts_value,
...@@ -260,7 +193,8 @@ module Gitlab ...@@ -260,7 +193,8 @@ module Gitlab
ignore: ignored?, ignore: ignored?,
needs: needs_defined? ? needs_value : nil, needs: needs_defined? ? needs_value : nil,
resource_group: resource_group, resource_group: resource_group,
scheduling_type: needs_defined? ? :dag : :stage } scheduling_type: needs_defined? ? :dag : :stage
).compact
end end
end end
end end
......
# frozen_string_literal: true
module Gitlab
module Ci
class Config
module Entry
##
# Entry that represents a CI/CD Processable (a job)
#
module Processable
extend ActiveSupport::Concern
include ::Gitlab::Config::Entry::Configurable
include ::Gitlab::Config::Entry::Attributable
include ::Gitlab::Config::Entry::Inheritable
PROCESSABLE_ALLOWED_KEYS = %i[extends stage only except rules].freeze
included do
validations do
validates :config, presence: true
validates :name, presence: true
validates :name, type: Symbol
validates :config, disallowed_keys: {
in: %i[only except when start_in],
message: 'key may not be used with `rules`'
},
if: :has_rules?
with_options allow_nil: true do
validates :extends, array_of_strings_or_string: true
validates :rules, array_of_hashes: true
end
end
entry :stage, Entry::Stage,
description: 'Pipeline stage this job will be executed into.',
inherit: false
entry :only, ::Gitlab::Ci::Config::Entry::Policy,
description: 'Refs policy this job will be executed for.',
default: ::Gitlab::Ci::Config::Entry::Policy::DEFAULT_ONLY,
inherit: false
entry :except, ::Gitlab::Ci::Config::Entry::Policy,
description: 'Refs policy this job will be executed for.',
inherit: false
entry :rules, ::Gitlab::Ci::Config::Entry::Rules,
description: 'List of evaluable Rules to determine job inclusion.',
inherit: false,
metadata: {
allowed_when: %w[on_success on_failure always never manual delayed].freeze
}
helpers :stage, :only, :except, :rules
attributes :extends, :rules
end
def compose!(deps = nil)
super do
has_workflow_rules = deps&.workflow&.has_rules?
# If workflow:rules: or rules: are used
# they are considered not compatible
# with `only/except` defaults
#
# Context: https://gitlab.com/gitlab-org/gitlab/merge_requests/21742
if has_rules? || has_workflow_rules
# Remove only/except defaults
# defaults are not considered as defined
@entries.delete(:only) unless only_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
@entries.delete(:except) unless except_defined? # rubocop:disable Gitlab/ModuleWithInstanceVariables
end
yield if block_given?
end
end
def name
metadata[:name]
end
def overwrite_entry(deps, key, current_entry)
deps.default[key] unless current_entry.specified?
end
def value
{ name: name,
stage: stage_value,
extends: extends,
rules: rules_value,
only: only_value,
except: except_value }.compact
end
end
end
end
end
end
...@@ -67,7 +67,9 @@ module Gitlab ...@@ -67,7 +67,9 @@ module Gitlab
entry :workflow, Entry::Workflow, entry :workflow, Entry::Workflow,
description: 'List of evaluable rules to determine Pipeline status' description: 'List of evaluable rules to determine Pipeline status'
helpers :default, :jobs, :stages, :types, :variables, :workflow helpers :default, :stages, :types, :variables, :workflow
helpers :jobs, dynamic: true
delegate :before_script_value, delegate :before_script_value,
:image_value, :image_value,
......
...@@ -75,6 +75,8 @@ module Gitlab ...@@ -75,6 +75,8 @@ module Gitlab
# rubocop: disable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord
def entry(key, entry, description: nil, default: nil, inherit: nil, reserved: nil, metadata: {}) def entry(key, entry, description: nil, default: nil, inherit: nil, reserved: nil, metadata: {})
raise ArgumentError, "Entry #{key} already defined" if @nodes.to_h[key.to_sym]
factory = ::Gitlab::Config::Entry::Factory.new(entry) factory = ::Gitlab::Config::Entry::Factory.new(entry)
.with(description: description) .with(description: description)
.with(default: default) .with(default: default)
...@@ -86,8 +88,16 @@ module Gitlab ...@@ -86,8 +88,16 @@ module Gitlab
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
def helpers(*nodes) def helpers(*nodes, dynamic: false)
nodes.each do |symbol| nodes.each do |symbol|
if method_defined?("#{symbol}_defined?") || method_defined?("#{symbol}_value")
raise ArgumentError, "Method #{symbol}_defined? or #{symbol}_value already defined"
end
unless @nodes.to_h[symbol]
raise ArgumentError, "Entry for #{symbol} is undefined" unless dynamic
end
define_method("#{symbol}_defined?") do define_method("#{symbol}_defined?") do
entries[symbol]&.specified? entries[symbol]&.specified?
end end
......
...@@ -128,9 +128,12 @@ module Gitlab ...@@ -128,9 +128,12 @@ module Gitlab
%r{\A(ee/)?db/(?!fixtures)[^/]+} => :database, %r{\A(ee/)?db/(?!fixtures)[^/]+} => :database,
%r{\A(ee/)?lib/gitlab/(database|background_migration|sql|github_import)(/|\.rb)} => :database, %r{\A(ee/)?lib/gitlab/(database|background_migration|sql|github_import)(/|\.rb)} => :database,
%r{\A(app/models/project_authorization|app/services/users/refresh_authorized_projects_service)(/|\.rb)} => :database, %r{\A(app/models/project_authorization|app/services/users/refresh_authorized_projects_service)(/|\.rb)} => :database,
%r{\A(ee/)?app/finders/} => :database,
%r{\Arubocop/cop/migration(/|\.rb)} => :database, %r{\Arubocop/cop/migration(/|\.rb)} => :database,
%r{\A(\.gitlab-ci\.yml\z|\.gitlab\/ci)} => :engineering_productivity, %r{\A(\.gitlab-ci\.yml\z|\.gitlab\/ci)} => :engineering_productivity,
%r{\A\.overcommit\.yml\.example\z} => :engineering_productivity,
%r{\Atooling/overcommit/} => :engineering_productivity,
%r{Dangerfile\z} => :engineering_productivity, %r{Dangerfile\z} => :engineering_productivity,
%r{\A(ee/)?(danger/|lib/gitlab/danger/)} => :engineering_productivity, %r{\A(ee/)?(danger/|lib/gitlab/danger/)} => :engineering_productivity,
%r{\A(ee/)?scripts/} => :engineering_productivity, %r{\A(ee/)?scripts/} => :engineering_productivity,
......
...@@ -46,6 +46,7 @@ module Gitlab ...@@ -46,6 +46,7 @@ module Gitlab
push_frontend_feature_flag(:monaco_snippets, default_enabled: false) push_frontend_feature_flag(:monaco_snippets, default_enabled: false)
push_frontend_feature_flag(:monaco_blobs, default_enabled: false) push_frontend_feature_flag(:monaco_blobs, default_enabled: false)
push_frontend_feature_flag(:monaco_ci, default_enabled: false) push_frontend_feature_flag(:monaco_ci, default_enabled: false)
push_frontend_feature_flag(:snippets_edit_vue, default_enabled: false)
end end
# Exposes the state of a feature flag to the frontend code. # Exposes the state of a feature flag to the frontend code.
......
...@@ -51,7 +51,7 @@ module Gitlab ...@@ -51,7 +51,7 @@ module Gitlab
@importable.members.destroy_all # rubocop: disable DestroyAll @importable.members.destroy_all # rubocop: disable DestroyAll
relation_class.create!(user: @user, access_level: relation_class::MAINTAINER, source_id: @importable.id, importing: true) relation_class.create!(user: @user, access_level: highest_access_level, source_id: @importable.id, importing: true)
rescue => e rescue => e
raise e, "Error adding importer user to #{@importable.class} members. #{e.message}" raise e, "Error adding importer user to #{@importable.class} members. #{e.message}"
end end
...@@ -59,7 +59,7 @@ module Gitlab ...@@ -59,7 +59,7 @@ module Gitlab
def user_already_member? def user_already_member?
member = @importable.members&.first member = @importable.members&.first
member&.user == @user && member.access_level >= relation_class::MAINTAINER member&.user == @user && member.access_level >= highest_access_level
end end
def add_team_member(member, existing_user = nil) def add_team_member(member, existing_user = nil)
...@@ -72,7 +72,7 @@ module Gitlab ...@@ -72,7 +72,7 @@ module Gitlab
parsed_hash(member).merge( parsed_hash(member).merge(
'source_id' => @importable.id, 'source_id' => @importable.id,
'importing' => true, 'importing' => true,
'access_level' => [member['access_level'], relation_class::MAINTAINER].min 'access_level' => [member['access_level'], highest_access_level].min
).except('user_id') ).except('user_id')
end end
...@@ -97,6 +97,12 @@ module Gitlab ...@@ -97,6 +97,12 @@ module Gitlab
GroupMember GroupMember
end end
end end
def highest_access_level
return relation_class::OWNER if relation_class == GroupMember
relation_class::MAINTAINER
end
end end
end end
end end
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
module Gitlab module Gitlab
module Kubernetes module Kubernetes
module Helm module Helm
HELM_VERSION = '2.16.1' HELM_VERSION = '2.16.3'
KUBECTL_VERSION = '1.13.12' KUBECTL_VERSION = '1.13.12'
NAMESPACE = 'gitlab-managed-apps' NAMESPACE = 'gitlab-managed-apps'
NAMESPACE_LABELS = { 'app.gitlab.com/managed_by' => :gitlab }.freeze NAMESPACE_LABELS = { 'app.gitlab.com/managed_by' => :gitlab }.freeze
......
# frozen_string_literal: true # frozen_string_literal: true
module Gitlab module Gitlab
# Reference Counter
#
# A reference counter is used as a mechanism to identify when
# a repository is being accessed by a writable operation.
#
# Maintenance operations would use this as a clue to when they should
# execute significant changes in order to avoid disrupting running traffic
class ReferenceCounter class ReferenceCounter
REFERENCE_EXPIRE_TIME = 600 REFERENCE_EXPIRE_TIME = 600
attr_reader :gl_repository, :key attr_reader :gl_repository, :key
# Reference Counter instance
#
# @example
# Gitlab::ReferenceCounter.new('project-1')
#
# @see Gitlab::GlRepository::RepoType.identifier_for_repositorable
# @param [String] gl_repository repository identifier
def initialize(gl_repository) def initialize(gl_repository)
@gl_repository = gl_repository @gl_repository = gl_repository
@key = "git-receive-pack-reference-counter:#{gl_repository}" @key = "git-receive-pack-reference-counter:#{gl_repository}"
end end
# Return the actual counter value
#
# @return [Integer] value
def value def value
Gitlab::Redis::SharedState.with { |redis| (redis.get(key) || 0).to_i } Gitlab::Redis::SharedState.with do |redis|
(redis.get(key) || 0).to_i
end
end end
# Increase the counter
#
# @return [Boolean] whether operation was a success
def increase def increase
redis_cmd do |redis| redis_cmd do |redis|
redis.incr(key) redis.incr(key)
...@@ -22,26 +44,51 @@ module Gitlab ...@@ -22,26 +44,51 @@ module Gitlab
end end
end end
# rubocop:disable Gitlab/RailsLogger # Decrease the counter
#
# @return [Boolean] whether operation was a success
def decrease def decrease
redis_cmd do |redis| redis_cmd do |redis|
current_value = redis.decr(key) current_value = redis.decr(key)
if current_value < 0 if current_value < 0
# rubocop:disable Gitlab/RailsLogger
Rails.logger.warn("Reference counter for #{gl_repository} decreased" \ Rails.logger.warn("Reference counter for #{gl_repository} decreased" \
" when its value was less than 1. Reseting the counter.") " when its value was less than 1. Resetting the counter.")
# rubocop:enable Gitlab/RailsLogger
redis.del(key) redis.del(key)
end end
end end
end end
# rubocop:enable Gitlab/RailsLogger
# Reset the reference counter
#
# @private Used internally by SRE and for debugging purposes
# @return [Boolean] whether reset was a success
def reset!
redis_cmd do |redis|
redis.del(key)
end
end
# When the reference counter would expire
#
# @api private Used internally by SRE and for debugging purposes
# @return [Integer] number of seconds until expiration (negative if the key has no expiry)
def expires_in
Gitlab::Redis::SharedState.with do |redis|
redis.ttl(key)
end
end
private private
def redis_cmd def redis_cmd
Gitlab::Redis::SharedState.with { |redis| yield(redis) } Gitlab::Redis::SharedState.with { |redis| yield(redis) }
true true
rescue => e rescue => e
Rails.logger.warn("GitLab: An unexpected error occurred in writing to Redis: #{e}") # rubocop:disable Gitlab/RailsLogger Rails.logger.warn("GitLab: An unexpected error occurred in writing to Redis: #{e}") # rubocop:disable Gitlab/RailsLogger
false false
end end
end end
......
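The YARD comments added above describe the counter's lifecycle; a minimal usage sketch built only from the documented methods, reusing the `'project-1'` identifier from the `@example`:

counter = Gitlab::ReferenceCounter.new('project-1')

counter.increase   # a writable operation (e.g. git-receive-pack) begins
counter.value      # => 1 while the operation is in flight
counter.decrease   # the operation finishes
counter.expires_in # seconds until the underlying Redis key expires
counter.reset!     # SRE/debugging helper: force the counter back to zero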
...@@ -1728,6 +1728,9 @@ msgstr "" ...@@ -1728,6 +1728,9 @@ msgstr ""
msgid "An error occurred when updating the issue weight" msgid "An error occurred when updating the issue weight"
msgstr "" msgstr ""
msgid "An error occurred while adding formatted title for epic"
msgstr ""
msgid "An error occurred while checking group path" msgid "An error occurred while checking group path"
msgstr "" msgstr ""
...@@ -16372,9 +16375,6 @@ msgstr "" ...@@ -16372,9 +16375,6 @@ msgstr ""
msgid "Resolve conflicts on source branch" msgid "Resolve conflicts on source branch"
msgstr "" msgstr ""
msgid "Resolve discussion"
msgstr ""
msgid "Resolve thread" msgid "Resolve thread"
msgstr "" msgstr ""
...@@ -20780,9 +20780,6 @@ msgstr "" ...@@ -20780,9 +20780,6 @@ msgstr ""
msgid "Unresolve" msgid "Unresolve"
msgstr "" msgstr ""
msgid "Unresolve discussion"
msgstr ""
msgid "Unresolve thread" msgid "Unresolve thread"
msgstr "" msgstr ""
......
...@@ -5,12 +5,34 @@ require 'spec_helper' ...@@ -5,12 +5,34 @@ require 'spec_helper'
describe ServerlessDomainFinder do describe ServerlessDomainFinder do
let(:function_name) { 'test-function' } let(:function_name) { 'test-function' }
let(:pages_domain_name) { 'serverless.gitlab.io' } let(:pages_domain_name) { 'serverless.gitlab.io' }
let(:pages_domain) { create(:pages_domain, :instance_serverless, domain: pages_domain_name) }
let!(:serverless_domain_cluster) { create(:serverless_domain_cluster, uuid: 'abcdef12345678', pages_domain: pages_domain) }
let(:valid_cluster_uuid) { 'aba1cdef123456f278' } let(:valid_cluster_uuid) { 'aba1cdef123456f278' }
let(:invalid_cluster_uuid) { 'aba1cdef123456f178' } let(:invalid_cluster_uuid) { 'aba1cdef123456f178' }
let!(:environment) { create(:environment, name: 'test') } let!(:environment) { create(:environment, name: 'test') }
let(:pages_domain) do
create(
:pages_domain,
:instance_serverless,
domain: pages_domain_name
)
end
let(:knative_with_ingress) do
create(
:clusters_applications_knative,
external_ip: '10.0.0.1'
)
end
let!(:serverless_domain_cluster) do
create(
:serverless_domain_cluster,
uuid: 'abcdef12345678',
pages_domain: pages_domain,
knative: knative_with_ingress
)
end
let(:valid_uri) { "https://#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" } let(:valid_uri) { "https://#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
let(:valid_fqdn) { "#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" } let(:valid_fqdn) { "#{function_name}-#{valid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
let(:invalid_uri) { "https://#{function_name}-#{invalid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" } let(:invalid_uri) { "https://#{function_name}-#{invalid_cluster_uuid}#{"%x" % environment.id}-#{environment.slug}.#{pages_domain_name}" }
......
{
"type": "object",
"required": [
"source"
],
"properties": {
"source": { "type": "object",
"required": ["type", "service", "cluster"],
"properties" : {
"type": { "type": "string", "enum": ["serverless"] },
"service": { "type": "string" },
"cluster": { "type": "object",
"required": ["hostname", "address", "port", "cert", "key"],
"properties": {
"hostname": { "type": "string" },
"address": { "type": "string" },
"port": { "type": "integer" },
"cert": { "type": "string" },
"key": { "type": "string" }
},
"additionalProperties": false
}
},
"additionalProperties": false
}
},
"additionalProperties": false
}
{
"type": "object",
"required": [
"lookup_paths",
"certificate",
"key"
],
"properties": {
"certificate": { "type": ["string", "null"] },
"key": { "type": ["string", "null"] },
"lookup_paths": { "type": "array", "items": { "$ref": "lookup_path.json" } }
},
"additionalProperties": false
}
import {
gapiProjectsResponseMock,
gapiZonesResponseMock,
gapiMachineTypesResponseMock,
} from './mock_data';
const cloudbilling = {
projects: {
getBillingInfo: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { billingEnabled: true },
});
}),
),
},
};
const cloudresourcemanager = {
projects: {
list: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { ...gapiProjectsResponseMock },
});
}),
),
},
};
const compute = {
zones: {
list: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { ...gapiZonesResponseMock },
});
}),
),
},
machineTypes: {
list: jest.fn(
() =>
new Promise(resolve => {
resolve({
result: { ...gapiMachineTypesResponseMock },
});
}),
),
},
};
const gapi = {
client: {
cloudbilling,
cloudresourcemanager,
compute,
},
};
export { gapi as default };
import testAction from 'spec/helpers/vuex_action_helper'; import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/create_cluster/gke_cluster/store/actions'; import * as actions from '~/create_cluster/gke_cluster/store/actions';
import { createStore } from '~/create_cluster/gke_cluster/store'; import { createStore } from '~/create_cluster/gke_cluster/store';
import { gapi } from '../helpers'; import gapi from '../helpers';
import { selectedProjectMock, selectedZoneMock, selectedMachineTypeMock } from '../mock_data'; import { selectedProjectMock, selectedZoneMock, selectedMachineTypeMock } from '../mock_data';
describe('GCP Cluster Dropdown Store Actions', () => { describe('GCP Cluster Dropdown Store Actions', () => {
...@@ -65,9 +65,10 @@ describe('GCP Cluster Dropdown Store Actions', () => { ...@@ -65,9 +65,10 @@ describe('GCP Cluster Dropdown Store Actions', () => {
describe('async fetch methods', () => { describe('async fetch methods', () => {
let originalGapi; let originalGapi;
beforeAll(() => { beforeAll(() => {
originalGapi = window.gapi; originalGapi = window.gapi;
window.gapi = gapi(); window.gapi = gapi;
}); });
afterAll(() => { afterAll(() => {
......
...@@ -72,11 +72,17 @@ describe('self monitor component', () => { ...@@ -72,11 +72,17 @@ describe('self monitor component', () => {
selfMonitoringProjectExists: true, selfMonitoringProjectExists: true,
createSelfMonitoringProjectPath: '/create', createSelfMonitoringProjectPath: '/create',
deleteSelfMonitoringProjectPath: '/delete', deleteSelfMonitoringProjectPath: '/delete',
selfMonitoringProjectFullPath: 'instance-administrators-random/gitlab-self-monitoring',
}); });
wrapper = shallowMount(SelfMonitor, { store }); wrapper = shallowMount(SelfMonitor, { store });
expect(wrapper.vm.selfMonitoringFormText).toContain('<a href="http://localhost/">'); expect(
wrapper
.find({ ref: 'selfMonitoringFormText' })
.find('a')
.attributes('href'),
).toEqual('http://localhost/instance-administrators-random/gitlab-self-monitoring');
}); });
}); });
}); });
......
import {
gapiProjectsResponseMock,
gapiZonesResponseMock,
gapiMachineTypesResponseMock,
} from './mock_data';
// eslint-disable-next-line import/prefer-default-export
export const gapi = () => ({
client: {
cloudbilling: {
projects: {
getBillingInfo: () =>
new Promise(resolve => {
resolve({
result: { billingEnabled: true },
});
}),
},
},
cloudresourcemanager: {
projects: {
list: () =>
new Promise(resolve => {
resolve({
result: { ...gapiProjectsResponseMock },
});
}),
},
},
compute: {
zones: {
list: () =>
new Promise(resolve => {
resolve({
result: { ...gapiZonesResponseMock },
});
}),
},
machineTypes: {
list: () =>
new Promise(resolve => {
resolve({
result: { ...gapiMachineTypesResponseMock },
});
}),
},
},
},
});
# frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Ci::Config::Entry::Processable do
let(:node_class) do
Class.new(::Gitlab::Config::Entry::Node) do
include Gitlab::Ci::Config::Entry::Processable
def self.name
'job'
end
end
end
let(:entry) { node_class.new(config, name: :rspec) }
describe 'validations' do
before do
entry.compose!
end
context 'when entry config value is correct' do
let(:config) { { stage: 'test' } }
describe '#valid?' do
it 'is valid' do
expect(entry).to be_valid
end
end
context 'when job name is empty' do
let(:entry) { node_class.new(config, name: ''.to_sym) }
it 'reports error' do
expect(entry.errors).to include "job name can't be blank"
end
end
end
context 'when entry value is not correct' do
context 'incorrect config value type' do
let(:config) { ['incorrect'] }
describe '#errors' do
it 'reports error about a config type' do
expect(entry.errors)
.to include 'job config should be a hash'
end
end
end
context 'when config is empty' do
let(:config) { {} }
describe '#valid?' do
it 'is invalid' do
expect(entry).not_to be_valid
end
end
end
context 'when extends key is not a string' do
let(:config) { { extends: 123 } }
it 'returns error about wrong value type' do
expect(entry).not_to be_valid
expect(entry.errors).to include "job extends should be an array of strings or a string"
end
end
context 'when it uses both "when:" and "rules:"' do
let(:config) do
{
script: 'echo',
when: 'on_failure',
rules: [{ if: '$VARIABLE', when: 'on_success' }]
}
end
it 'returns an error about when: being combined with rules' do
expect(entry).not_to be_valid
expect(entry.errors).to include 'job config key may not be used with `rules`: when'
end
end
context 'when only: is used with rules:' do
let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
it 'returns error about mixing only: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
context 'and only: is blank' do
let(:config) { { only: nil, rules: [{ if: '$THIS' }] } }
it 'returns error about mixing only: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
context 'and rules: is blank' do
let(:config) { { only: ['merge_requests'], rules: nil } }
it 'returns error about mixing only: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
end
context 'when except: is used with rules:' do
let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }] } }
it 'returns error about mixing except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
context 'and except: is blank' do
let(:config) { { except: nil, rules: [{ if: '$THIS' }] } }
it 'returns error about mixing except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
context 'and rules: is blank' do
let(:config) { { except: { refs: %w[master] }, rules: nil } }
it 'returns error about mixing except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
end
end
end
context 'when only: and except: are both used with rules:' do
let(:config) do
{
only: %w[merge_requests],
except: { refs: %w[master] },
rules: [{ if: '$THIS' }]
}
end
it 'returns errors about mixing both only: and except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
expect(entry.errors).to include /may not be used with `rules`/
end
context 'when only: and except: as both blank' do
let(:config) do
{ only: nil, except: nil, rules: [{ if: '$THIS' }] }
end
it 'returns errors about mixing both only: and except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
expect(entry.errors).to include /may not be used with `rules`/
end
end
context 'when rules: is blank' do
let(:config) do
{ only: %w[merge_requests], except: { refs: %w[master] }, rules: nil }
end
it 'returns errors about mixing both only: and except: with rules:' do
expect(entry).not_to be_valid
expect(entry.errors).to include /may not be used with `rules`/
expect(entry.errors).to include /may not be used with `rules`/
end
end
end
end
end
describe '#relevant?' do
it 'is a relevant entry' do
entry = node_class.new({ stage: 'test' }, name: :rspec)
expect(entry).to be_relevant
end
end
describe '#compose!' do
let(:specified) do
double('specified', 'specified?' => true, value: 'specified')
end
let(:unspecified) { double('unspecified', 'specified?' => false) }
let(:default) { double('default', '[]' => unspecified) }
let(:workflow) { double('workflow', 'has_rules?' => false) }
let(:deps) { double('deps', 'default' => default, '[]' => unspecified, 'workflow' => workflow) }
context 'with workflow rules' do
using RSpec::Parameterized::TableSyntax
where(:name, :has_workflow_rules?, :only, :rules, :result) do
"uses default only" | false | nil | nil | { refs: %w[branches tags] }
"uses user only" | false | %w[branches] | nil | { refs: %w[branches] }
"does not define only" | false | nil | [] | nil
"does not define only" | true | nil | nil | nil
"uses user only" | true | %w[branches] | nil | { refs: %w[branches] }
"does not define only" | true | nil | [] | nil
end
with_them do
let(:config) { { script: 'ls', rules: rules, only: only }.compact }
it "#{name}" do
expect(workflow).to receive(:has_rules?) { has_workflow_rules? }
entry.compose!(deps)
expect(entry.only_value).to eq(result)
end
end
end
context 'when workflow rules is used' do
context 'when rules are used' do
let(:config) { { script: 'ls', cache: { key: 'test' }, rules: [] } }
it 'does not define only' do
expect(entry).not_to be_only_defined
end
end
context 'when rules are not used' do
let(:config) { { script: 'ls', cache: { key: 'test' }, only: [] } }
it 'does not define only' do
expect(entry).not_to be_only_defined
end
end
end
end
context 'when composed' do
before do
entry.compose!
end
describe '#value' do
context 'when entry is correct' do
let(:config) do
{ stage: 'test' }
end
it 'returns correct value' do
expect(entry.value)
.to eq(name: :rspec,
stage: 'test',
only: { refs: %w[branches tags] })
end
end
end
end
end
...@@ -2419,7 +2419,9 @@ module Gitlab ...@@ -2419,7 +2419,9 @@ module Gitlab
it 'returns errors and empty configuration' do it 'returns errors and empty configuration' do
expect(subject.valid?).to eq(false) expect(subject.valid?).to eq(false)
expect(subject.errors).to eq(['jobs:rspec config contains unknown keys: bad_tags', 'jobs:rspec rules should be an array of hashes']) expect(subject.errors).to contain_exactly(
'jobs:rspec config contains unknown keys: bad_tags',
'jobs:rspec rules should be an array of hashes')
expect(subject.content).to be_blank expect(subject.content).to be_blank
end end
end end
......
...@@ -218,6 +218,8 @@ describe Gitlab::Danger::Helper do ...@@ -218,6 +218,8 @@ describe Gitlab::Danger::Helper do
'scripts/foo' | :engineering_productivity 'scripts/foo' | :engineering_productivity
'lib/gitlab/danger/foo' | :engineering_productivity 'lib/gitlab/danger/foo' | :engineering_productivity
'ee/lib/gitlab/danger/foo' | :engineering_productivity 'ee/lib/gitlab/danger/foo' | :engineering_productivity
'.overcommit.yml.example' | :engineering_productivity
'tooling/overcommit/foo' | :engineering_productivity
'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | :backend 'lib/gitlab/ci/templates/Security/SAST.gitlab-ci.yml' | :backend
......
...@@ -4,8 +4,8 @@ require 'spec_helper' ...@@ -4,8 +4,8 @@ require 'spec_helper'
describe Gitlab::ImportExport::MembersMapper do describe Gitlab::ImportExport::MembersMapper do
describe 'map members' do describe 'map members' do
shared_examples 'imports exported members' do
let(:user) { create(:admin) } let(:user) { create(:admin) }
let(:project) { create(:project, :public, name: 'searchable_project') }
let(:user2) { create(:user) } let(:user2) { create(:user) }
let(:exported_user_id) { 99 } let(:exported_user_id) { 99 }
let(:exported_members) do let(:exported_members) do
...@@ -13,7 +13,7 @@ describe Gitlab::ImportExport::MembersMapper do ...@@ -13,7 +13,7 @@ describe Gitlab::ImportExport::MembersMapper do
"id" => 2, "id" => 2,
"access_level" => 40, "access_level" => 40,
"source_id" => 14, "source_id" => 14,
"source_type" => "Project", "source_type" => source_type,
"notification_level" => 3, "notification_level" => 3,
"created_at" => "2016-03-11T10:21:44.822Z", "created_at" => "2016-03-11T10:21:44.822Z",
"updated_at" => "2016-03-11T10:21:44.822Z", "updated_at" => "2016-03-11T10:21:44.822Z",
...@@ -33,7 +33,7 @@ describe Gitlab::ImportExport::MembersMapper do ...@@ -33,7 +33,7 @@ describe Gitlab::ImportExport::MembersMapper do
"id" => 3, "id" => 3,
"access_level" => 40, "access_level" => 40,
"source_id" => 14, "source_id" => 14,
"source_type" => "Project", "source_type" => source_type,
"user_id" => nil, "user_id" => nil,
"notification_level" => 3, "notification_level" => 3,
"created_at" => "2016-03-11T10:21:44.822Z", "created_at" => "2016-03-11T10:21:44.822Z",
...@@ -47,43 +47,29 @@ describe Gitlab::ImportExport::MembersMapper do ...@@ -47,43 +47,29 @@ describe Gitlab::ImportExport::MembersMapper do
let(:members_mapper) do let(:members_mapper) do
described_class.new( described_class.new(
exported_members: exported_members, user: user, importable: project) exported_members: exported_members, user: user, importable: importable)
end end
it 'includes the exported user ID in the map' do it 'includes the exported user ID in the map' do
expect(members_mapper.map.keys).to include(exported_user_id) expect(members_mapper.map.keys).to include(exported_user_id)
end end
it 'maps a project member' do it 'maps a member' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id) expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end end
it 'defaults to importer project member if it does not exist' do it 'defaults to importer member if it does not exist' do
expect(members_mapper.map[-1]).to eq(user.id) expect(members_mapper.map[-1]).to eq(user.id)
end end
it 'has invited members with no user' do it 'has invited members with no user' do
members_mapper.map members_mapper.map
expect(ProjectMember.find_by_invite_email('invite@test.com')).not_to be_nil expect(member_class.find_by_invite_email('invite@test.com')).not_to be_nil
end
it 'authorizes the users to the project' do
members_mapper.map
expect(user.authorized_project?(project)).to be true
expect(user2.authorized_project?(project)).to be true
end
it 'maps an owner as a maintainer' do
exported_members.first['access_level'] = ProjectMember::OWNER
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
expect(ProjectMember.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
end end
it 'removes old user_id from member_hash to avoid conflict with user key' do it 'removes old user_id from member_hash to avoid conflict with user key' do
expect(ProjectMember) expect(member_class)
.to receive(:create) .to receive(:create)
.twice .twice
.with(hash_excluding('user_id')) .with(hash_excluding('user_id'))
...@@ -95,30 +81,51 @@ describe Gitlab::ImportExport::MembersMapper do ...@@ -95,30 +81,51 @@ describe Gitlab::ImportExport::MembersMapper do
context 'user is not an admin' do context 'user is not an admin' do
let(:user) { create(:user) } let(:user) { create(:user) }
it 'does not map a project member' do it 'does not map a member' do
expect(members_mapper.map[exported_user_id]).to eq(user.id) expect(members_mapper.map[exported_user_id]).to eq(user.id)
end end
it 'defaults to importer project member if it does not exist' do it 'defaults to importer member if it does not exist' do
expect(members_mapper.map[-1]).to eq(user.id) expect(members_mapper.map[-1]).to eq(user.id)
end end
end end
context 'chooses the one with an email first' do context 'chooses the one with an email' do
let(:user3) { create(:user, username: 'test') } let(:user3) { create(:user, username: 'test') }
it 'maps the project member that has a matching email first' do it 'maps the member that has a matching email' do
expect(members_mapper.map[exported_user_id]).to eq(user2.id) expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end end
end end
end
context 'when importable is Project' do
include_examples 'imports exported members' do
let(:source_type) { 'Project' }
let(:member_class) { ProjectMember }
let(:importable) { create(:project, :public, name: 'searchable_project') }
it 'authorizes the users to the project' do
members_mapper.map
expect(user.authorized_project?(importable)).to be true
expect(user2.authorized_project?(importable)).to be true
end
it 'maps an owner as a maintainer' do
exported_members.first['access_level'] = ProjectMember::OWNER
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
expect(member_class.find_by_user_id(user2.id).access_level).to eq(ProjectMember::MAINTAINER)
end
context 'importer same as group member' do context 'importer same as group member' do
let(:user2) { create(:admin) } let(:user2) { create(:admin) }
let(:group) { create(:group) } let(:group) { create(:group) }
let(:project) { create(:project, :public, name: 'searchable_project', namespace: group) } let(:importable) { create(:project, :public, name: 'searchable_project', namespace: group) }
let(:members_mapper) do let(:members_mapper) do
described_class.new( described_class.new(
exported_members: exported_members, user: user2, importable: project) exported_members: exported_members, user: user2, importable: importable)
end end
before do before do
...@@ -130,7 +137,7 @@ describe Gitlab::ImportExport::MembersMapper do ...@@ -130,7 +137,7 @@ describe Gitlab::ImportExport::MembersMapper do
end end
it 'maps the project member if it already exists' do it 'maps the project member if it already exists' do
project.add_maintainer(user2) importable.add_maintainer(user2)
expect(members_mapper.map[exported_user_id]).to eq(user2.id) expect(members_mapper.map[exported_user_id]).to eq(user2.id)
end end
...@@ -138,10 +145,10 @@ describe Gitlab::ImportExport::MembersMapper do ...@@ -138,10 +145,10 @@ describe Gitlab::ImportExport::MembersMapper do
context 'importing group members' do context 'importing group members' do
let(:group) { create(:group) } let(:group) { create(:group) }
let(:project) { create(:project, namespace: group) } let(:importable) { create(:project, namespace: group) }
let(:members_mapper) do let(:members_mapper) do
described_class.new( described_class.new(
exported_members: exported_members, user: user, importable: project) exported_members: exported_members, user: user, importable: importable)
end end
before do before do
...@@ -162,10 +169,27 @@ describe Gitlab::ImportExport::MembersMapper do ...@@ -162,10 +169,27 @@ describe Gitlab::ImportExport::MembersMapper do
let(:exception_message) { 'Something went wrong' } let(:exception_message) { 'Something went wrong' }
it 'includes importer specific error message' do it 'includes importer specific error message' do
expect(ProjectMember).to receive(:create!).and_raise(StandardError.new(exception_message)) expect(member_class).to receive(:create!).and_raise(StandardError.new(exception_message))
expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}") expect { members_mapper.map }.to raise_error(StandardError, "Error adding importer user to Project members. #{exception_message}")
end end
end end
end end
end
context 'when importable is Group' do
include_examples 'imports exported members' do
let(:source_type) { 'Namespace' }
let(:member_class) { GroupMember }
let(:importable) { create(:group) }
it 'does not lower owner access level' do
exported_members.first['access_level'] = member_class::OWNER
expect(members_mapper.map[exported_user_id]).to eq(user2.id)
expect(member_class.find_by_user_id(user2.id).access_level).to eq(member_class::OWNER)
end
end
end
end
end end
...@@ -32,7 +32,7 @@ describe Gitlab::Kubernetes::Helm::Pod do ...@@ -32,7 +32,7 @@ describe Gitlab::Kubernetes::Helm::Pod do
it 'generates the appropriate specifications for the container' do it 'generates the appropriate specifications for the container' do
container = subject.generate.spec.containers.first container = subject.generate.spec.containers.first
expect(container.name).to eq('helm') expect(container.name).to eq('helm')
expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.1-kube-1.13.12') expect(container.image).to eq('registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/2.16.3-kube-1.13.12')
expect(container.env.count).to eq(3) expect(container.env.count).to eq(3)
expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT]) expect(container.env.map(&:name)).to match_array([:HELM_VERSION, :TILLER_NAMESPACE, :COMMAND_SCRIPT])
expect(container.command).to match_array(["/bin/sh"]) expect(container.command).to match_array(["/bin/sh"])
......
...@@ -2,38 +2,54 @@ ...@@ -2,38 +2,54 @@
require 'spec_helper' require 'spec_helper'
describe Gitlab::ReferenceCounter do describe Gitlab::ReferenceCounter, :clean_gitlab_redis_shared_state do
let(:redis) { double('redis') }
let(:reference_counter_key) { "git-receive-pack-reference-counter:project-1" }
let(:reference_counter) { described_class.new('project-1') } let(:reference_counter) { described_class.new('project-1') }
before do describe '#increase' do
allow(Gitlab::Redis::SharedState).to receive(:with).and_yield(redis) it 'increases and sets the expire time of a reference count for a path' do
end expect { reference_counter.increase }.to change { reference_counter.value }.by(1)
expect(reference_counter.expires_in).to be_positive
it 'increases and set the expire time of a reference count for a path' do
expect(redis).to receive(:incr).with(reference_counter_key)
expect(redis).to receive(:expire).with(reference_counter_key,
described_class::REFERENCE_EXPIRE_TIME)
expect(reference_counter.increase).to be(true) expect(reference_counter.increase).to be(true)
end end
end
describe '#decrease' do
it 'decreases the reference count for a path' do it 'decreases the reference count for a path' do
allow(redis).to receive(:decr).and_return(0) reference_counter.increase
expect(redis).to receive(:decr).with(reference_counter_key)
expect(reference_counter.decrease).to be(true) expect { reference_counter.decrease }.to change { reference_counter.value }.by(-1)
end end
it 'warns if attempting to decrease a counter with a value of one or less, and resets the counter' do it 'warns if attempting to decrease a counter with a value of zero or less, and resets the counter' do
expect(redis).to receive(:decr).and_return(-1)
expect(redis).to receive(:del)
expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \ expect(Rails.logger).to receive(:warn).with("Reference counter for project-1" \
" decreased when its value was less than 1. Reseting the counter.") " decreased when its value was less than 1. Resetting the counter.")
expect(reference_counter.decrease).to be(true) expect { reference_counter.decrease }.not_to change { reference_counter.value }
end
end end
describe '#value' do
it 'get the reference count for a path' do it 'get the reference count for a path' do
allow(redis).to receive(:get).and_return(1) expect(reference_counter.value).to eq(0)
expect(reference_counter.value).to be(1)
reference_counter.increase
expect(reference_counter.value).to eq(1)
end
end
describe '#reset!' do
it 'resets reference count down to zero' do
3.times { reference_counter.increase }
expect { reference_counter.reset! }.to change { reference_counter.value }.from(3).to(0)
end
end
describe '#expires_in' do
it 'displays the expiration time in seconds' do
reference_counter.increase
expect(reference_counter.expires_in).to be_between(500, 600)
end
end end
end end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'migrate', '20200221105436_update_application_setting_npm_package_requests_forwarding_default.rb')
describe UpdateApplicationSettingNpmPackageRequestsForwardingDefault, :migration do
# Create test data - an application_settings row with forwarding disabled.
let(:application_settings) { table(:application_settings) }
before do
application_settings.create!(npm_package_requests_forwarding: false)
end
# Test just the up migration.
it 'correctly migrates the application setting' do
expect { migrate! }.to change { current_application_setting }.from(false).to(true)
end
# Test a reversible migration.
it 'correctly migrates up and down the application setting' do
reversible_migration do |migration|
# Expectations will run before the up migration,
# and then again after the down migration
migration.before -> {
expect(current_application_setting).to eq false
}
# Expectations will run after the up migration.
migration.after -> {
expect(current_application_setting).to eq true
}
end
end
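# Read the setting straight from the database, bypassing the cached ApplicationSetting.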
def current_application_setting
ApplicationSetting.current_without_cache.npm_package_requests_forwarding
end
end
...@@ -56,6 +56,88 @@ describe API::Internal::Pages do ...@@ -56,6 +56,88 @@ describe API::Internal::Pages do
end end
end end
context 'serverless domain' do
let(:namespace) { create(:namespace, name: 'gitlab-org') }
let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
let(:environment) { create(:environment, project: project) }
let(:pages_domain) { create(:pages_domain, domain: 'serverless.gitlab.io') }
let(:knative_without_ingress) { create(:clusters_applications_knative) }
let(:knative_with_ingress) { create(:clusters_applications_knative, external_ip: '10.0.0.1') }
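# The two contexts below mirror the API behaviour asserted here: a cluster without an
# ingress IP cannot proxy traffic, so the lookup returns 204 No Content; with an ingress
# IP the endpoint returns the serverless proxy configuration.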
context 'without a knative ingress gateway IP' do
let!(:serverless_domain_cluster) do
create(
:serverless_domain_cluster,
uuid: 'abcdef12345678',
pages_domain: pages_domain,
knative: knative_without_ingress
)
end
let(:serverless_domain) do
create(
:serverless_domain,
serverless_domain_cluster: serverless_domain_cluster,
environment: environment
)
end
it 'responds with 204 no content' do
query_host(serverless_domain.uri.host)
expect(response).to have_gitlab_http_status(:no_content)
expect(response.body).to be_empty
end
end
context 'with a knative ingress gateway IP' do
let!(:serverless_domain_cluster) do
create(
:serverless_domain_cluster,
uuid: 'abcdef12345678',
pages_domain: pages_domain,
knative: knative_with_ingress
)
end
let(:serverless_domain) do
create(
:serverless_domain,
serverless_domain_cluster: serverless_domain_cluster,
environment: environment
)
end
it 'responds with proxy configuration' do
query_host(serverless_domain.uri.host)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('internal/serverless/virtual_domain')
expect(json_response['certificate']).to eq(pages_domain.certificate)
expect(json_response['key']).to eq(pages_domain.key)
expect(json_response['lookup_paths']).to eq(
[
{
'source' => {
'type' => 'serverless',
'service' => "test-function.#{project.name}-#{project.id}-#{environment.slug}.#{serverless_domain_cluster.knative.hostname}",
'cluster' => {
'hostname' => serverless_domain_cluster.knative.hostname,
'address' => serverless_domain_cluster.knative.external_ip,
'port' => 443,
'cert' => serverless_domain_cluster.certificate,
'key' => serverless_domain_cluster.key
}
}
}
]
)
end
end
end
context 'custom domain' do context 'custom domain' do
let(:namespace) { create(:namespace, name: 'gitlab-org') } let(:namespace) { create(:namespace, name: 'gitlab-org') }
let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') } let(:project) { create(:project, namespace: namespace, name: 'gitlab-ce') }
......
...@@ -612,7 +612,8 @@ module Ci ...@@ -612,7 +612,8 @@ module Ci
allow(attempt_counter).to receive(:increment) allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe) expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner, .with({ shared_runner: expected_shared_runner,
jobs_running_for_project: expected_jobs_running_for_project_first_job }, 1800) jobs_running_for_project: expected_jobs_running_for_project_first_job,
shard: expected_shard }, 1800)
execute(runner) execute(runner)
end end
...@@ -625,7 +626,8 @@ module Ci ...@@ -625,7 +626,8 @@ module Ci
allow(attempt_counter).to receive(:increment) allow(attempt_counter).to receive(:increment)
expect(job_queue_duration_seconds).to receive(:observe) expect(job_queue_duration_seconds).to receive(:observe)
.with({ shared_runner: expected_shared_runner, .with({ shared_runner: expected_shared_runner,
jobs_running_for_project: expected_jobs_running_for_project_third_job }, 1800) jobs_running_for_project: expected_jobs_running_for_project_third_job,
shard: expected_shard }, 1800)
execute(runner) execute(runner)
end end
...@@ -638,13 +640,28 @@ module Ci ...@@ -638,13 +640,28 @@ module Ci
end end
context 'when shared runner is used' do context 'when shared runner is used' do
let(:runner) { shared_runner } let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 tag2)) }
let(:expected_shared_runner) { true } let(:expected_shared_runner) { true }
let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { 0 } let(:expected_jobs_running_for_project_first_job) { 0 }
let(:expected_jobs_running_for_project_third_job) { 2 } let(:expected_jobs_running_for_project_third_job) { 2 }
it_behaves_like 'metrics collector' it_behaves_like 'metrics collector'
context 'when metrics_shard tag is defined' do
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
let(:expected_shard) { 'shard_tag' }
it_behaves_like 'metrics collector'
end
context 'when multiple metrics_shard tag is defined' do
let(:runner) { create(:ci_runner, :instance, tag_list: %w(tag1 metrics_shard::shard_tag metrics_shard::shard_tag_2 tag2)) }
let(:expected_shard) { 'shard_tag' }
it_behaves_like 'metrics collector'
end
context 'when pending job with queued_at=nil is used' do context 'when pending job with queued_at=nil is used' do
before do before do
pending_job.update(queued_at: nil) pending_job.update(queued_at: nil)
...@@ -662,8 +679,9 @@ module Ci ...@@ -662,8 +679,9 @@ module Ci
end end
context 'when specific runner is used' do context 'when specific runner is used' do
let(:runner) { specific_runner } let(:runner) { create(:ci_runner, :project, projects: [project], tag_list: %w(tag1 metrics_shard::shard_tag tag2)) }
let(:expected_shared_runner) { false } let(:expected_shared_runner) { false }
let(:expected_shard) { Ci::RegisterJobService::DEFAULT_METRICS_SHARD }
let(:expected_jobs_running_for_project_first_job) { '+Inf' } let(:expected_jobs_running_for_project_first_job) { '+Inf' }
let(:expected_jobs_running_for_project_third_job) { '+Inf' } let(:expected_jobs_running_for_project_third_job) { '+Inf' }
......