Commit 914ea32e authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 3546e1bb
# frozen_string_literal: true # frozen_string_literal: true
require_relative 'lib/gitlab_danger' require_relative 'lib/gitlab_danger'
require_relative 'lib/gitlab/danger/request_helper'
danger.import_plugin('danger/plugins/helper.rb') danger.import_plugin('danger/plugins/helper.rb')
danger.import_plugin('danger/plugins/roulette.rb') danger.import_plugin('danger/plugins/roulette.rb')
......
<script>
/**
 * Mount target for the Vue issuable right sidebar.
 *
 * Renders an (initially empty) <aside> container carrying the
 * standard right-sidebar CSS hooks; sidebar widgets are mounted
 * into it elsewhere.
 */
export default {
  props: {
    // True when the current user is signed in.
    // NOTE(review): not referenced in this template — presumably
    // consumed by child components; confirm before removing.
    signedIn: {
      type: Boolean,
      required: true,
    },
    // Extra CSS class reflecting the sidebar's open/collapsed state,
    // forwarded verbatim onto the <aside> element.
    sidebarStatusClass: {
      type: String,
      required: false,
      default: '',
    },
  },
};
</script>
<template>
  <aside
    :class="sidebarStatusClass"
    class="right-sidebar js-right-sidebar js-issuable-sidebar"
    aria-live="polite"
  ></aside>
</template>
import Vue from 'vue';
import SidebarApp from './components/sidebar_app.vue';
/**
 * Mounts the Vue issuable sidebar onto `#js-vue-issuable-sidebar`.
 *
 * Reads its props from the mount element's data attributes.
 *
 * @returns {Vue|boolean} the mounted Vue instance, or `false` when the
 *   mount element is absent from the page.
 */
export default () => {
  const mountEl = document.getElementById('js-vue-issuable-sidebar');

  if (!mountEl) {
    return false;
  }

  const { sidebarStatusClass, signedIn: signedInAttr } = mountEl.dataset;
  // The data attribute is rendered as an empty string when the user is
  // signed in, and is absent otherwise.
  const signedIn = signedInAttr === '';

  return new Vue({
    el: mountEl,
    components: { SidebarApp },
    render: createElement =>
      createElement('sidebar-app', {
        props: {
          signedIn,
          sidebarStatusClass,
        },
      }),
  });
};
...@@ -9,7 +9,7 @@ export default { ...@@ -9,7 +9,7 @@ export default {
}; };
</script> </script>
<template> <template>
<div class="log-duration-badge rounded align-self-start px-2 ml-2 flex-shrink-0"> <div class="log-duration-badge rounded align-self-start px-2 ml-2 flex-shrink-0 ws-normal">
{{ duration }} {{ duration }}
</div> </div>
</template> </template>
...@@ -21,8 +21,12 @@ export default { ...@@ -21,8 +21,12 @@ export default {
<template> <template>
<div class="js-line log-line"> <div class="js-line log-line">
<line-number :line-number="line.lineNumber" :path="path" /> <line-number :line-number="line.lineNumber" :path="path" />
<span v-for="(content, i) in line.content" :key="i" :class="content.style">{{ <span
content.text v-for="(content, i) in line.content"
}}</span> :key="i"
:class="content.style"
class="ws-pre-wrap"
>{{ content.text }}</span
>
</div> </div>
</template> </template>
...@@ -43,15 +43,19 @@ export default { ...@@ -43,15 +43,19 @@ export default {
<template> <template>
<div <div
class="log-line collapsible-line d-flex justify-content-between" class="log-line collapsible-line d-flex justify-content-between ws-normal"
role="button" role="button"
@click="handleOnClick" @click="handleOnClick"
> >
<icon :name="iconName" class="arrow position-absolute" /> <icon :name="iconName" class="arrow position-absolute" />
<line-number :line-number="line.lineNumber" :path="path" /> <line-number :line-number="line.lineNumber" :path="path" />
<span v-for="(content, i) in line.content" :key="i" class="line-text" :class="content.style">{{ <span
content.text v-for="(content, i) in line.content"
}}</span> :key="i"
class="line-text w-100 ws-pre-wrap"
:class="content.style"
>{{ content.text }}</span
>
<duration-badge v-if="duration" :duration="duration" /> <duration-badge v-if="duration" :duration="duration" />
</div> </div>
</template> </template>
...@@ -48,7 +48,7 @@ export default { ...@@ -48,7 +48,7 @@ export default {
<template> <template>
<gl-link <gl-link
:id="lineNumberId" :id="lineNumberId"
class="d-inline-block text-right line-number" class="d-inline-block text-right line-number flex-shrink-0"
:href="buildLineNumber" :href="buildLineNumber"
>{{ parsedLineNumber }}</gl-link >{{ parsedLineNumber }}</gl-link
> >
......
...@@ -19,7 +19,7 @@ export default { ...@@ -19,7 +19,7 @@ export default {
state.isSidebarOpen = true; state.isSidebarOpen = true;
}, },
[types.RECEIVE_TRACE_SUCCESS](state, log) { [types.RECEIVE_TRACE_SUCCESS](state, log = {}) {
if (log.state) { if (log.state) {
state.traceState = log.state; state.traceState = log.state;
} }
......
...@@ -416,7 +416,6 @@ export default { ...@@ -416,7 +416,6 @@ export default {
<gl-button <gl-button
v-if="showRearrangePanelsBtn" v-if="showRearrangePanelsBtn"
:pressed="isRearrangingPanels" :pressed="isRearrangingPanels"
new-style
variant="default" variant="default"
class="mr-2 mt-1 js-rearrange-button" class="mr-2 mt-1 js-rearrange-button"
@click="toggleRearrangingPanels" @click="toggleRearrangingPanels"
...@@ -426,7 +425,6 @@ export default { ...@@ -426,7 +425,6 @@ export default {
<gl-button <gl-button
v-if="addingMetricsAvailable" v-if="addingMetricsAvailable"
v-gl-modal="$options.addMetric.modalId" v-gl-modal="$options.addMetric.modalId"
new-style
variant="outline-success" variant="outline-success"
class="mr-2 mt-1 js-add-metric-button" class="mr-2 mt-1 js-add-metric-button"
> >
......
...@@ -5,6 +5,7 @@ import ZenMode from '~/zen_mode'; ...@@ -5,6 +5,7 @@ import ZenMode from '~/zen_mode';
import '~/notes/index'; import '~/notes/index';
import initIssueableApp from '~/issue_show'; import initIssueableApp from '~/issue_show';
import initRelatedMergeRequestsApp from '~/related_merge_requests'; import initRelatedMergeRequestsApp from '~/related_merge_requests';
import initVueIssuableSidebarApp from '~/issuable_sidebar/sidebar_bundle';
export default function() { export default function() {
initIssueableApp(); initIssueableApp();
...@@ -12,5 +13,9 @@ export default function() { ...@@ -12,5 +13,9 @@ export default function() {
new Issue(); // eslint-disable-line no-new new Issue(); // eslint-disable-line no-new
new ShortcutsIssuable(); // eslint-disable-line no-new new ShortcutsIssuable(); // eslint-disable-line no-new
new ZenMode(); // eslint-disable-line no-new new ZenMode(); // eslint-disable-line no-new
initIssuableSidebar(); if (gon.features && gon.features.vueIssuableSidebar) {
initVueIssuableSidebarApp();
} else {
initIssuableSidebar();
}
} }
...@@ -3,5 +3,7 @@ import initShow from '../show'; ...@@ -3,5 +3,7 @@ import initShow from '../show';
document.addEventListener('DOMContentLoaded', () => { document.addEventListener('DOMContentLoaded', () => {
initShow(); initShow();
initSidebarBundle(); if (gon.features && !gon.features.vueIssuableSidebar) {
initSidebarBundle();
}
}); });
...@@ -4,11 +4,16 @@ import ShortcutsIssuable from '~/behaviors/shortcuts/shortcuts_issuable'; ...@@ -4,11 +4,16 @@ import ShortcutsIssuable from '~/behaviors/shortcuts/shortcuts_issuable';
import { handleLocationHash } from '~/lib/utils/common_utils'; import { handleLocationHash } from '~/lib/utils/common_utils';
import howToMerge from '~/how_to_merge'; import howToMerge from '~/how_to_merge';
import initPipelines from '~/commit/pipelines/pipelines_bundle'; import initPipelines from '~/commit/pipelines/pipelines_bundle';
import initVueIssuableSidebarApp from '~/issuable_sidebar/sidebar_bundle';
import initWidget from '../../../vue_merge_request_widget'; import initWidget from '../../../vue_merge_request_widget';
export default function() { export default function() {
new ZenMode(); // eslint-disable-line no-new new ZenMode(); // eslint-disable-line no-new
initIssuableSidebar(); if (gon.features && gon.features.vueIssuableSidebar) {
initVueIssuableSidebarApp();
} else {
initIssuableSidebar();
}
initPipelines(); initPipelines();
new ShortcutsIssuable(true); // eslint-disable-line no-new new ShortcutsIssuable(true); // eslint-disable-line no-new
handleLocationHash(); handleLocationHash();
......
...@@ -4,6 +4,8 @@ import initShow from '../init_merge_request_show'; ...@@ -4,6 +4,8 @@ import initShow from '../init_merge_request_show';
document.addEventListener('DOMContentLoaded', () => { document.addEventListener('DOMContentLoaded', () => {
initShow(); initShow();
initSidebarBundle(); if (gon.features && !gon.features.vueIssuableSidebar) {
initSidebarBundle();
}
initMrNotes(); initMrNotes();
}); });
...@@ -440,6 +440,7 @@ img.emoji { ...@@ -440,6 +440,7 @@ img.emoji {
.flex-no-shrink { flex-shrink: 0; } .flex-no-shrink { flex-shrink: 0; }
.ws-initial { white-space: initial; } .ws-initial { white-space: initial; }
.ws-normal { white-space: normal; } .ws-normal { white-space: normal; }
.ws-pre-wrap { white-space: pre-wrap; }
.overflow-auto { overflow: auto; } .overflow-auto { overflow: auto; }
.d-flex-center { .d-flex-center {
......
...@@ -9,7 +9,6 @@ ...@@ -9,7 +9,6 @@
border-radius: $border-radius-small; border-radius: $border-radius-small;
min-height: 42px; min-height: 42px;
background-color: $builds-trace-bg; background-color: $builds-trace-bg;
white-space: pre-wrap;
} }
.log-line { .log-line {
......
...@@ -104,7 +104,6 @@ class GroupsController < Groups::ApplicationController ...@@ -104,7 +104,6 @@ class GroupsController < Groups::ApplicationController
redirect_to edit_group_path(@group, anchor: params[:update_section]), notice: "Group '#{@group.name}' was successfully updated." redirect_to edit_group_path(@group, anchor: params[:update_section]), notice: "Group '#{@group.name}' was successfully updated."
else else
@group.path = @group.path_before_last_save || @group.path_was @group.path = @group.path_before_last_save || @group.path_was
render action: "edit" render action: "edit"
end end
end end
...@@ -124,7 +123,7 @@ class GroupsController < Groups::ApplicationController ...@@ -124,7 +123,7 @@ class GroupsController < Groups::ApplicationController
flash[:notice] = "Group '#{@group.name}' was successfully transferred." flash[:notice] = "Group '#{@group.name}' was successfully transferred."
redirect_to group_path(@group) redirect_to group_path(@group)
else else
flash[:alert] = service.error flash[:alert] = service.error.html_safe
redirect_to edit_group_path(@group) redirect_to edit_group_path(@group)
end end
end end
......
...@@ -42,6 +42,10 @@ class Projects::IssuesController < Projects::ApplicationController ...@@ -42,6 +42,10 @@ class Projects::IssuesController < Projects::ApplicationController
before_action :authorize_import_issues!, only: [:import_csv] before_action :authorize_import_issues!, only: [:import_csv]
before_action :authorize_download_code!, only: [:related_branches] before_action :authorize_download_code!, only: [:related_branches]
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, project.group)
end
respond_to :html respond_to :html
alias_method :designs, :show alias_method :designs, :show
......
...@@ -21,6 +21,10 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo ...@@ -21,6 +21,10 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:diffs_batch_load, @project) push_frontend_feature_flag(:diffs_batch_load, @project)
end end
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, @project.group)
end
around_action :allow_gitaly_ref_name_caching, only: [:index, :show, :discussions] around_action :allow_gitaly_ref_name_caching, only: [:index, :show, :discussions]
def index def index
......
...@@ -184,7 +184,7 @@ class Projects::PipelinesController < Projects::ApplicationController ...@@ -184,7 +184,7 @@ class Projects::PipelinesController < Projects::ApplicationController
end end
def show_represent_params def show_represent_params
{ grouped: true } { grouped: true, expanded: params[:expanded].to_a.map(&:to_i) }
end end
def create_params def create_params
......
...@@ -42,6 +42,7 @@ module Ci ...@@ -42,6 +42,7 @@ module Ci
has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent has_many :job_artifacts, class_name: 'Ci::JobArtifact', foreign_key: :job_id, dependent: :destroy, inverse_of: :job # rubocop:disable Cop/ActiveRecordDependent
has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id has_many :job_variables, class_name: 'Ci::JobVariable', foreign_key: :job_id
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_job_id
Ci::JobArtifact.file_types.each do |key, value| Ci::JobArtifact.file_types.each do |key, value|
has_one :"job_artifacts_#{key}", -> { where(file_type: value) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id has_one :"job_artifacts_#{key}", -> { where(file_type: value) }, class_name: 'Ci::JobArtifact', inverse_of: :job, foreign_key: :job_id
......
...@@ -52,9 +52,15 @@ module Ci ...@@ -52,9 +52,15 @@ module Ci
has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id' has_many :auto_canceled_pipelines, class_name: 'Ci::Pipeline', foreign_key: 'auto_canceled_by_id'
has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id' has_many :auto_canceled_jobs, class_name: 'CommitStatus', foreign_key: 'auto_canceled_by_id'
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_pipeline_id
has_one :source_pipeline, class_name: 'Ci::Sources::Pipeline', inverse_of: :pipeline
has_one :chat_data, class_name: 'Ci::PipelineChatData' has_one :chat_data, class_name: 'Ci::PipelineChatData'
has_many :triggered_pipelines, through: :sourced_pipelines, source: :pipeline
has_one :triggered_by_pipeline, through: :source_pipeline, source: :source_pipeline
has_one :source_job, through: :source_pipeline, source: :source_job
accepts_nested_attributes_for :variables, reject_if: :persisted? accepts_nested_attributes_for :variables, reject_if: :persisted?
delegate :id, to: :project, prefix: true delegate :id, to: :project, prefix: true
......
...@@ -22,6 +22,7 @@ module Ci ...@@ -22,6 +22,7 @@ module Ci
schedule: 4, schedule: 4,
api: 5, api: 5,
external: 6, external: 6,
pipeline: 7,
chat: 8, chat: 8,
merge_request_event: 10, merge_request_event: 10,
external_pull_request_event: 11 external_pull_request_event: 11
......
# frozen_string_literal: true

module Ci
  module Sources
    # Join model recording which upstream project/pipeline/job triggered
    # a given (downstream) pipeline.
    class Pipeline < ApplicationRecord
      self.table_name = "ci_sources_pipelines"

      # The downstream side: the project/pipeline that was triggered.
      belongs_to :project, class_name: "Project"
      belongs_to :pipeline, class_name: "Ci::Pipeline", inverse_of: :source_pipeline

      # The upstream side: where the trigger originated.
      belongs_to :source_project, class_name: "Project", foreign_key: :source_project_id
      belongs_to :source_job, class_name: "CommitStatus", foreign_key: :source_job_id
      belongs_to :source_pipeline, class_name: "Ci::Pipeline", foreign_key: :source_pipeline_id

      validates :project, :pipeline, :source_project, :source_job, :source_pipeline,
        presence: true
    end
  end
end

::Ci::Sources::Pipeline.prepend_if_ee('::EE::Ci::Sources::Pipeline')
...@@ -259,6 +259,10 @@ class Group < Namespace ...@@ -259,6 +259,10 @@ class Group < Namespace
members_with_parents.maintainers.exists?(user_id: user) members_with_parents.maintainers.exists?(user_id: user)
end end
def has_container_repositories?
container_repositories.exists?
end
# @deprecated # @deprecated
alias_method :has_master?, :has_maintainer? alias_method :has_master?, :has_maintainer?
......
...@@ -297,6 +297,9 @@ class Project < ApplicationRecord ...@@ -297,6 +297,9 @@ class Project < ApplicationRecord
has_many :external_pull_requests, inverse_of: :project has_many :external_pull_requests, inverse_of: :project
has_many :sourced_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :source_project_id
has_many :source_pipelines, class_name: 'Ci::Sources::Pipeline', foreign_key: :project_id
has_one :pages_metadatum, class_name: 'ProjectPagesMetadatum', inverse_of: :project has_one :pages_metadatum, class_name: 'ProjectPagesMetadatum', inverse_of: :project
accepts_nested_attributes_for :variables, allow_destroy: true accepts_nested_attributes_for :variables, allow_destroy: true
......
...@@ -10,6 +10,7 @@ class PipelineDetailsEntity < PipelineEntity ...@@ -10,6 +10,7 @@ class PipelineDetailsEntity < PipelineEntity
expose :manual_actions, using: BuildActionEntity expose :manual_actions, using: BuildActionEntity
expose :scheduled_actions, using: BuildActionEntity expose :scheduled_actions, using: BuildActionEntity
end end
end
PipelineDetailsEntity.prepend_if_ee('EE::PipelineDetailsEntity') expose :triggered_by_pipeline, as: :triggered_by, with: TriggeredPipelineEntity
expose :triggered_pipelines, as: :triggered, using: TriggeredPipelineEntity
end
...@@ -54,9 +54,9 @@ class PipelineSerializer < BaseSerializer ...@@ -54,9 +54,9 @@ class PipelineSerializer < BaseSerializer
artifacts: { artifacts: {
project: [:route, { namespace: :route }] project: [:route, { namespace: :route }]
} }
} },
{ triggered_by_pipeline: [:project, :user] },
{ triggered_pipelines: [:project, :user] }
] ]
end end
end end
PipelineSerializer.prepend_if_ee('EE::PipelineSerializer')
# frozen_string_literal: true

# Serializes a pipeline that is linked to another pipeline through a
# cross-project/parent-child trigger. Nested `triggered_by`/`triggered`
# exposures recurse into this same entity, bounded by MAX_EXPAND_DEPTH.
class TriggeredPipelineEntity < Grape::Entity
  include RequestAwareEntity

  # Maximum nesting depth when recursively expanding linked pipelines.
  MAX_EXPAND_DEPTH = 3

  expose :id
  expose :user, using: UserEntity
  expose :active?, as: :active
  expose :coverage
  expose :source

  expose :path do |pipeline|
    project_pipeline_path(pipeline.project, pipeline)
  end

  expose :details do
    expose :detailed_status, as: :status, with: DetailedStatusEntity
    # Stages are only serialized when the caller explicitly asked for this
    # pipeline to be expanded (see #expand?).
    expose :ordered_stages,
      as: :stages, using: StageEntity,
      if: -> (_, opts) { can_read_details? && expand?(opts) }
  end

  expose :triggered_by_pipeline,
    as: :triggered_by, with: TriggeredPipelineEntity,
    if: -> (_, opts) { can_read_details? && expand_for_path?(opts) }

  expose :triggered_pipelines,
    as: :triggered, using: TriggeredPipelineEntity,
    if: -> (_, opts) { can_read_details? && expand_for_path?(opts) }

  expose :project, using: ProjectEntity

  private

  alias_method :pipeline, :object

  # Whether the current user may read this pipeline at all.
  def can_read_details?
    can?(request.current_user, :read_pipeline, pipeline)
  end

  def detailed_status
    pipeline.detailed_status(request.current_user)
  end

  # True when the caller listed this pipeline's id in opts[:expanded].
  def expand?(opts)
    opts[:expanded].to_a.include?(pipeline.id)
  end

  def expand_for_path?(opts)
    # `opts[:attr_path]` holds the list of all `expose` names on the path
    # to the current attribute. The check ensures we only keep expanding
    # in a single direction — e.g. `triggered_by`, `triggered_by`, ... —
    # and never mix directions (`triggered_by` then `triggered`), which
    # would produce an infinite loop.
    attr_path = opts[:attr_path]
    current_expose = attr_path.last
    # We expand at most to a depth of MAX_EXPAND_DEPTH, and only while
    # every element on the path equals the current expose (one direction).
    attr_path.length < MAX_EXPAND_DEPTH &&
      attr_path.all?(current_expose) &&
      expand?(opts)
  end
end
...@@ -38,11 +38,34 @@ module Ci ...@@ -38,11 +38,34 @@ module Ci
end end
def create_pipeline_from_job(job) def create_pipeline_from_job(job)
# overridden in EE # this check is to not leak the presence of the project if user cannot read it
return unless can?(job.user, :read_project, project)
return error("400 Job has to be running", 400) unless job.running?
pipeline = Ci::CreatePipelineService.new(project, job.user, ref: params[:ref])
.execute(:pipeline, ignore_skip_ci: true) do |pipeline|
source = job.sourced_pipelines.build(
source_pipeline: job.pipeline,
source_project: job.project,
pipeline: pipeline,
project: project)
pipeline.source_pipeline = source
pipeline.variables.build(variables)
end
if pipeline.persisted?
success(pipeline: pipeline)
else
error(pipeline.errors.messages, 400)
end
end end
def job_from_token def job_from_token
# overridden in EE strong_memoize(:job) do
Ci::Build.find_by_token(params[:token].to_s)
end
end end
def variables def variables
...@@ -52,5 +75,3 @@ module Ci ...@@ -52,5 +75,3 @@ module Ci
end end
end end
end end
Ci::PipelineTriggerService.prepend_if_ee('EE::Ci::PipelineTriggerService')
...@@ -7,7 +7,8 @@ module Groups ...@@ -7,7 +7,8 @@ module Groups
namespace_with_same_path: s_('TransferGroup|The parent group already has a subgroup with the same path.'), namespace_with_same_path: s_('TransferGroup|The parent group already has a subgroup with the same path.'),
group_is_already_root: s_('TransferGroup|Group is already a root group.'), group_is_already_root: s_('TransferGroup|Group is already a root group.'),
same_parent_as_current: s_('TransferGroup|Group is already associated to the parent group.'), same_parent_as_current: s_('TransferGroup|Group is already associated to the parent group.'),
invalid_policies: s_("TransferGroup|You don't have enough permissions.") invalid_policies: s_("TransferGroup|You don't have enough permissions."),
group_contains_images: s_('TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.')
}.freeze }.freeze
TransferError = Class.new(StandardError) TransferError = Class.new(StandardError)
...@@ -46,6 +47,7 @@ module Groups ...@@ -46,6 +47,7 @@ module Groups
raise_transfer_error(:same_parent_as_current) if same_parent? raise_transfer_error(:same_parent_as_current) if same_parent?
raise_transfer_error(:invalid_policies) unless valid_policies? raise_transfer_error(:invalid_policies) unless valid_policies?
raise_transfer_error(:namespace_with_same_path) if namespace_with_same_path? raise_transfer_error(:namespace_with_same_path) if namespace_with_same_path?
raise_transfer_error(:group_contains_images) if group_projects_contain_registry_images?
end end
def group_is_already_root? def group_is_already_root?
...@@ -72,6 +74,10 @@ module Groups ...@@ -72,6 +74,10 @@ module Groups
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
def group_projects_contain_registry_images?
@group.has_container_repositories?
end
def update_group_attributes def update_group_attributes
if @new_parent_group && @new_parent_group.visibility_level < @group.visibility_level if @new_parent_group && @new_parent_group.visibility_level < @group.visibility_level
update_children_and_projects_visibility update_children_and_projects_visibility
......
...@@ -8,6 +8,11 @@ module Groups ...@@ -8,6 +8,11 @@ module Groups
reject_parent_id! reject_parent_id!
remove_unallowed_params remove_unallowed_params
if renaming_group_with_container_registry_images?
group.errors.add(:base, container_images_error)
return false
end
return false unless valid_visibility_level_change?(group, params[:visibility_level]) return false unless valid_visibility_level_change?(group, params[:visibility_level])
return false unless valid_share_with_group_lock_change? return false unless valid_share_with_group_lock_change?
...@@ -35,6 +40,17 @@ module Groups ...@@ -35,6 +40,17 @@ module Groups
# overridden in EE # overridden in EE
end end
def renaming_group_with_container_registry_images?
new_path = params[:path]
new_path && new_path != group.path &&
group.has_container_repositories?
end
def container_images_error
s_("GroupSettings|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again.")
end
def after_update def after_update
if group.previous_changes.include?(:visibility_level) && group.private? if group.previous_changes.include?(:visibility_level) && group.private?
# don't enqueue immediately to prevent todos removal in case of a mistake # don't enqueue immediately to prevent todos removal in case of a mistake
......
---
title: Prevents a group path change when a project inside the group has container
registry images
merge_request: 17583
author:
type: fixed
---
title: Allow cross-project pipeline triggering with CI_JOB_TOKEN in core
merge_request: 17251
author:
type: added
...@@ -34,7 +34,9 @@ module Gitlab ...@@ -34,7 +34,9 @@ module Gitlab
@mutex.synchronize do @mutex.synchronize do
break thread if thread? break thread if thread?
@thread = Thread.new { start_working } if start_working
@thread = Thread.new { run_thread }
end
end end
end end
...@@ -57,10 +59,18 @@ module Gitlab ...@@ -57,10 +59,18 @@ module Gitlab
private private
# Executed in lock context before starting thread
# Needs to return success
def start_working def start_working
true
end
# Executed in separate thread
def run_thread
raise NotImplementedError raise NotImplementedError
end end
# Executed in lock context
def stop_working def stop_working
# no-ops # no-ops
end end
......
# frozen_string_literal: true

require 'net/http'
require 'json'

module Gitlab
  module Danger
    # Small HTTP helper shared by the Danger plugins (roulette, teammate
    # status lookups, ...).
    module RequestHelper
      HTTPError = Class.new(RuntimeError)

      # Performs a GET request against +url+ and parses the body as JSON.
      #
      # @param [String] url
      # @return [Object] the parsed JSON payload (usually Hash or Array)
      # @raise [HTTPError] when the response is not a 2xx success
      # @raise [JSON::ParserError] when the body is not valid JSON
      def self.http_get_json(url)
        rsp = Net::HTTP.get_response(URI.parse(url))

        # Accept any 2xx status, not only 200 OK — matching the check the
        # previous inline helper used (Net::HTTPSuccess); an endpoint
        # answering e.g. 203 can still carry a valid JSON body.
        unless rsp.is_a?(Net::HTTPSuccess)
          raise HTTPError, "Failed to read #{url}: #{rsp.code} #{rsp.message}"
        end

        JSON.parse(rsp.body)
      end
    end
  end
end
# frozen_string_literal: true # frozen_string_literal: true
require 'net/http'
require 'json'
require 'cgi'
require_relative 'teammate' require_relative 'teammate'
module Gitlab module Gitlab
module Danger module Danger
module Roulette module Roulette
ROULETTE_DATA_URL = 'https://about.gitlab.com/roulette.json' ROULETTE_DATA_URL = 'https://about.gitlab.com/roulette.json'
HTTPError = Class.new(RuntimeError)
# Looks up the current list of GitLab team members and parses it into a # Looks up the current list of GitLab team members and parses it into a
# useful form # useful form
...@@ -19,7 +14,7 @@ module Gitlab ...@@ -19,7 +14,7 @@ module Gitlab
def team def team
@team ||= @team ||=
begin begin
data = http_get_json(ROULETTE_DATA_URL) data = Gitlab::Danger::RequestHelper.http_get_json(ROULETTE_DATA_URL)
data.map { |hash| ::Gitlab::Danger::Teammate.new(hash) } data.map { |hash| ::Gitlab::Danger::Teammate.new(hash) }
rescue JSON::ParserError rescue JSON::ParserError
raise "Failed to parse JSON response from #{ROULETTE_DATA_URL}" raise "Failed to parse JSON response from #{ROULETTE_DATA_URL}"
...@@ -44,6 +39,7 @@ module Gitlab ...@@ -44,6 +39,7 @@ module Gitlab
# Known issue: If someone is rejected due to OOO, and then becomes not OOO, the # Known issue: If someone is rejected due to OOO, and then becomes not OOO, the
# selection will change on next spin # selection will change on next spin
# @param [Array<Teammate>] people
def spin_for_person(people, random:) def spin_for_person(people, random:)
people.shuffle(random: random) people.shuffle(random: random)
.find(&method(:valid_person?)) .find(&method(:valid_person?))
...@@ -51,32 +47,17 @@ module Gitlab ...@@ -51,32 +47,17 @@ module Gitlab
private private
# @param [Teammate] person
# @return [Boolean]
def valid_person?(person) def valid_person?(person)
!mr_author?(person) && !out_of_office?(person) !mr_author?(person) && !person.out_of_office?
end end
# @param [Teammate] person
# @return [Boolean]
def mr_author?(person) def mr_author?(person)
person.username == gitlab.mr_author person.username == gitlab.mr_author
end end
def out_of_office?(person)
username = CGI.escape(person.username)
api_endpoint = "https://gitlab.com/api/v4/users/#{username}/status"
response = http_get_json(api_endpoint)
response["message"]&.match?(/OOO/i)
rescue HTTPError, JSON::ParserError
false # this is no worse than not checking for OOO
end
def http_get_json(url)
rsp = Net::HTTP.get_response(URI.parse(url))
unless rsp.is_a?(Net::HTTPSuccess)
raise HTTPError, "Failed to read #{url}: #{rsp.code} #{rsp.message}"
end
JSON.parse(rsp.body)
end
end end
end end
end end
# frozen_string_literal: true # frozen_string_literal: true
require 'cgi'
module Gitlab module Gitlab
module Danger module Danger
class Teammate class Teammate
...@@ -34,6 +36,18 @@ module Gitlab ...@@ -34,6 +36,18 @@ module Gitlab
has_capability?(project, category, :maintainer, labels) has_capability?(project, category, :maintainer, labels)
end end
def status
api_endpoint = "https://gitlab.com/api/v4/users/#{CGI.escape(username)}/status"
@status ||= Gitlab::Danger::RequestHelper.http_get_json(api_endpoint)
rescue Gitlab::Danger::RequestHelper::HTTPError, JSON::ParserError
nil # better no status than a crashing Danger
end
# @return [Boolean]
def out_of_office?
status&.dig("message")&.match?(/OOO/i) || false
end
private private
def has_capability?(project, category, kind, labels) def has_capability?(project, category, kind, labels)
......
...@@ -40,7 +40,14 @@ module Gitlab ...@@ -40,7 +40,14 @@ module Gitlab
::Gitlab::HealthChecks::Probes::Liveness.new, req, res) ::Gitlab::HealthChecks::Probes::Liveness.new, req, res)
end end
server.mount '/', Rack::Handler::WEBrick, rack_app server.mount '/', Rack::Handler::WEBrick, rack_app
server.start
true
end
def run_thread
server&.start
rescue IOError
# ignore forcibily closed servers
end end
def stop_working def stop_working
......
...@@ -50,6 +50,11 @@ module Gitlab ...@@ -50,6 +50,11 @@ module Gitlab
def start_working def start_working
@running = true @running = true
true
end
def run_thread
sleep(sleep_interval) sleep(sleep_interval)
while running while running
safe_sample safe_sample
......
...@@ -29,7 +29,7 @@ module Gitlab ...@@ -29,7 +29,7 @@ module Gitlab
private private
def start_working def run_thread
Sidekiq.logger.info( Sidekiq.logger.info(
class: self.class.to_s, class: self.class.to_s,
action: 'start', action: 'start',
......
...@@ -61,7 +61,7 @@ module Gitlab ...@@ -61,7 +61,7 @@ module Gitlab
private private
def start_working def run_thread
return unless notification_channel_enabled? return unless notification_channel_enabled?
begin begin
......
...@@ -8282,6 +8282,9 @@ msgstr "" ...@@ -8282,6 +8282,9 @@ msgstr ""
msgid "GroupSettings|Be careful. Changing a group's parent can have unintended %{side_effects_link_start}side effects%{side_effects_link_end}." msgid "GroupSettings|Be careful. Changing a group's parent can have unintended %{side_effects_link_start}side effects%{side_effects_link_end}."
msgstr "" msgstr ""
msgid "GroupSettings|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again."
msgstr ""
msgid "GroupSettings|Change group path" msgid "GroupSettings|Change group path"
msgstr "" msgstr ""
...@@ -17290,6 +17293,9 @@ msgstr "" ...@@ -17290,6 +17293,9 @@ msgstr ""
msgid "Transfer project" msgid "Transfer project"
msgstr "" msgstr ""
msgid "TransferGroup|Cannot update the path because there are projects under this group that contain Docker images in their Container Registry. Please remove the images from your projects first and try again."
msgstr ""
msgid "TransferGroup|Database is not supported." msgid "TransferGroup|Database is not supported."
msgstr "" msgstr ""
......
...@@ -385,6 +385,29 @@ describe GroupsController do ...@@ -385,6 +385,29 @@ describe GroupsController do
expect(response).to have_gitlab_http_status(302) expect(response).to have_gitlab_http_status(302)
expect(group.reload.project_creation_level).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS) expect(group.reload.project_creation_level).to eq(::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
end end
context 'when a project inside the group has container repositories' do
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: /image/, tags: %w[rc1])
create(:container_repository, project: project, name: :image)
end
it 'does allow the group to be renamed' do
post :update, params: { id: group.to_param, group: { name: 'new_name' } }
expect(controller).to set_flash[:notice]
expect(response).to have_gitlab_http_status(302)
expect(group.reload.name).to eq('new_name')
end
it 'does not allow to path of the group to be changed' do
post :update, params: { id: group.to_param, group: { path: 'new_path' } }
expect(assigns(:group).errors[:base].first).to match(/Docker images in their Container Registry/)
expect(response).to have_gitlab_http_status(200)
end
end
end end
describe '#ensure_canonical_path' do describe '#ensure_canonical_path' do
...@@ -673,6 +696,28 @@ describe GroupsController do ...@@ -673,6 +696,28 @@ describe GroupsController do
expect(response).to have_gitlab_http_status(404) expect(response).to have_gitlab_http_status(404)
end end
end end
context 'transferring when a project has container images' do
let(:group) { create(:group, :public, :nested) }
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
before do
stub_container_registry_config(enabled: true)
stub_container_registry_tags(repository: /image/, tags: %w[rc1])
create(:container_repository, project: project, name: :image)
put :transfer,
params: {
id: group.to_param,
new_parent_group_id: ''
}
end
it 'does not allow the group to be transferred' do
expect(controller).to set_flash[:alert].to match(/Docker images in their Container Registry/)
expect(response).to redirect_to(edit_group_path(group))
end
end
end end
context 'token authentication' do context 'token authentication' do
......
...@@ -217,6 +217,193 @@ describe Projects::PipelinesController do ...@@ -217,6 +217,193 @@ describe Projects::PipelinesController do
end end
end end
context 'with triggered pipelines' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:source_project) { create(:project, :repository) }
let_it_be(:target_project) { create(:project, :repository) }
let_it_be(:root_pipeline) { create_pipeline(project) }
let_it_be(:source_pipeline) { create_pipeline(source_project) }
let_it_be(:source_of_source_pipeline) { create_pipeline(source_project) }
let_it_be(:target_pipeline) { create_pipeline(target_project) }
let_it_be(:target_of_target_pipeline) { create_pipeline(target_project) }
before do
create_link(source_of_source_pipeline, source_pipeline)
create_link(source_pipeline, root_pipeline)
create_link(root_pipeline, target_pipeline)
create_link(target_pipeline, target_of_target_pipeline)
end
shared_examples 'not expanded' do
let(:expected_stages) { be_nil }
it 'does return base details' do
get_pipeline_json(root_pipeline)
expect(json_response['triggered_by']).to include('id' => source_pipeline.id)
expect(json_response['triggered']).to contain_exactly(
include('id' => target_pipeline.id))
end
it 'does not expand triggered_by pipeline' do
get_pipeline_json(root_pipeline)
triggered_by = json_response['triggered_by']
expect(triggered_by['triggered_by']).to be_nil
expect(triggered_by['triggered']).to be_nil
expect(triggered_by['details']['stages']).to expected_stages
end
it 'does not expand triggered pipelines' do
get_pipeline_json(root_pipeline)
first_triggered = json_response['triggered'].first
expect(first_triggered['triggered_by']).to be_nil
expect(first_triggered['triggered']).to be_nil
expect(first_triggered['details']['stages']).to expected_stages
end
end
shared_examples 'expanded' do
it 'does return base details' do
get_pipeline_json(root_pipeline)
expect(json_response['triggered_by']).to include('id' => source_pipeline.id)
expect(json_response['triggered']).to contain_exactly(
include('id' => target_pipeline.id))
end
it 'does expand triggered_by pipeline' do
get_pipeline_json(root_pipeline)
triggered_by = json_response['triggered_by']
expect(triggered_by['triggered_by']).to include(
'id' => source_of_source_pipeline.id)
expect(triggered_by['details']['stages']).not_to be_nil
end
it 'does not recursively expand triggered_by' do
get_pipeline_json(root_pipeline)
triggered_by = json_response['triggered_by']
expect(triggered_by['triggered']).to be_nil
end
it 'does expand triggered pipelines' do
get_pipeline_json(root_pipeline)
first_triggered = json_response['triggered'].first
expect(first_triggered['triggered']).to contain_exactly(
include('id' => target_of_target_pipeline.id))
expect(first_triggered['details']['stages']).not_to be_nil
end
it 'does not recursively expand triggered' do
get_pipeline_json(root_pipeline)
first_triggered = json_response['triggered'].first
expect(first_triggered['triggered_by']).to be_nil
end
end
context 'when it does have permission to read other projects' do
before do
source_project.add_developer(user)
target_project.add_developer(user)
end
context 'when not-expanding any pipelines' do
let(:expanded) { nil }
it_behaves_like 'not expanded'
end
context 'when expanding non-existing pipeline' do
let(:expanded) { [-1] }
it_behaves_like 'not expanded'
end
context 'when expanding pipeline that is not directly expandable' do
let(:expanded) { [source_of_source_pipeline.id, target_of_target_pipeline.id] }
it_behaves_like 'not expanded'
end
context 'when expanding self' do
let(:expanded) { [root_pipeline.id] }
context 'it does not recursively expand pipelines' do
it_behaves_like 'not expanded'
end
end
context 'when expanding source and target pipeline' do
let(:expanded) { [source_pipeline.id, target_pipeline.id] }
it_behaves_like 'expanded'
context 'when expand depth is limited to 1' do
before do
stub_const('TriggeredPipelineEntity::MAX_EXPAND_DEPTH', 1)
end
it_behaves_like 'not expanded' do
# We expect that triggered/triggered_by is not expanded,
# but we still return details.stages for that pipeline
let(:expected_stages) { be_a(Array) }
end
end
end
context 'when expanding all' do
let(:expanded) do
[
source_of_source_pipeline.id,
source_pipeline.id,
root_pipeline.id,
target_pipeline.id,
target_of_target_pipeline.id
]
end
it_behaves_like 'expanded'
end
end
context 'when does not have permission to read other projects' do
let(:expanded) { [source_pipeline.id, target_pipeline.id] }
it_behaves_like 'not expanded'
end
def create_pipeline(project)
create(:ci_empty_pipeline, project: project).tap do |pipeline|
create(:ci_build, pipeline: pipeline, stage: 'test', name: 'rspec')
end
end
def create_link(source_pipeline, pipeline)
source_pipeline.sourced_pipelines.create!(
source_job: source_pipeline.builds.all.sample,
source_project: source_pipeline.project,
project: pipeline.project,
pipeline: pipeline
)
end
def get_pipeline_json(pipeline)
params = {
namespace_id: pipeline.project.namespace,
project_id: pipeline.project,
id: pipeline,
expanded: expanded
}
get :show, params: params.compact, format: :json
end
end
def get_pipeline_json def get_pipeline_json
get :show, params: { namespace_id: project.namespace, project_id: project, id: pipeline }, format: :json get :show, params: { namespace_id: project.namespace, project_id: project, id: pipeline }, format: :json
end end
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
require 'omniauth/strategies/saml' require 'omniauth/strategies/saml'
......
# frozen_string_literal: true
FactoryBot.define do
factory :ci_sources_pipeline, class: Ci::Sources::Pipeline do
after(:build) do |source|
source.project ||= source.pipeline.project
source.source_pipeline ||= source.source_job.pipeline
source.source_project ||= source.source_pipeline.project
end
source_job factory: :ci_build
pipeline factory: :ci_empty_pipeline
end
end
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
require 'tempfile' require 'tempfile'
...@@ -424,8 +426,8 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do ...@@ -424,8 +426,8 @@ describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'loads job trace' do it 'loads job trace' do
expect(page).to have_content 'BUILD TRACE' expect(page).to have_content 'BUILD TRACE'
job.trace.write('a+b') do |stream| job.trace.write(+'a+b') do |stream|
stream.append(' and more trace', 11) stream.append(+' and more trace', 11)
end end
expect(page).to have_content 'BUILD TRACE and more trace' expect(page).to have_content 'BUILD TRACE and more trace'
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe AccessRequestsFinder do describe AccessRequestsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe Admin::ProjectsFinder do describe Admin::ProjectsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe Autocomplete::MoveToProjectFinder do describe Autocomplete::MoveToProjectFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe Autocomplete::UsersFinder do describe Autocomplete::UsersFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe BranchesFinder do describe BranchesFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe ClustersFinder do describe ClustersFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe FinderMethods do describe FinderMethods do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe FinderWithCrossProjectAccess do describe FinderWithCrossProjectAccess do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe ContributedProjectsFinder do describe ContributedProjectsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe EnvironmentsFinder do describe EnvironmentsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe EventsFinder do describe EventsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe ForkProjectsFinder do describe ForkProjectsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe GroupDescendantsFinder do describe GroupDescendantsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe GroupMembersFinder, '#execute' do describe GroupMembersFinder, '#execute' do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe GroupProjectsFinder do describe GroupProjectsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe GroupsFinder do describe GroupsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe IssuesFinder do describe IssuesFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe JoinedGroupsFinder do describe JoinedGroupsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe LabelsFinder do describe LabelsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe LicenseTemplateFinder do describe LicenseTemplateFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe MembersFinder, '#execute' do describe MembersFinder, '#execute' do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe MergeRequestTargetProjectFinder do describe MergeRequestTargetProjectFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe MergeRequestsFinder do describe MergeRequestsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe MilestonesFinder do describe MilestonesFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe NotesFinder do describe NotesFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe PersonalAccessTokensFinder do describe PersonalAccessTokensFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe PersonalProjectsFinder do describe PersonalProjectsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe PipelineSchedulesFinder do describe PipelineSchedulesFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe PipelinesFinder do describe PipelinesFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe ProjectsFinder do describe ProjectsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe RunnerJobsFinder do describe RunnerJobsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe SnippetsFinder do describe SnippetsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe TagsFinder do describe TagsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe TemplateFinder do describe TemplateFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe TodosFinder do describe TodosFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe UserRecentEventsFinder do describe UserRecentEventsFinder do
......
# frozen_string_literal: true
require 'spec_helper' require 'spec_helper'
describe UsersFinder do describe UsersFinder do
......
...@@ -6,7 +6,7 @@ describe Gitlab::Daemon do ...@@ -6,7 +6,7 @@ describe Gitlab::Daemon do
subject { described_class.new } subject { described_class.new }
before do before do
allow(subject).to receive(:start_working) allow(subject).to receive(:run_thread)
allow(subject).to receive(:stop_working) allow(subject).to receive(:stop_working)
end end
...@@ -44,7 +44,7 @@ describe Gitlab::Daemon do ...@@ -44,7 +44,7 @@ describe Gitlab::Daemon do
it 'starts the Daemon' do it 'starts the Daemon' do
expect { subject.start.join }.to change { subject.thread? }.from(false).to(true) expect { subject.start.join }.to change { subject.thread? }.from(false).to(true)
expect(subject).to have_received(:start_working) expect(subject).to have_received(:run_thread)
end end
end end
...@@ -52,7 +52,21 @@ describe Gitlab::Daemon do ...@@ -52,7 +52,21 @@ describe Gitlab::Daemon do
it "doesn't shutdown stopped Daemon" do it "doesn't shutdown stopped Daemon" do
expect { subject.stop }.not_to change { subject.thread? } expect { subject.stop }.not_to change { subject.thread? }
expect(subject).not_to have_received(:start_working) expect(subject).not_to have_received(:run_thread)
end
end
end
describe '#start_working' do
context 'when start_working fails' do
before do
expect(subject).to receive(:start_working) { false }
end
it 'does not start thread' do
expect(subject).not_to receive(:run_thread)
expect(subject.start).to eq(nil)
end end
end end
end end
...@@ -66,7 +80,7 @@ describe Gitlab::Daemon do ...@@ -66,7 +80,7 @@ describe Gitlab::Daemon do
it "doesn't start running Daemon" do it "doesn't start running Daemon" do
expect { subject.start.join }.not_to change { subject.thread } expect { subject.start.join }.not_to change { subject.thread }
expect(subject).to have_received(:start_working).once expect(subject).to have_received(:run_thread).once
end end
end end
...@@ -79,7 +93,7 @@ describe Gitlab::Daemon do ...@@ -79,7 +93,7 @@ describe Gitlab::Daemon do
context 'when stop_working raises exception' do context 'when stop_working raises exception' do
before do before do
allow(subject).to receive(:start_working) do allow(subject).to receive(:run_thread) do
sleep(1000) sleep(1000)
end end
end end
...@@ -108,7 +122,7 @@ describe Gitlab::Daemon do ...@@ -108,7 +122,7 @@ describe Gitlab::Daemon do
expect(subject.start).to be_nil expect(subject.start).to be_nil
expect { subject.start }.not_to change { subject.thread? } expect { subject.start }.not_to change { subject.thread? }
expect(subject).not_to have_received(:start_working) expect(subject).not_to have_received(:run_thread)
end end
end end
......
...@@ -2,11 +2,13 @@ ...@@ -2,11 +2,13 @@
require 'fast_spec_helper' require 'fast_spec_helper'
require 'rspec-parameterized'
require 'gitlab/danger/teammate' require 'gitlab/danger/teammate'
describe Gitlab::Danger::Teammate do describe Gitlab::Danger::Teammate do
subject { described_class.new(options) } subject { described_class.new(options.stringify_keys) }
let(:options) { { 'projects' => projects, 'role' => role } } let(:options) { { username: 'luigi', projects: projects, role: role } }
let(:projects) { { project => capabilities } } let(:projects) { { project => capabilities } }
let(:role) { 'Engineer, Manage' } let(:role) { 'Engineer, Manage' }
let(:labels) { [] } let(:labels) { [] }
...@@ -95,4 +97,64 @@ describe Gitlab::Danger::Teammate do ...@@ -95,4 +97,64 @@ describe Gitlab::Danger::Teammate do
expect(subject.maintainer?(project, :frontend, labels)).to be_falsey expect(subject.maintainer?(project, :frontend, labels)).to be_falsey
end end
end end
describe '#status' do
let(:capabilities) { ['dish washing'] }
context 'with empty cache' do
context 'for successful request' do
it 'returns the response' do
mock_status = double(does_not: 'matter')
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_return(mock_status)
expect(subject.status).to be mock_status
end
end
context 'for failing request' do
it 'returns nil' do
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_raise(Gitlab::Danger::RequestHelper::HTTPError.new)
expect(subject.status).to be nil
end
end
end
context 'with filled cache' do
it 'returns the cached response' do
mock_status = double(does_not: 'matter')
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_return(mock_status)
subject.status
expect(Gitlab::Danger::RequestHelper).not_to receive(:http_get_json)
expect(subject.status).to be mock_status
end
end
end
describe '#out_of_office?' do
using RSpec::Parameterized::TableSyntax
let(:capabilities) { ['dry head'] }
where(:status, :result) do
nil | false
{} | false
{ message: 'dear reader' } | false
{ message: 'OOO: massage' } | true
{ message: 'love it SOOO much' } | true
end
with_them do
before do
expect(Gitlab::Danger::RequestHelper).to receive(:http_get_json)
.and_return(status&.stringify_keys)
end
it { expect(subject.out_of_office?).to be result }
end
end
end end
...@@ -12,8 +12,8 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do ...@@ -12,8 +12,8 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
allow(Sidekiq.logger).to receive(:warn) allow(Sidekiq.logger).to receive(:warn)
end end
describe '#start_working' do describe '#run_thread' do
subject { memory_killer.send(:start_working) } subject { memory_killer.send(:run_thread) }
before do before do
# let enabled? return 3 times: true, true, false # let enabled? return 3 times: true, true, false
...@@ -37,7 +37,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do ...@@ -37,7 +37,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
.with( .with(
class: described_class.to_s, class: described_class.to_s,
pid: pid, pid: pid,
message: "Exception from start_working: My Exception") message: "Exception from run_thread: My Exception")
expect(memory_killer).to receive(:rss_within_range?).twice.and_raise(StandardError, 'My Exception') expect(memory_killer).to receive(:rss_within_range?).twice.and_raise(StandardError, 'My Exception')
expect(memory_killer).to receive(:sleep).twice.with(Gitlab::SidekiqDaemon::MemoryKiller::CHECK_INTERVAL_SECONDS) expect(memory_killer).to receive(:sleep).twice.with(Gitlab::SidekiqDaemon::MemoryKiller::CHECK_INTERVAL_SECONDS)
...@@ -50,7 +50,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do ...@@ -50,7 +50,7 @@ describe Gitlab::SidekiqDaemon::MemoryKiller do
.with( .with(
class: described_class.to_s, class: described_class.to_s,
pid: pid, pid: pid,
message: "Exception from start_working: My Exception") message: "Exception from run_thread: My Exception")
expect(memory_killer).to receive(:rss_within_range?).once.and_raise(Exception, 'My Exception') expect(memory_killer).to receive(:rss_within_range?).once.and_raise(Exception, 'My Exception')
......
...@@ -37,8 +37,8 @@ describe Gitlab::SidekiqDaemon::Monitor do ...@@ -37,8 +37,8 @@ describe Gitlab::SidekiqDaemon::Monitor do
end end
end end
describe '#start_working when notification channel not enabled' do describe '#run_thread when notification channel not enabled' do
subject { monitor.send(:start_working) } subject { monitor.send(:run_thread) }
it 'return directly' do it 'return directly' do
allow(monitor).to receive(:notification_channel_enabled?).and_return(nil) allow(monitor).to receive(:notification_channel_enabled?).and_return(nil)
...@@ -52,8 +52,8 @@ describe Gitlab::SidekiqDaemon::Monitor do ...@@ -52,8 +52,8 @@ describe Gitlab::SidekiqDaemon::Monitor do
end end
end end
describe '#start_working when notification channel enabled' do describe '#run_thread when notification channel enabled' do
subject { monitor.send(:start_working) } subject { monitor.send(:run_thread) }
before do before do
# we want to run at most once cycle # we want to run at most once cycle
......
...@@ -19,17 +19,24 @@ describe Ci::Build do ...@@ -19,17 +19,24 @@ describe Ci::Build do
it { is_expected.to belong_to(:runner) } it { is_expected.to belong_to(:runner) }
it { is_expected.to belong_to(:trigger_request) } it { is_expected.to belong_to(:trigger_request) }
it { is_expected.to belong_to(:erased_by) } it { is_expected.to belong_to(:erased_by) }
it { is_expected.to have_many(:trace_sections) } it { is_expected.to have_many(:trace_sections) }
it { is_expected.to have_many(:needs) } it { is_expected.to have_many(:needs) }
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:job_variables) }
it { is_expected.to have_one(:deployment) } it { is_expected.to have_one(:deployment) }
it { is_expected.to have_one(:runner_session) } it { is_expected.to have_one(:runner_session) }
it { is_expected.to have_many(:job_variables) }
it { is_expected.to validate_presence_of(:ref) } it { is_expected.to validate_presence_of(:ref) }
it { is_expected.to respond_to(:has_trace?) } it { is_expected.to respond_to(:has_trace?) }
it { is_expected.to respond_to(:trace) } it { is_expected.to respond_to(:trace) }
it { is_expected.to delegate_method(:merge_request_event?).to(:pipeline) } it { is_expected.to delegate_method(:merge_request_event?).to(:pipeline) }
it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) } it { is_expected.to delegate_method(:merge_request_ref?).to(:pipeline) }
it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) } it { is_expected.to delegate_method(:legacy_detached_merge_request_pipeline?).to(:pipeline) }
it { is_expected.to include_module(Ci::PipelineDelegator) } it { is_expected.to include_module(Ci::PipelineDelegator) }
describe 'associations' do describe 'associations' do
......
...@@ -28,7 +28,13 @@ describe Ci::Pipeline, :mailer do ...@@ -28,7 +28,13 @@ describe Ci::Pipeline, :mailer do
it { is_expected.to have_many(:builds) } it { is_expected.to have_many(:builds) }
it { is_expected.to have_many(:auto_canceled_pipelines) } it { is_expected.to have_many(:auto_canceled_pipelines) }
it { is_expected.to have_many(:auto_canceled_jobs) } it { is_expected.to have_many(:auto_canceled_jobs) }
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:triggered_pipelines) }
it { is_expected.to have_one(:chat_data) } it { is_expected.to have_one(:chat_data) }
it { is_expected.to have_one(:source_pipeline) }
it { is_expected.to have_one(:triggered_by_pipeline) }
it { is_expected.to have_one(:source_job) }
it { is_expected.to validate_presence_of(:sha) } it { is_expected.to validate_presence_of(:sha) }
it { is_expected.to validate_presence_of(:status) } it { is_expected.to validate_presence_of(:status) }
......
# frozen_string_literal: true
require 'spec_helper'
describe Ci::Sources::Pipeline do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:pipeline) }
it { is_expected.to belong_to(:source_project) }
it { is_expected.to belong_to(:source_job) }
it { is_expected.to belong_to(:source_pipeline) }
it { is_expected.to validate_presence_of(:project) }
it { is_expected.to validate_presence_of(:pipeline) }
it { is_expected.to validate_presence_of(:source_project) }
it { is_expected.to validate_presence_of(:source_job) }
it { is_expected.to validate_presence_of(:source_pipeline) }
end
...@@ -101,6 +101,8 @@ describe Project do ...@@ -101,6 +101,8 @@ describe Project do
it { is_expected.to have_many(:deploy_tokens).through(:project_deploy_tokens) } it { is_expected.to have_many(:deploy_tokens).through(:project_deploy_tokens) }
it { is_expected.to have_many(:cycle_analytics_stages) } it { is_expected.to have_many(:cycle_analytics_stages) }
it { is_expected.to have_many(:external_pull_requests) } it { is_expected.to have_many(:external_pull_requests) }
it { is_expected.to have_many(:sourced_pipelines) }
it { is_expected.to have_many(:source_pipelines) }
it 'has an inverse relationship with merge requests' do it 'has an inverse relationship with merge requests' do
expect(described_class.reflect_on_association(:merge_requests).has_inverse?).to eq(:target_project) expect(described_class.reflect_on_association(:merge_requests).has_inverse?).to eq(:target_project)
......
...@@ -138,5 +138,40 @@ describe PipelineDetailsEntity do ...@@ -138,5 +138,40 @@ describe PipelineDetailsEntity do
expect(subject[:flags][:yaml_errors]).to be false expect(subject[:flags][:yaml_errors]).to be false
end end
end end
context 'when pipeline is triggered by other pipeline' do
let(:pipeline) { create(:ci_empty_pipeline) }
before do
create(:ci_sources_pipeline, pipeline: pipeline)
end
it 'contains an information about depedent pipeline' do
expect(subject[:triggered_by]).to be_a(Hash)
expect(subject[:triggered_by][:path]).not_to be_nil
expect(subject[:triggered_by][:details]).not_to be_nil
expect(subject[:triggered_by][:details][:status]).not_to be_nil
expect(subject[:triggered_by][:project]).not_to be_nil
end
end
context 'when pipeline triggered other pipeline' do
let(:pipeline) { create(:ci_empty_pipeline) }
let(:build) { create(:ci_build, pipeline: pipeline) }
before do
create(:ci_sources_pipeline, source_job: build)
create(:ci_sources_pipeline, source_job: build)
end
it 'contains an information about depedent pipeline' do
expect(subject[:triggered]).to be_a(Array)
expect(subject[:triggered].length).to eq(2)
expect(subject[:triggered].first[:path]).not_to be_nil
expect(subject[:triggered].first[:details]).not_to be_nil
expect(subject[:triggered].first[:details][:status]).not_to be_nil
expect(subject[:triggered].first[:project]).not_to be_nil
end
end
end end
end end
...@@ -158,7 +158,7 @@ describe PipelineSerializer do ...@@ -158,7 +158,7 @@ describe PipelineSerializer do
it 'verifies number of queries', :request_store do it 'verifies number of queries', :request_store do
recorded = ActiveRecord::QueryRecorder.new { subject } recorded = ActiveRecord::QueryRecorder.new { subject }
expected_queries = Gitlab.ee? ? 38 : 31 expected_queries = Gitlab.ee? ? 38 : 35
expect(recorded.count).to be_within(2).of(expected_queries) expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0) expect(recorded.cached_count).to eq(0)
...@@ -179,7 +179,8 @@ describe PipelineSerializer do ...@@ -179,7 +179,8 @@ describe PipelineSerializer do
# pipeline. With the same ref this check is cached but if refs are # pipeline. With the same ref this check is cached but if refs are
# different then there is an extra query per ref # different then there is an extra query per ref
# https://gitlab.com/gitlab-org/gitlab-foss/issues/46368 # https://gitlab.com/gitlab-org/gitlab-foss/issues/46368
expected_queries = Gitlab.ee? ? 44 : 38 expected_queries = Gitlab.ee? ? 44 : 41
expect(recorded.count).to be_within(2).of(expected_queries) expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0) expect(recorded.cached_count).to eq(0)
end end
......
...@@ -11,76 +11,158 @@ describe Ci::PipelineTriggerService do ...@@ -11,76 +11,158 @@ describe Ci::PipelineTriggerService do
describe '#execute' do describe '#execute' do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:trigger) { create(:ci_trigger, project: project, owner: user) }
let(:result) { described_class.new(project, user, params).execute } let(:result) { described_class.new(project, user, params).execute }
before do before do
project.add_developer(user) project.add_developer(user)
end end
context 'when trigger belongs to a different project' do context 'with a trigger token' do
let(:params) { { token: trigger.token, ref: 'master', variables: nil } } let(:trigger) { create(:ci_trigger, project: project, owner: user) }
let(:trigger) { create(:ci_trigger, project: create(:project), owner: user) }
it 'does nothing' do context 'when trigger belongs to a different project' do
expect { result }.not_to change { Ci::Pipeline.count }
end
end
context 'when params have an existsed trigger token' do
context 'when params have an existsed ref' do
let(:params) { { token: trigger.token, ref: 'master', variables: nil } } let(:params) { { token: trigger.token, ref: 'master', variables: nil } }
let(:trigger) { create(:ci_trigger, project: create(:project), owner: user) }
it 'triggers a pipeline' do it 'does nothing' do
expect { result }.to change { Ci::Pipeline.count }.by(1) expect { result }.not_to change { Ci::Pipeline.count }
expect(result[:pipeline].ref).to eq('master')
expect(result[:pipeline].project).to eq(project)
expect(result[:pipeline].user).to eq(trigger.owner)
expect(result[:pipeline].trigger_requests.to_a)
.to eq(result[:pipeline].builds.map(&:trigger_request).uniq)
expect(result[:status]).to eq(:success)
end end
end
context 'when commit message has [ci skip]' do context 'when params have an existsed trigger token' do
before do context 'when params have an existsed ref' do
allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { '[ci skip]' } let(:params) { { token: trigger.token, ref: 'master', variables: nil } }
end
it 'ignores [ci skip] and create as general' do it 'triggers a pipeline' do
expect { result }.to change { Ci::Pipeline.count }.by(1) expect { result }.to change { Ci::Pipeline.count }.by(1)
expect(result[:pipeline].ref).to eq('master')
expect(result[:pipeline].project).to eq(project)
expect(result[:pipeline].user).to eq(trigger.owner)
expect(result[:pipeline].trigger_requests.to_a)
.to eq(result[:pipeline].builds.map(&:trigger_request).uniq)
expect(result[:status]).to eq(:success) expect(result[:status]).to eq(:success)
end end
context 'when commit message has [ci skip]' do
before do
allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { '[ci skip]' }
end
it 'ignores [ci skip] and create as general' do
expect { result }.to change { Ci::Pipeline.count }.by(1)
expect(result[:status]).to eq(:success)
end
end
context 'when params have a variable' do
let(:params) { { token: trigger.token, ref: 'master', variables: variables } }
let(:variables) { { 'AAA' => 'AAA123' } }
it 'has a variable' do
expect { result }.to change { Ci::PipelineVariable.count }.by(1)
.and change { Ci::TriggerRequest.count }.by(1)
expect(result[:pipeline].variables.map { |v| { v.key => v.value } }.first).to eq(variables)
expect(result[:pipeline].trigger_requests.last.variables).to be_nil
end
end
end end
context 'when params have a variable' do context 'when params have a non-existsed ref' do
let(:params) { { token: trigger.token, ref: 'master', variables: variables } } let(:params) { { token: trigger.token, ref: 'invalid-ref', variables: nil } }
let(:variables) { { 'AAA' => 'AAA123' } }
it 'has a variable' do it 'does not trigger a pipeline' do
expect { result }.to change { Ci::PipelineVariable.count }.by(1) expect { result }.not_to change { Ci::Pipeline.count }
.and change { Ci::TriggerRequest.count }.by(1) expect(result[:http_status]).to eq(400)
expect(result[:pipeline].variables.map { |v| { v.key => v.value } }.first).to eq(variables)
expect(result[:pipeline].trigger_requests.last.variables).to be_nil
end end
end end
end end
context 'when params have a non-existsed ref' do context 'when params have a non-existsed trigger token' do
let(:params) { { token: trigger.token, ref: 'invalid-ref', variables: nil } } let(:params) { { token: 'invalid-token', ref: nil, variables: nil } }
it 'does not trigger a pipeline' do it 'does not trigger a pipeline' do
expect { result }.not_to change { Ci::Pipeline.count } expect { result }.not_to change { Ci::Pipeline.count }
expect(result[:http_status]).to eq(400) expect(result).to be_nil
end end
end end
end end
context 'when params have a non-existsed trigger token' do context 'with a pipeline job token' do
let(:params) { { token: 'invalid-token', ref: nil, variables: nil } } let!(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:job) { create(:ci_build, :running, pipeline: pipeline, user: user) }
context 'when job user does not have a permission to read a project' do
let(:params) { { token: job.token, ref: 'master', variables: nil } }
let(:job) { create(:ci_build, pipeline: pipeline, user: create(:user)) }
it 'does nothing' do
expect { result }.not_to change { Ci::Pipeline.count }
end
end
context 'when job is not running' do
let(:params) { { token: job.token, ref: 'master', variables: nil } }
let(:job) { create(:ci_build, :success, pipeline: pipeline, user: user) }
it 'does nothing' do
expect { result }.not_to change { Ci::Pipeline.count }
expect(result[:message]).to eq('400 Job has to be running')
end
end
it 'does not trigger a pipeline' do context 'when params have an existsed job token' do
expect { result }.not_to change { Ci::Pipeline.count } context 'when params have an existsed ref' do
expect(result).to be_nil let(:params) { { token: job.token, ref: 'master', variables: nil } }
it 'triggers a pipeline' do
expect { result }.to change { Ci::Pipeline.count }.by(1)
expect(result[:pipeline].ref).to eq('master')
expect(result[:pipeline].project).to eq(project)
expect(result[:pipeline].user).to eq(job.user)
expect(result[:status]).to eq(:success)
end
context 'when commit message has [ci skip]' do
before do
allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { '[ci skip]' }
end
it 'ignores [ci skip] and create as general' do
expect { result }.to change { Ci::Pipeline.count }.by(1)
expect(result[:status]).to eq(:success)
end
end
context 'when params have a variable' do
let(:params) { { token: job.token, ref: 'master', variables: variables } }
let(:variables) { { 'AAA' => 'AAA123' } }
it 'has a variable' do
expect { result }.to change { Ci::PipelineVariable.count }.by(1)
.and change { Ci::Sources::Pipeline.count }.by(1)
expect(result[:pipeline].variables.map { |v| { v.key => v.value } }.first).to eq(variables)
expect(job.sourced_pipelines.last.pipeline_id).to eq(result[:pipeline].id)
end
end
end
context 'when params have a non-existsed ref' do
let(:params) { { token: job.token, ref: 'invalid-ref', variables: nil } }
it 'does not job a pipeline' do
expect { result }.not_to change { Ci::Pipeline.count }
expect(result[:http_status]).to eq(400)
end
end
end
context 'when params have a non-existsed trigger token' do
let(:params) { { token: 'invalid-token', ref: nil, variables: nil } }
it 'does not trigger a pipeline' do
expect { result }.not_to change { Ci::Pipeline.count }
expect(result).to be_nil
end
end end
end end
end end
......
...@@ -426,5 +426,22 @@ describe Groups::TransferService do ...@@ -426,5 +426,22 @@ describe Groups::TransferService do
end end
end end
end end
context 'when a project in group has container images' do
let(:group) { create(:group, :public, :nested) }
let!(:project) { create(:project, :repository, :public, namespace: group) }
before do
stub_container_registry_tags(repository: /image/, tags: %w[rc1])
create(:container_repository, project: project, name: :image)
create(:group_member, :owner, group: new_parent_group, user: user)
end
it 'does not allow group to be transferred' do
transfer_service.execute(new_parent_group)
expect(transfer_service.error).to match(/Docker images in their Container Registry/)
end
end
end end
end end
...@@ -148,6 +148,30 @@ describe Groups::UpdateService do ...@@ -148,6 +148,30 @@ describe Groups::UpdateService do
end end
end end
context 'projects in group have container images' do
let(:service) { described_class.new(public_group, user, path: SecureRandom.hex) }
let(:project) { create(:project, :internal, group: public_group) }
before do
stub_container_registry_tags(repository: /image/, tags: %w[rc1])
create(:container_repository, project: project, name: :image)
end
it 'does not allow path to be changed' do
result = described_class.new(public_group, user, path: 'new-path').execute
expect(result).to eq false
expect(public_group.errors[:base].first).to match(/Docker images in their Container Registry/)
end
it 'allows other settings to be changed' do
result = described_class.new(public_group, user, name: 'new-name').execute
expect(result).to eq true
expect(public_group.reload.name).to eq('new-name')
end
end
context 'for a subgroup' do context 'for a subgroup' do
let(:subgroup) { create(:group, :private, parent: private_group) } let(:subgroup) { create(:group, :private, parent: private_group) }
......
...@@ -154,6 +154,17 @@ RSpec.configure do |config| ...@@ -154,6 +154,17 @@ RSpec.configure do |config|
.with(:force_autodevops_on_by_default, anything) .with(:force_autodevops_on_by_default, anything)
.and_return(false) .and_return(false)
# The following can be removed once Vue Issuable Sidebar
# is feature-complete and can be made default in place
# of older sidebar.
# See https://gitlab.com/groups/gitlab-org/-/epics/1863
allow(Feature).to receive(:enabled?)
.with(:vue_issuable_sidebar, anything)
.and_return(false)
allow(Feature).to receive(:enabled?)
.with(:vue_issuable_epic_sidebar, anything)
.and_return(false)
# Stub these calls due to being expensive operations # Stub these calls due to being expensive operations
# It can be reenabled for specific tests via: # It can be reenabled for specific tests via:
# #
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment