Commit 038a53e8 authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents a2f340d4 5d11ba29
......@@ -645,7 +645,7 @@
rules:
- if: '$DAST_DISABLED || $GITLAB_FEATURES !~ /\bdast\b/'
when: never
- <<: *if-dot-com-gitlab-org-schedule
- <<: *if-master-schedule-nightly
allow_failure: true
################
......
......@@ -18,19 +18,6 @@ Capybara/CurrentPathExpectation:
Layout/ArgumentAlignment:
Enabled: false
# Offense count: 13
# Cop supports --auto-correct.
Layout/ClosingHeredocIndentation:
Exclude:
- 'app/graphql/mutations/merge_requests/set_wip.rb'
- 'ee/db/geo/migrate/20180322062741_migrate_ci_job_artifacts_to_separate_registry.rb'
- 'ee/lib/gitlab/geo/health_check.rb'
- 'spec/features/merge_request/user_sees_diff_spec.rb'
- 'spec/lib/gitlab/asciidoc_spec.rb'
- 'spec/lib/gitlab/checks/project_moved_spec.rb'
- 'spec/rubocop/cop/active_record_association_reload_spec.rb'
- 'spec/services/task_list_toggle_service_spec.rb'
# Offense count: 13
# Cop supports --auto-correct.
Layout/ClosingParenthesisIndentation:
......
import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { uniqueId } from 'lodash';
import produce from 'immer';
import { defaultDataIdFromObject } from 'apollo-cache-inmemory';
import createDefaultClient from '~/lib/graphql';
import activeDiscussionQuery from './graphql/queries/active_discussion.query.graphql';
......@@ -11,12 +12,17 @@ Vue.use(VueApollo);
const resolvers = {
Mutation: {
updateActiveDiscussion: (_, { id = null, source }, { cache }) => {
const data = cache.readQuery({ query: activeDiscussionQuery });
data.activeDiscussion = {
const sourceData = cache.readQuery({ query: activeDiscussionQuery });
const data = produce(sourceData, draftData => {
// eslint-disable-next-line no-param-reassign
draftData.activeDiscussion = {
__typename: 'ActiveDiscussion',
id,
source,
};
});
cache.writeQuery({ query: activeDiscussionQuery, data });
},
},
......@@ -37,6 +43,7 @@ const defaultClient = createDefaultClient(
},
},
typeDefs,
assumeImmutableResults: true,
},
);
......
......@@ -281,13 +281,8 @@ export default {
.mutate({
mutation: moveDesignMutation,
variables: this.designMoveVariables(newIndex, element),
update: (store, { data: { designManagementMove } }) => {
return updateDesignsOnStoreAfterReorder(
store,
designManagementMove,
this.projectQueryBody,
);
},
update: (store, { data: { designManagementMove } }) =>
updateDesignsOnStoreAfterReorder(store, designManagementMove, this.projectQueryBody),
optimisticResponse: moveDesignOptimisticResponse(this.reorderedDesigns),
})
.catch(() => {
......@@ -327,7 +322,7 @@ export default {
v-if="isLatestVersion"
variant="link"
size="small"
class="gl-mr-3 js-select-all"
class="gl-mr-4 js-select-all"
@click="toggleDesignsSelection"
>{{ selectAllButtonText }}
</gl-button>
......
/* eslint-disable @gitlab/require-i18n-strings */
import { groupBy } from 'lodash';
import produce from 'immer';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import { extractCurrentDiscussion, extractDesign } from './design_management_utils';
import { extractCurrentDiscussion, extractDesign, extractDesigns } from './design_management_utils';
import {
ADD_IMAGE_DIFF_NOTE_ERROR,
UPDATE_IMAGE_DIFF_NOTE_ERROR,
......@@ -10,13 +11,20 @@ import {
designDeletionError,
} from './error_messages';
const designsOf = data => data.project.issue.designCollection.designs;
const isParticipating = (design, username) =>
design.issue.participants.nodes.some(participant => participant.username === username);
const deleteDesignsFromStore = (store, query, selectedDesigns) => {
const data = store.readQuery(query);
const sourceData = store.readQuery(query);
const changedDesigns = data.project.issue.designCollection.designs.nodes.filter(
node => !selectedDesigns.includes(node.filename),
const data = produce(sourceData, draftData => {
const changedDesigns = designsOf(sourceData).nodes.filter(
design => !selectedDesigns.includes(design.filename),
);
data.project.issue.designCollection.designs.nodes = [...changedDesigns];
designsOf(draftData).nodes = [...changedDesigns];
});
store.writeQuery({
...query,
......@@ -33,13 +41,15 @@ const deleteDesignsFromStore = (store, query, selectedDesigns) => {
*/
const addNewVersionToStore = (store, query, version) => {
if (!version) return;
const sourceData = store.readQuery(query);
const data = store.readQuery(query);
data.project.issue.designCollection.versions.nodes = [
const data = produce(sourceData, draftData => {
// eslint-disable-next-line no-param-reassign
draftData.project.issue.designCollection.versions.nodes = [
version,
...data.project.issue.designCollection.versions.nodes,
...draftData.project.issue.designCollection.versions.nodes,
];
});
store.writeQuery({
...query,
......@@ -48,46 +58,41 @@ const addNewVersionToStore = (store, query, version) => {
};
const addDiscussionCommentToStore = (store, createNote, query, queryVariables, discussionId) => {
const data = store.readQuery({
const sourceData = store.readQuery({
query,
variables: queryVariables,
});
const design = extractDesign(data);
const newParticipant = {
__typename: 'User',
...createNote.note.author,
};
const data = produce(sourceData, draftData => {
const design = extractDesign(draftData);
const currentDiscussion = extractCurrentDiscussion(design.discussions, discussionId);
currentDiscussion.notes.nodes = [...currentDiscussion.notes.nodes, createNote.note];
design.notesCount += 1;
if (
!design.issue.participants.nodes.some(
participant => participant.username === createNote.note.author.username,
)
) {
design.issue.participants.nodes = [
...design.issue.participants.nodes,
{
__typename: 'User',
...createNote.note.author,
},
];
if (!isParticipating(design, createNote.note.author.username)) {
design.issue.participants.nodes = [...design.issue.participants.nodes, newParticipant];
}
design.notesCount += 1;
});
store.writeQuery({
query,
variables: queryVariables,
data: {
...data,
design: {
...design,
},
},
data,
});
};
const addImageDiffNoteToStore = (store, createImageDiffNote, query, variables) => {
const data = store.readQuery({
const sourceData = store.readQuery({
query,
variables,
});
const newDiscussion = {
__typename: 'Discussion',
id: createImageDiffNote.note.discussion.id,
......@@ -101,9 +106,12 @@ const addImageDiffNoteToStore = (store, createImageDiffNote, query, variables) =
nodes: [createImageDiffNote.note],
},
};
const design = extractDesign(data);
const notesCount = design.notesCount + 1;
const data = produce(sourceData, draftData => {
const design = extractDesign(draftData);
design.notesCount += 1;
design.discussions.nodes = [...design.discussions.nodes, newDiscussion];
if (
!design.issue.participants.nodes.some(
participant => participant.username === createImageDiffNote.note.author.username,
......@@ -117,26 +125,23 @@ const addImageDiffNoteToStore = (store, createImageDiffNote, query, variables) =
},
];
}
});
store.writeQuery({
query,
variables,
data: {
...data,
design: {
...design,
notesCount,
},
},
data,
});
};
const updateImageDiffNoteInStore = (store, updateImageDiffNote, query, variables) => {
const data = store.readQuery({
const sourceData = store.readQuery({
query,
variables,
});
const design = extractDesign(data);
const data = produce(sourceData, draftData => {
const design = extractDesign(draftData);
const discussion = extractCurrentDiscussion(
design.discussions,
updateImageDiffNote.note.discussion.id,
......@@ -146,21 +151,20 @@ const updateImageDiffNoteInStore = (store, updateImageDiffNote, query, variables
...discussion.notes,
nodes: [updateImageDiffNote.note, ...discussion.notes.nodes.slice(1)],
};
});
store.writeQuery({
query,
variables,
data: {
...data,
design,
},
data,
});
};
const addNewDesignToStore = (store, designManagementUpload, query) => {
const data = store.readQuery(query);
const sourceData = store.readQuery(query);
const currentDesigns = data.project.issue.designCollection.designs.nodes;
const data = produce(sourceData, draftData => {
const currentDesigns = extractDesigns(draftData);
const existingDesigns = groupBy(currentDesigns, 'filename');
const newDesigns = currentDesigns.concat(
designManagementUpload.designs.filter(d => !existingDesigns[d.filename]),
......@@ -179,7 +183,7 @@ const addNewDesignToStore = (store, designManagementUpload, query) => {
const newVersions = [
...(newVersionNode || []),
...data.project.issue.designCollection.versions.nodes,
...draftData.project.issue.designCollection.versions.nodes,
];
const updatedDesigns = {
......@@ -193,8 +197,9 @@ const addNewDesignToStore = (store, designManagementUpload, query) => {
nodes: newVersions,
},
};
data.project.issue.designCollection = updatedDesigns;
// eslint-disable-next-line no-param-reassign
draftData.project.issue.designCollection = updatedDesigns;
});
store.writeQuery({
...query,
......@@ -203,8 +208,14 @@ const addNewDesignToStore = (store, designManagementUpload, query) => {
};
const moveDesignInStore = (store, designManagementMove, query) => {
const data = store.readQuery(query);
data.project.issue.designCollection.designs = designManagementMove.designCollection.designs;
const sourceData = store.readQuery(query);
const data = produce(sourceData, draftData => {
// eslint-disable-next-line no-param-reassign
draftData.project.issue.designCollection.designs =
designManagementMove.designCollection.designs;
});
store.writeQuery({
...query,
data,
......
......@@ -90,7 +90,7 @@ export default {
if (this.newSnippet) {
return this.projectPath
? `${gon.relative_url_root}${this.projectPath}/-/snippets`
: `${gon.relative_url_root}-/snippets`;
: `${gon.relative_url_root}/-/snippets`;
}
return this.snippet.webUrl;
},
......
......@@ -97,7 +97,7 @@ export default {
text: __('New snippet'),
href: this.snippet.project
? `${this.snippet.project.webUrl}/-/snippets/new`
: `${gon.relative_url_root}-/snippets/new`,
: `${gon.relative_url_root}/-/snippets/new`,
variant: 'success',
category: 'secondary',
cssClass: 'ml-2',
......
......@@ -40,5 +40,9 @@ module Ci
def self.has_code_coverage?
where(file_type: :code_coverage).exists?
end
def self.find_with_code_coverage
find_by(file_type: :code_coverage)
end
end
end
......@@ -1344,7 +1344,7 @@ class MergeRequest < ApplicationRecord
def has_coverage_reports?
return false unless Feature.enabled?(:coverage_report_view, project)
actual_head_pipeline&.has_reports?(Ci::JobArtifact.coverage_reports)
actual_head_pipeline&.pipeline_artifacts&.has_code_coverage?
end
def has_terraform_reports?
......
......@@ -135,6 +135,10 @@ class Namespace < ApplicationRecord
uniquify.string(path) { |s| Namespace.find_by_path_or_name(s) }
end
def clean_name(value)
value.scan(Gitlab::Regex.group_name_regex_chars).join(' ')
end
def find_by_pages_host(host)
gitlab_host = "." + Settings.pages.host.downcase
host = host.downcase
......
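For context, a minimal sketch of how the new `Namespace.clean_name` helper behaves. This is illustrative only and not part of the diff; the input and output values are taken from the Namespace spec added later in this commit.

```ruby
# Illustrative console session; values come from the Namespace spec in this commit.
Namespace.clean_name("Hello - World _ (Hi.)")  # => "Hello - World _ (Hi.)" (already valid, returned as-is)
Namespace.clean_name("Green'! Test~~~")        # => "Green Test" (invalid characters are dropped and the
                                               #    remaining valid runs are joined with single spaces)
```

The SubscriptionsController change further down uses this helper to sanitize a company or user name before handing it to `Groups::CreateService`.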
......@@ -12,7 +12,7 @@ module Ci
{
status: :parsed,
key: key(base_pipeline, head_pipeline),
data: head_pipeline.coverage_reports.pick(merge_request.new_paths)
data: Gitlab::Ci::Pipeline::Artifact::CodeCoverage.new(head_pipeline.pipeline_artifacts.find_with_code_coverage).for_files(merge_request.new_paths)
}
rescue => e
Gitlab::ErrorTracking.track_exception(e, project_id: project.id)
......
......@@ -22,9 +22,6 @@ module Projects
end
success
rescue => e
Gitlab::ErrorTracking.track_exception(e)
error(e.message, pass_back: { exception: e })
end
private
......
......@@ -10,14 +10,6 @@ class PagesUpdateConfigurationWorker
project = Project.find_by_id(project_id)
return unless project
result = Projects::UpdatePagesConfigurationService.new(project).execute
# The ConfigurationService swallows all exceptions and wraps them in a status
# we need to keep this while the feature flag still allows running this
# service within a request.
# But we might as well take advantage of sidekiq retries here.
# We should let the service raise after we remove the feature flag
# https://gitlab.com/gitlab-org/gitlab/-/issues/230695
raise result[:exception] if result[:exception]
Projects::UpdatePagesConfigurationService.new(project).execute
end
end
---
title: Add spacing to design management toolbar buttons
merge_request: 38889
author: George Tsiolis
type: changed
---
title: Create a POC for 'immer' library
merge_request: 39738
author:
type: other
......@@ -56,7 +56,7 @@ def note_for_spin_role(spin, role)
return OPTIONAL_REVIEW_TEMPLATE % { role: role.capitalize, category: helper.label_for_category(spin.category) }
end
spin.public_send(role)&.markdown_name(timezone_experiment: spin.timezone_experiment, author: roulette.team_mr_author) # rubocop:disable GitlabSecurity/PublicSend
spin.public_send(role)&.markdown_name(author: roulette.team_mr_author) # rubocop:disable GitlabSecurity/PublicSend
end
def markdown_row_for_spins(category, spins_array)
......
# frozen_string_literal: true
class SchedulePopulateResolvedOnDefaultBranchColumn < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 100
DELAY_INTERVAL = 5.minutes.to_i
MIGRATION_CLASS = 'PopulateResolvedOnDefaultBranchColumn'
BASE_MODEL = EE::Gitlab::BackgroundMigration::PopulateResolvedOnDefaultBranchColumn::Vulnerability
disable_ddl_transaction!
def up
return unless run_migration?
BASE_MODEL.distinct.each_batch(of: BATCH_SIZE, column: :project_id) do |batch, index|
project_ids = batch.pluck(:project_id)
migrate_in(index * DELAY_INTERVAL, MIGRATION_CLASS, project_ids)
end
end
def down; end
private
def run_migration?
Gitlab.ee? && table_exists?(:projects) && table_exists?(:vulnerabilities)
end
end
fdcce45050f972d8edf2c645022f517ff6b9f4c76767e6cebe45a11fe34dd388
\ No newline at end of file
......@@ -2164,10 +2164,7 @@ build_job:
```
Environment variables support for `project:`, `job:`, and `ref` was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/202093)
in GitLab 13.3. This is under development, but it is ready for production use. It is deployed
behind the `ci_expand_names_for_cross_pipeline_artifacts` feature flag, which is **disabled by default**.
[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
can enable it for your instance.
in GitLab 13.3. [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/235761) in GitLab 13.4.
For example:
......
......@@ -39,7 +39,7 @@ Changes to the schema should be committed to `db/structure.sql`. This
file is automatically generated by Rails, so you normally should not
edit this file by hand. If your migration is adding a column to a
table, that column will be added at the bottom. Please do not reorder
columns manually for existing tables as this will cause confusing to
columns manually for existing tables as this will cause confusion to
other people using `db/structure.sql` generated by Rails.
When your local database in your GDK is diverging from the schema from
......
......@@ -647,16 +647,13 @@ a fixed timeout of 60 seconds.
- Click **Create new DAST scan**.
- Click **Delete** in the matching site profile's row.
### Enable or disable On-demand Scans and site profiles
### Enable or disable On-demand Scans
On-demand Scans with site profiles is enabled by default. You can disable On-demand Scans
instance-wide, or disable it for specific projects if you prefer. DAST site profiles are not
available if the On-demand Scans feature is disabled.
On-demand Scans is enabled by default. You can disable On-demand Scans
instance-wide, or disable it for specific projects if you prefer.
Use of On-demand Scans with site profiles requires **both** the following feature flags enabled:
- security_on_demand_scans_feature_flag
- security_on_demand_scans_site_profiles_feature_flag
Use of On-demand Scans requires the `security_on_demand_scans_feature_flag`
feature flag to be enabled.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can disable or enable the feature flags.
......@@ -681,31 +678,6 @@ Feature.enable(:security_on_demand_scans_feature_flag)
Feature.enable(:security_on_demand_scans_feature_flag, Project.find(<project ID>))
```
#### Enable or disable site profiles
The Site Profiles feature is enabled instance-wide by default. You can disable it instance-wide, or disable it
for specific projects if you prefer.
[GitLab administrators with access to the GitLab Rails console](../../../administration/feature_flags.md)
can disable or enable the feature flag.
To disable Site Profiles:
```ruby
# Instance-wide
Feature.disable(:security_on_demand_scans_site_profiles_feature_flag)
# or by project
Feature.disable(:security_on_demand_scans_site_profiles_feature_flag, Project.find(<project id>))
```
To enable Site Profiles:
```ruby
# Instance-wide
Feature.enable(:security_on_demand_scans_site_profiles_feature_flag)
# or by project
Feature.enable(:security_on_demand_scans_site_profiles_feature_flag, Project.find(<project ID>))
```
## Reports
The DAST tool outputs a report file in JSON format by default. However, this tool can also generate reports in
......
......@@ -69,6 +69,12 @@ export default {
tdClass: 'merge-request-analytics-td',
thAttr: TH_TEST_ID,
},
{
key: 'commits',
label: s__('Commits'),
tdClass: 'merge-request-analytics-td',
thAttr: TH_TEST_ID,
},
{
key: 'pipelines',
label: s__('MergeRequestAnalytics|Pipelines'),
......@@ -228,6 +234,10 @@ export default {
</div>
</template>
<template #cell(commits)="{ item }">
<div :data-testid="$options.testIds.COMMITS">{{ item.commitCount }}</div>
</template>
<template #cell(pipelines)="{ item }">
<div :data-testid="$options.testIds.PIPELINES">{{ item.pipelines.nodes.length }}</div>
</template>
......
......@@ -36,6 +36,7 @@ export const THROUGHPUT_TABLE_TEST_IDS = {
PIPELINES: 'pipelinesCol',
LINE_CHANGES: 'lineChangesCol',
ASSIGNEES: 'assigneesCol',
COMMITS: 'commitsCol',
};
export const PIPELINE_STATUS_ICON_CLASSES = {
......
......@@ -33,6 +33,7 @@ query($fullPath: ID!, $startDate: Time!, $endDate: Time!, $limit: Int!) {
}
}
}
commitCount
}
}
}
......
<script>
import OnDemandScansFormOld from './on_demand_scans_form_old.vue';
import OnDemandScansForm from './on_demand_scans_form.vue';
import OnDemandScansEmptyState from './on_demand_scans_empty_state.vue';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
export default {
name: 'OnDemandScansApp',
components: {
OnDemandScansFormOld,
OnDemandScansForm,
OnDemandScansEmptyState,
},
mixins: [glFeatureFlagsMixin()],
props: {
helpPagePath: {
type: String,
......@@ -50,7 +46,6 @@ export default {
<div>
<template v-if="showForm">
<on-demand-scans-form
v-if="glFeatures.securityOnDemandScansSiteProfilesFeatureFlag"
:help-page-path="helpPagePath"
:project-path="projectPath"
:default-branch="defaultBranch"
......@@ -58,13 +53,6 @@ export default {
:new-site-profile-path="newSiteProfilePath"
@cancel="showForm = false"
/>
<on-demand-scans-form-old
v-else
:help-page-path="helpPagePath"
:project-path="projectPath"
:default-branch="defaultBranch"
@cancel="showForm = false"
/>
</template>
<on-demand-scans-empty-state
v-else
......
<script>
import * as Sentry from '@sentry/browser';
import {
GlButton,
GlForm,
GlFormGroup,
GlFormInput,
GlIcon,
GlLink,
GlSprintf,
GlTooltipDirective,
} from '@gitlab/ui';
import { s__, sprintf } from '~/locale';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import { isAbsolute, redirectTo } from '~/lib/utils/url_utility';
import runDastScanMutation from '../graphql/run_dast_scan.mutation.graphql';
import { SCAN_TYPES } from '../constants';
const initField = value => ({
value,
state: null,
feedback: null,
});
export default {
components: {
GlButton,
GlForm,
GlFormGroup,
GlFormInput,
GlIcon,
GlLink,
GlSprintf,
},
directives: {
GlTooltip: GlTooltipDirective,
},
props: {
helpPagePath: {
type: String,
required: true,
},
projectPath: {
type: String,
required: true,
},
defaultBranch: {
type: String,
required: true,
},
},
data() {
return {
form: {
scanType: initField(SCAN_TYPES.PASSIVE),
branch: initField(this.defaultBranch),
targetUrl: initField(''),
},
loading: false,
};
},
computed: {
formData() {
return {
projectPath: this.projectPath,
...Object.fromEntries(Object.entries(this.form).map(([key, { value }]) => [key, value])),
};
},
formHasErrors() {
return Object.values(this.form).some(({ state }) => state === false);
},
someFieldEmpty() {
return Object.values(this.form).some(({ value }) => !value);
},
isSubmitDisabled() {
return this.formHasErrors || this.someFieldEmpty;
},
},
methods: {
validateTargetUrl() {
let [state, feedback] = [true, null];
const { value: targetUrl } = this.form.targetUrl;
if (!isAbsolute(targetUrl)) {
state = false;
feedback = s__(
'OnDemandScans|Please enter a valid URL format, ex: http://www.example.com/home',
);
}
this.form.targetUrl = {
...this.form.targetUrl,
state,
feedback,
};
},
onSubmit() {
this.loading = true;
this.$apollo
.mutate({
mutation: runDastScanMutation,
variables: this.formData,
})
.then(({ data: { runDastScan: { pipelineUrl, errors } } }) => {
if (errors?.length) {
createFlash(
sprintf(s__('OnDemandScans|Could not run the scan: %{backendErrorMessage}'), {
backendErrorMessage: errors.join(', '),
}),
);
this.loading = false;
} else {
redirectTo(pipelineUrl);
}
})
.catch(e => {
Sentry.captureException(e);
createFlash(s__('OnDemandScans|Could not run the scan. Please try again.'));
this.loading = false;
});
},
},
};
</script>
<template>
<gl-form @submit.prevent="onSubmit">
<header class="gl-mb-6">
<h2>{{ s__('OnDemandScans|New on-demand DAST scan') }}</h2>
<p>
<gl-icon name="information-o" class="gl-vertical-align-text-bottom gl-text-gray-600" />
<gl-sprintf
:message="
s__(
'OnDemandScans|On-demand scans run outside the DevOps cycle and find vulnerabilities in your projects. %{learnMoreLinkStart}Learn more%{learnMoreLinkEnd}',
)
"
>
<template #learnMoreLink="{ content }">
<gl-link :href="helpPagePath">
{{ content }}
</gl-link>
</template>
</gl-sprintf>
</p>
</header>
<gl-form-group>
<template #label>
{{ s__('OnDemandScans|Scan mode') }}
<gl-icon
v-gl-tooltip.hover
name="information-o"
class="gl-vertical-align-text-bottom gl-text-gray-600"
:title="s__('OnDemandScans|Only a passive scan can be performed on demand.')"
/>
</template>
{{ s__('OnDemandScans|Passive DAST Scan') }}
</gl-form-group>
<gl-form-group>
<template #label>
{{ s__('OnDemandScans|Attached branch') }}
<gl-icon
v-gl-tooltip.hover
name="information-o"
class="gl-vertical-align-text-bottom gl-text-gray-600"
:title="s__('OnDemandScans|Attached branch is where the scan job runs.')"
/>
</template>
{{ defaultBranch }}
</gl-form-group>
<gl-form-group :invalid-feedback="form.targetUrl.feedback">
<template #label>
{{ s__('OnDemandScans|Target URL') }}
<gl-icon
v-gl-tooltip.hover
name="information-o"
class="gl-vertical-align-text-bottom gl-text-gray-600"
:title="s__('OnDemandScans|DAST will scan the target URL and any discovered sub URLs.')"
/>
</template>
<gl-form-input
v-model="form.targetUrl.value"
class="mw-460"
data-testid="target-url-input"
type="url"
:state="form.targetUrl.state"
@input="validateTargetUrl"
/>
</gl-form-group>
<div class="gl-mt-6 gl-pt-6">
<gl-button
type="submit"
variant="success"
class="js-no-auto-disable"
:disabled="isSubmitDisabled"
:loading="loading"
>
{{ s__('OnDemandScans|Run this scan') }}
</gl-button>
<gl-button @click="$emit('cancel')">
{{ __('Cancel') }}
</gl-button>
</div>
</gl-form>
</template>
<script>
import { GlDeprecatedButton, GlLink, GlIcon } from '@gitlab/ui';
import { GlButton, GlLink, GlIcon } from '@gitlab/ui';
import ProjectAvatar from '~/vue_shared/components/project_avatar/default.vue';
import { numberToHumanSize, isOdd } from '~/lib/utils/number_utils';
import { s__ } from '~/locale';
......@@ -8,7 +8,7 @@ import StorageRow from './storage_row.vue';
export default {
components: {
GlIcon,
GlDeprecatedButton,
GlButton,
GlLink,
ProjectAvatar,
StorageRow,
......@@ -87,13 +87,14 @@ export default {
<div class="table-section section-wrap section-70 text-truncate" role="gridcell">
<div class="table-mobile-header font-weight-bold" role="rowheader">{{ __('Project') }}</div>
<div class="table-mobile-content">
<gl-deprecated-button
<gl-button
class="btn-transparent float-left p-0 mr-2"
:aria-label="__('Toggle project')"
category="tertiary"
@click="toggleProject"
>
<gl-icon :name="iconName" class="folder-icon" />
</gl-deprecated-button>
</gl-button>
<project-avatar :project="projectAvatar" :size="20" />
......
......@@ -4,7 +4,6 @@ module Projects
class OnDemandScansController < Projects::ApplicationController
before_action do
authorize_read_on_demand_scans!
push_frontend_feature_flag(:security_on_demand_scans_site_profiles_feature_flag, project, default_enabled: true)
end
def index
......
......@@ -44,9 +44,9 @@ class SubscriptionsController < ApplicationController
if params[:selected_group]
group = current_user.manageable_groups_eligible_for_subscription.find(params[:selected_group])
else
group_name = params[:setup_for_company] ? customer_params[:company] : "#{current_user.name}'s Group"
path = Namespace.clean_path(group_name)
group = Groups::CreateService.new(current_user, name: group_name, path: path).execute
name = Namespace.clean_name(params[:setup_for_company] ? customer_params[:company] : current_user.name)
path = Namespace.clean_path(name)
group = Groups::CreateService.new(current_user, name: name, path: path).execute
return render json: group.errors.to_json unless group.persisted?
end
......
......@@ -163,6 +163,7 @@ module EE
projects/on_demand_scans#index
projects/dast_profiles#index
projects/dast_site_profiles#new
projects/dast_site_profiles#edit
projects/dast_scanner_profiles#new
projects/dependencies#index
projects/licenses#index
......@@ -182,6 +183,7 @@ module EE
projects/on_demand_scans#index
projects/dast_profiles#index
projects/dast_site_profiles#new
projects/dast_site_profiles#edit
projects/dast_scanner_profiles#new
]
end
......
......@@ -52,9 +52,7 @@ module EE
def build_cross_dependency_relationship_fragment(dependency, search_scope)
args = dependency.values_at(:job, :ref, :project)
if ::Gitlab::Ci::Features.expand_names_for_cross_pipeline_artifacts?(project)
args = args.map { |value| ExpandVariables.expand(value, processable_variables) }
end
dep_id = search_scope.max_build_id_by(*args)
model_class.id_in(dep_id)
......
---
title: Populate `resolved_on_default_branch` column for existing vulnerabilities
merge_request: 38795
author:
type: added
---
title: Fix group name bug for new purchase flow
merge_request: 39915
author:
type: fixed
---
title: Activate on-demand scans nav item when editing a site profile
merge_request: 40148
author:
type: fixed
---
name: security_on_demand_scans_site_profiles_feature_flag
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/38412
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/233110
group: group::dynamic analysis
type: development
default_enabled: true
......@@ -114,6 +114,13 @@ RSpec.describe SubscriptionsController do
it 'updates the setup_for_company attribute of the current user' do
expect { subject }.to change { user.reload.setup_for_company }.from(nil).to(true)
end
it 'creates a group based on the company' do
expect(Namespace).to receive(:clean_name).with(params.dig(:customer, :company)).and_call_original
expect_any_instance_of(EE::Groups::CreateService).to receive(:execute)
subject
end
end
context 'when not setting up for a company' do
......@@ -130,13 +137,14 @@ RSpec.describe SubscriptionsController do
it 'does not update the setup_for_company attribute of the current user' do
expect { subject }.not_to change { user.reload.setup_for_company }
end
end
it 'creates a group' do
it 'creates a group based on the user' do
expect(Namespace).to receive(:clean_name).with(user.name).and_call_original
expect_any_instance_of(EE::Groups::CreateService).to receive(:execute)
subject
end
end
context 'when an error occurs creating a group' do
let(:group) { Group.new(path: 'foo') }
......
......@@ -215,6 +215,10 @@ describe('ThroughputTable', () => {
expect(findCol(TEST_IDS.MILESTONE).text()).toBe(title);
});
it('displays the correct commit count', () => {
expect(findCol(TEST_IDS.COMMITS).text()).toBe('1');
});
it('displays the correct pipeline count', () => {
expect(findCol(TEST_IDS.PIPELINES).text()).toBe('0');
});
......
......@@ -51,6 +51,7 @@ export const throughputTableHeaders = [
'Date Merged',
'Time to merge',
'Milestone',
'Commits',
'Pipelines',
'Line changes',
'Assignees',
......@@ -81,5 +82,6 @@ export const throughputTableData = [
pipelines: {
nodes: [],
},
commitCount: 1,
},
];
......@@ -3,7 +3,6 @@ import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
import OnDemandScansApp from 'ee/on_demand_scans/components/on_demand_scans_app.vue';
import OnDemandScansForm from 'ee/on_demand_scans/components/on_demand_scans_form.vue';
import OnDemandScansFormOld from 'ee/on_demand_scans/components/on_demand_scans_form_old.vue';
import OnDemandScansEmptyState from 'ee/on_demand_scans/components/on_demand_scans_empty_state.vue';
const helpPagePath = `${TEST_HOST}/application_security/dast/index#on-demand-scans`;
......@@ -17,12 +16,18 @@ describe('OnDemandScansApp', () => {
let wrapper;
const findOnDemandScansEmptyState = () => wrapper.find(OnDemandScansEmptyState);
const findOnDemandScansForm = () => wrapper.find(OnDemandScansForm);
const expectEmptyState = () => {
expect(wrapper.contains(OnDemandScansForm)).toBe(false);
expect(wrapper.contains(OnDemandScansEmptyState)).toBe(true);
};
const expectForm = () => {
expect(wrapper.contains(OnDemandScansForm)).toBe(true);
expect(wrapper.contains(OnDemandScansEmptyState)).toBe(false);
};
const createComponent = options => {
wrapper = shallowMount(
OnDemandScansApp,
......@@ -43,16 +48,16 @@ describe('OnDemandScansApp', () => {
);
};
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('empty state', () => {
beforeEach(() => {
createComponent();
});
it('renders an empty state by default', () => {
expectEmptyState();
});
......@@ -65,30 +70,6 @@ describe('OnDemandScansApp', () => {
});
});
describe.each`
description | securityOnDemandScansSiteProfilesFeatureFlag | expectedComponent | unexpectedComponent
${'enabled'} | ${true} | ${OnDemandScansForm} | ${OnDemandScansFormOld}
${'disabled'} | ${false} | ${OnDemandScansFormOld} | ${OnDemandScansForm}
`(
'with :security_on_demand_scans_site_profiles_feature_flag $description',
({ securityOnDemandScansSiteProfilesFeatureFlag, expectedComponent, unexpectedComponent }) => {
const findOnDemandScansForm = () => wrapper.find(expectedComponent);
const expectForm = () => {
expect(wrapper.contains(expectedComponent)).toBe(true);
expect(wrapper.contains(unexpectedComponent)).toBe(false);
expect(wrapper.contains(OnDemandScansEmptyState)).toBe(false);
};
beforeEach(() => {
createComponent({
provide: {
glFeatures: {
securityOnDemandScansSiteProfilesFeatureFlag,
},
},
});
});
describe('form', () => {
beforeEach(async () => {
findOnDemandScansEmptyState().vm.$emit('createNewScan');
......@@ -99,11 +80,13 @@ describe('OnDemandScansApp', () => {
expectForm();
});
it('passes correct props to GlEmptyState', () => {
it('passes correct props to OnDemandScansForm', () => {
expect(findOnDemandScansForm().props()).toMatchObject({
defaultBranch,
helpPagePath,
projectPath,
defaultBranch,
profilesLibraryPath,
newSiteProfilePath,
});
});
......@@ -114,6 +97,4 @@ describe('OnDemandScansApp', () => {
expectEmptyState();
});
});
},
);
});
import { shallowMount } from '@vue/test-utils';
import { GlForm } from '@gitlab/ui';
import { TEST_HOST } from 'helpers/test_constants';
import OnDemandScansForm from 'ee/on_demand_scans/components/on_demand_scans_form_old.vue';
import runDastScanMutation from 'ee/on_demand_scans/graphql/run_dast_scan.mutation.graphql';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import { redirectTo } from '~/lib/utils/url_utility';
const helpPagePath = `${TEST_HOST}/application_security/dast/index#on-demand-scans`;
const projectPath = 'group/project';
const defaultBranch = 'master';
const targetUrl = 'http://example.com';
const pipelineUrl = `${TEST_HOST}/${projectPath}/pipelines/123`;
jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility', () => ({
isAbsolute: jest.requireActual('~/lib/utils/url_utility').isAbsolute,
redirectTo: jest.fn(),
}));
describe('OnDemandScansApp', () => {
let wrapper;
const findForm = () => wrapper.find(GlForm);
const findTargetUrlInput = () => wrapper.find('[data-testid="target-url-input"]');
const submitForm = () => findForm().vm.$emit('submit', { preventDefault: () => {} });
const createComponent = ({ props = {}, computed = {} } = {}) => {
wrapper = shallowMount(OnDemandScansForm, {
attachToDocument: true,
propsData: {
helpPagePath,
projectPath,
defaultBranch,
...props,
},
computed,
mocks: {
$apollo: {
mutate: jest.fn(),
},
},
});
};
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
it('renders properly', () => {
expect(wrapper.isVueInstance()).toBe(true);
});
describe('computed props', () => {
describe('formData', () => {
it('returns an object with a key:value mapping from the form object including the project path', () => {
wrapper.vm.form = {
targetUrl: {
value: targetUrl,
state: null,
feedback: '',
},
};
expect(wrapper.vm.formData).toEqual({
projectPath,
targetUrl,
});
});
});
describe('formHasErrors', () => {
it('returns true if any of the fields are invalid', () => {
wrapper.vm.form = {
targetUrl: {
value: targetUrl,
state: false,
feedback: '',
},
foo: {
value: 'bar',
state: null,
},
};
expect(wrapper.vm.formHasErrors).toBe(true);
});
it('returns false if none of the fields are invalid', () => {
wrapper.vm.form = {
targetUrl: {
value: targetUrl,
state: null,
feedback: '',
},
foo: {
value: 'bar',
state: null,
},
};
expect(wrapper.vm.formHasErrors).toBe(false);
});
});
describe('someFieldEmpty', () => {
it('returns true if any of the fields are empty', () => {
wrapper.vm.form = {
targetUrl: {
value: '',
state: false,
feedback: '',
},
foo: {
value: 'bar',
state: null,
},
};
expect(wrapper.vm.someFieldEmpty).toBe(true);
});
it('returns false if no field is empty', () => {
wrapper.vm.form = {
targetUrl: {
value: targetUrl,
state: null,
feedback: '',
},
foo: {
value: 'bar',
state: null,
},
};
expect(wrapper.vm.someFieldEmpty).toBe(false);
});
});
describe('isSubmitDisabled', () => {
it.each`
formHasErrors | someFieldEmpty | expected
${true} | ${true} | ${true}
${true} | ${false} | ${true}
${false} | ${true} | ${true}
${false} | ${false} | ${false}
`(
'is $expected when formHasErrors is $formHasErrors and someFieldEmpty is $someFieldEmpty',
({ formHasErrors, someFieldEmpty, expected }) => {
createComponent({
computed: {
formHasErrors: () => formHasErrors,
someFieldEmpty: () => someFieldEmpty,
},
});
expect(wrapper.vm.isSubmitDisabled).toBe(expected);
},
);
});
});
describe('target URL input', () => {
it.each(['asd', 'example.com'])('is marked as invalid provided an invalid URL', async value => {
const input = findTargetUrlInput();
input.vm.$emit('input', value);
await wrapper.vm.$nextTick();
expect(wrapper.vm.form.targetUrl).toEqual({
value,
state: false,
feedback: 'Please enter a valid URL format, ex: http://www.example.com/home',
});
expect(input.attributes().state).toBeUndefined();
});
it('is marked as valid provided a valid URL', async () => {
const input = findTargetUrlInput();
input.vm.$emit('input', targetUrl);
await wrapper.vm.$nextTick();
expect(wrapper.vm.form.targetUrl).toEqual({
value: targetUrl,
state: true,
feedback: null,
});
expect(input.attributes().state).toBe('true');
});
});
describe('submission', () => {
describe('on success', () => {
beforeEach(async () => {
jest
.spyOn(wrapper.vm.$apollo, 'mutate')
.mockResolvedValue({ data: { runDastScan: { pipelineUrl, errors: [] } } });
const input = findTargetUrlInput();
input.vm.$emit('input', targetUrl);
submitForm();
});
it('sets loading state', () => {
expect(wrapper.vm.loading).toBe(true);
});
it('triggers GraphQL mutation', () => {
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: runDastScanMutation,
variables: {
scanType: 'PASSIVE',
branch: 'master',
targetUrl,
projectPath,
},
});
});
it('redirects to the URL provided in the response', () => {
expect(redirectTo).toHaveBeenCalledWith(pipelineUrl);
});
});
describe('on top-level error', () => {
beforeEach(async () => {
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockRejectedValue();
const input = findTargetUrlInput();
input.vm.$emit('input', targetUrl);
submitForm();
});
it('resets loading state', () => {
expect(wrapper.vm.loading).toBe(false);
});
it('shows an error flash', () => {
expect(createFlash).toHaveBeenCalledWith('Could not run the scan. Please try again.');
});
});
describe('on errors as data', () => {
beforeEach(async () => {
const errors = ['A', 'B', 'C'];
jest
.spyOn(wrapper.vm.$apollo, 'mutate')
.mockResolvedValue({ data: { runDastScan: { pipelineUrl: null, errors } } });
const input = findTargetUrlInput();
input.vm.$emit('input', targetUrl);
submitForm();
});
it('resets loading state', () => {
expect(wrapper.vm.loading).toBe(false);
});
it('shows an error flash', () => {
expect(createFlash).toHaveBeenCalledWith('Could not run the scan: A, B, C');
});
});
});
});
/**
* @deprecated
* DO NOT USE! This causes issues when `vue-test-utils` is used elsewhere.
* This function will be removed in https://gitlab.com/gitlab-org/gitlab/issues/9594.
*/
export function getChildInstances(vm, WrappedComponent) {
return vm.$children.filter(child => child instanceof WrappedComponent);
}
export function mouseEvent(el, eventType) {
const event = document.createEvent('MouseEvent');
event.initMouseEvent(eventType);
el.dispatchEvent(event);
}
import { shallowMount } from '@vue/test-utils';
import { GlDeprecatedButton } from '@gitlab/ui';
import { GlButton } from '@gitlab/ui';
import Project from 'ee/storage_counter/components/project.vue';
import ProjectAvatar from '~/vue_shared/components/project_avatar/default.vue';
import { numberToHumanSize } from '~/lib/utils/number_utils';
......@@ -54,11 +54,11 @@ describe('Storage Counter project component', () => {
it('toggles isOpen', () => {
expect(wrapper.vm.isOpen).toEqual(false);
wrapper.find(GlDeprecatedButton).vm.$emit('click');
wrapper.find(GlButton).vm.$emit('click');
expect(wrapper.vm.isOpen).toEqual(true);
wrapper.find(GlDeprecatedButton).vm.$emit('click');
wrapper.find(GlButton).vm.$emit('click');
expect(wrapper.vm.isOpen).toEqual(false);
});
......
......@@ -176,6 +176,7 @@ RSpec.describe ProjectsHelper do
projects/on_demand_scans#index
projects/dast_profiles#index
projects/dast_site_profiles#new
projects/dast_site_profiles#edit
projects/dast_scanner_profiles#new
projects/dependencies#index
projects/licenses#index
......@@ -195,6 +196,7 @@ RSpec.describe ProjectsHelper do
projects/on_demand_scans#index
projects/dast_profiles#index
projects/dast_site_profiles#new
projects/dast_site_profiles#edit
projects/dast_scanner_profiles#new
]
end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe ::Gitlab::BackgroundMigration::PopulateResolvedOnDefaultBranchColumn do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:pipelines) { table(:ci_pipelines) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:findings) { table(:vulnerability_occurrences) }
let(:builds) { table(:ci_builds) }
let(:scanners) { table(:vulnerability_scanners) }
let(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
let(:namespace) { namespaces.create!(name: "foo", path: "bar") }
describe '#perform' do
let!(:project_1) { projects.create!(namespace_id: namespace.id) }
let!(:project_2) { projects.create!(namespace_id: namespace.id) }
let(:utility_class) { described_class::PopulateResolvedOnDefaultBranchColumnForProject }
subject(:populate_resolved_on_default_branch_column) { described_class.new.perform([project_1.id, project_2.id]) }
before do
allow(utility_class).to receive(:perform)
end
it 'calls `PopulateResolvedOnDefaultBranchColumnForProject.perform` for each project by given ids' do
populate_resolved_on_default_branch_column
expect(utility_class).to have_received(:perform).twice
expect(utility_class).to have_received(:perform).with(project_1.id)
expect(utility_class).to have_received(:perform).with(project_2.id)
end
end
describe EE::Gitlab::BackgroundMigration::PopulateResolvedOnDefaultBranchColumn::PopulateResolvedOnDefaultBranchColumnForProject do
describe '.perform' do
let(:project_id) { 1 }
let(:mock_utility_object) { instance_double(described_class, perform: true) }
subject(:populate_for_project) { described_class.perform(project_id) }
before do
allow(described_class).to receive(:new).and_return(mock_utility_object)
end
it 'instantiates the utility service object and calls #perform on it' do
populate_for_project
expect(described_class).to have_received(:new).with(project_id)
expect(mock_utility_object).to have_received(:perform)
end
end
describe '#perform' do
let(:user) { users.create!(name: 'John Doe', email: 'test@example.com', projects_limit: 5) }
let(:project) { projects.create!(namespace_id: namespace.id) }
let(:pipeline) { pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
let(:utility_object) { described_class.new(project.id) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'bandit', name: 'Bandit') }
let(:artifact_model) { EE::Gitlab::BackgroundMigration::PopulateResolvedOnDefaultBranchColumn::JobArtifact }
let(:artifact_fixture_path) { Rails.root.join('ee/spec/fixtures/security_reports/master/gl-sast-report.json') }
let(:sha_attribute) { Gitlab::Database::ShaAttribute.new }
let(:vulnerability_identifier) do
vulnerability_identifiers.create!(
project_id: project.id,
name: 'identifier',
fingerprint: sha_attribute.serialize('e6dd15eda2137be0034977a85b300a94a4f243a3'),
external_type: 'bar',
external_id: 'zoo')
end
let(:disappeared_vulnerability) do
vulnerabilities.create!(
project_id: project.id,
author_id: user.id,
title: 'Vulnerability',
severity: 5,
confidence: 5,
report_type: 5
)
end
let(:existing_vulnerability) do
vulnerabilities.create!(
project_id: project.id,
author_id: user.id,
title: 'Vulnerability',
severity: 5,
confidence: 5,
report_type: 5
)
end
subject(:populate_for_project) { utility_object.perform }
before do
build = builds.create!(commit_id: pipeline.id, retried: false, type: 'Ci::Build')
artifact = artifact_model.new(project_id: project.id, job_id: build.id, file_type: 5, file_format: 1)
artifact.file = fixture_file_upload(artifact_fixture_path, 'application/json')
artifact.save!
findings.create!(
project_id: project.id,
vulnerability_id: existing_vulnerability.id,
severity: 5,
confidence: 5,
report_type: 5,
scanner_id: scanner.id,
primary_identifier_id: vulnerability_identifier.id,
project_fingerprint: 'foo',
location_fingerprint: sha_attribute.serialize('d869ba3f0b3347eb2749135a437dc07c8ae0f420'),
uuid: SecureRandom.uuid,
name: 'Solar blast vulnerability',
metadata_version: '1',
raw_metadata: '')
allow(::Gitlab::CurrentSettings).to receive(:default_branch_name).and_return(:master)
end
it 'sets `resolved_on_default_branch` attribute of disappeared vulnerabilities' do
expect { populate_for_project }.to change { disappeared_vulnerability.reload[:resolved_on_default_branch] }.from(false).to(true)
.and not_change { existing_vulnerability.reload[:resolved_on_default_branch] }
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20200806100713_schedule_populate_resolved_on_default_branch_column.rb')
RSpec.describe SchedulePopulateResolvedOnDefaultBranchColumn do
before do
allow_any_instance_of(Gitlab).to receive(:ee?).and_return(ee?)
end
around do |example|
Timecop.freeze { Sidekiq::Testing.fake! { example.run } }
end
context 'when the Gitlab instance is CE' do
let(:ee?) { false }
it 'does not run the migration' do
expect { migrate! }.not_to change { BackgroundMigrationWorker.jobs.size }
end
end
context 'when the Gitlab instance is EE' do
let(:ee?) { true }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:users) { table(:users) }
let(:namespace) { namespaces.create!(name: "foo", path: "bar") }
let!(:project_1) { projects.create!(namespace_id: namespace.id) }
let!(:project_2) { projects.create!(namespace_id: namespace.id) }
let!(:project_3) { projects.create!(namespace_id: namespace.id) }
let(:user) { users.create!(name: 'John Doe', email: 'test@example.com', projects_limit: 1) }
let(:vulnerability_data) do
{
author_id: user.id,
title: 'Vulnerability',
severity: 5,
confidence: 5,
report_type: 5
}
end
before do
vulnerabilities.create!(**vulnerability_data, project_id: project_1.id)
vulnerabilities.create!(**vulnerability_data, project_id: project_2.id)
stub_const("#{described_class.name}::BATCH_SIZE", 1)
end
it 'schedules the background jobs', :aggregate_failures do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to be(2)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(5.minutes, project_1.id)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(10.minutes, project_2.id)
end
end
end
......@@ -105,14 +105,6 @@ RSpec.describe Ci::BuildDependencies do
end
it { is_expected.to contain_exactly(dependency) }
context 'with the feature flag disabled' do
before do
stub_feature_flags(ci_expand_names_for_cross_pipeline_artifacts: false)
end
it { is_expected.to be_empty }
end
end
end
......
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# rubocop:disable Style/Documentation
class PopulateResolvedOnDefaultBranchColumn
def perform(*); end
end
end
end
Gitlab::BackgroundMigration::PopulateResolvedOnDefaultBranchColumn.prepend_if_ee('EE::Gitlab::BackgroundMigration::PopulateResolvedOnDefaultBranchColumn')
# frozen_string_literal: true
module Gitlab
module Ci
module Pipeline
module Artifact
class CodeCoverage
def initialize(pipeline_artifact)
@pipeline_artifact = pipeline_artifact
end
def for_files(filenames)
coverage_files = raw_report["files"].select { |key| filenames.include?(key) }
{ files: coverage_files }
end
private
def raw_report
@raw_report ||= Gitlab::Json.parse(@pipeline_artifact.file.read)
end
end
end
end
end
end
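A minimal sketch of how this new artifact reader is exercised, mirroring the `Ci::GenerateCoverageReportsService` change earlier in this commit. Illustrative only; it assumes a pipeline whose code coverage artifact matches the multi-file fixture added below.

```ruby
# Illustrative only; names come from other hunks in this commit.
artifact = head_pipeline.pipeline_artifacts.find_with_code_coverage
coverage = Gitlab::Ci::Pipeline::Artifact::CodeCoverage.new(artifact)

coverage.for_files(%w[file_a.rb file_b.rb])
# => { files: { "file_a.rb" => { "1" => 1, "2" => 1, "3" => 1 },
#               "file_b.rb" => { "1" => 0, "2" => 0, "3" => 0 } } }
```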
......@@ -45,9 +45,7 @@ module Gitlab
has_capability?(project, category, :maintainer, labels)
end
def markdown_name(timezone_experiment: false, author: nil)
return @markdown_name unless timezone_experiment
def markdown_name(author: nil)
"#{@markdown_name} (#{utc_offset_text(author)})"
end
......
......@@ -122,7 +122,11 @@ module Gitlab
end
def group_name_regex
@group_name_regex ||= /\A[\p{Alnum}\u{00A9}-\u{1f9ff}_][\p{Alnum}\p{Pd}\u{00A9}-\u{1f9ff}_()\. ]*\z/.freeze
@group_name_regex ||= /\A#{group_name_regex_chars}\z/.freeze
end
def group_name_regex_chars
@group_name_regex_chars ||= /[\p{Alnum}\u{00A9}-\u{1f9ff}_][\p{Alnum}\p{Pd}\u{00A9}-\u{1f9ff}_()\. ]*/.freeze
end
def group_name_regex_message
......
......@@ -91,7 +91,7 @@ module Gitlab
end
def expiry(event)
return event[:expiry] if event[:expiry].present?
return event[:expiry].days if event[:expiry].present?
event[:aggregation].to_sym == :daily ? DEFAULT_DAILY_KEY_EXPIRY_LENGTH : DEFAULT_WEEKLY_KEY_EXPIRY_LENGTH
end
......
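A small illustration of why the `.days` call matters here, assuming (as the spec changes below suggest) that an event's `expiry` is configured as a number of days such as 84:

```ruby
# Illustrative only; assumes an event defined with `expiry: 84` (84 days = 12 weeks).
84        # the bare integer the method previously returned, easily treated as 84 seconds downstream
84.days   # => ActiveSupport::Duration of 7_257_600 seconds (12 weeks), matching the spec below
```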
......@@ -16907,18 +16907,12 @@ msgstr ""
msgid "OnDemandScans|Could not run the scan. Please try again."
msgstr ""
msgid "OnDemandScans|Could not run the scan: %{backendErrorMessage}"
msgstr ""
msgid "OnDemandScans|Create a new site profile"
msgstr ""
msgid "OnDemandScans|Create new DAST scan"
msgstr ""
msgid "OnDemandScans|DAST will scan the target URL and any discovered sub URLs."
msgstr ""
msgid "OnDemandScans|Manage profiles"
msgstr ""
......@@ -16940,18 +16934,9 @@ msgstr ""
msgid "OnDemandScans|Passive"
msgstr ""
msgid "OnDemandScans|Passive DAST Scan"
msgstr ""
msgid "OnDemandScans|Please enter a valid URL format, ex: http://www.example.com/home"
msgstr ""
msgid "OnDemandScans|Run scan"
msgstr ""
msgid "OnDemandScans|Run this scan"
msgstr ""
msgid "OnDemandScans|Scan mode"
msgstr ""
......@@ -16967,9 +16952,6 @@ msgstr ""
msgid "OnDemandScans|Site profiles"
msgstr ""
msgid "OnDemandScans|Target URL"
msgstr ""
msgid "OnDemandScans|Use existing site profile"
msgstr ""
......
......@@ -24,5 +24,15 @@ FactoryBot.define do
)
end
end
trait :with_code_coverage_with_multiple_files do
after(:build) do |artifact, _evaluator|
artifact.file = fixture_file_upload(
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage_with_multiple_files.json'), 'application/json'
)
end
size { file.size }
end
end
end
......@@ -121,6 +121,12 @@ FactoryBot.define do
end
end
trait :with_coverage_report_artifact do
after(:build) do |pipeline, evaluator|
pipeline.pipeline_artifacts << build(:ci_pipeline_artifact, pipeline: pipeline, project: pipeline.project)
end
end
trait :with_terraform_reports do
status { :success }
......
......@@ -169,7 +169,7 @@ FactoryBot.define do
merge_request.head_pipeline = build(
:ci_pipeline,
:success,
:with_coverage_reports,
:with_coverage_report_artifact,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
......
{
"files": {
"file_a.rb": {
"1": 1,
"2": 1,
"3": 1
},
"file_b.rb": {
"1": 0,
"2": 0,
"3": 0
}
}
}
......@@ -111,7 +111,7 @@ exports[`Design management index page designs renders designs list and header wi
>
<gl-button-stub
category="primary"
class="gl-mr-3 js-select-all"
class="gl-mr-4 js-select-all"
icon=""
size="small"
variant="link"
......
......@@ -200,7 +200,7 @@ describe('Snippet Edit app', () => {
it.each`
projectPath | snippetArg | expectation
${''} | ${[]} | ${`${relativeUrlRoot}-/snippets`}
${''} | ${[]} | ${`${relativeUrlRoot}/-/snippets`}
${'project/path'} | ${[]} | ${`${relativeUrlRoot}project/path/-/snippets`}
${''} | ${[createTestSnippet()]} | ${TEST_WEB_URL}
${'project/path'} | ${[createTestSnippet()]} | ${TEST_WEB_URL}
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Artifact::CodeCoverage do
let(:pipeline_artifact) { create(:ci_pipeline_artifact, :with_code_coverage_with_multiple_files) }
let(:code_coverage) { described_class.new(pipeline_artifact) }
describe '#for_files' do
subject { code_coverage.for_files(filenames) }
context 'when code coverage has data' do
context 'when filenames is empty' do
let(:filenames) { %w() }
it 'returns hash without coverage' do
expect(subject).to match(files: {})
end
end
context 'when filenames do not match code coverage data' do
let(:filenames) { %w(demo.rb) }
it 'returns hash without coverage' do
expect(subject).to match(files: {})
end
end
context 'when filenames matches code coverage data' do
context 'when asking for one filename' do
let(:filenames) { %w(file_a.rb) }
it 'returns coverage for the given filename' do
expect(subject).to match(files: { "file_a.rb" => { "1" => 1, "2" => 1, "3" => 1 } })
end
end
context 'when asking for multiple filenames' do
let(:filenames) { %w(file_a.rb file_b.rb) }
it 'returns coverage for the given filenames' do
expect(subject).to match(
files: {
"file_a.rb" => {
"1" => 1,
"2" => 1,
"3" => 1
},
"file_b.rb" => {
"1" => 0,
"2" => 0,
"3" => 0
}
}
)
end
end
end
end
end
end
......@@ -170,23 +170,15 @@ RSpec.describe Gitlab::Danger::Teammate do
end
describe '#markdown_name' do
context 'when timezone_experiment == false' do
it 'returns markdown name as-is' do
expect(subject.markdown_name).to eq(options['markdown_name'])
expect(subject.markdown_name(timezone_experiment: false)).to eq(options['markdown_name'])
end
end
context 'when timezone_experiment == true' do
it 'returns markdown name with timezone info' do
expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options['markdown_name']} (UTC+2)")
expect(subject.markdown_name).to eq("#{options['markdown_name']} (UTC+2)")
end
context 'when offset is 1.5' do
let(:tz_offset_hours) { 1.5 }
it 'returns markdown name with timezone info, not truncated' do
expect(subject.markdown_name(timezone_experiment: true)).to eq("#{options['markdown_name']} (UTC+1.5)")
expect(subject.markdown_name).to eq("#{options['markdown_name']} (UTC+1.5)")
end
end
......@@ -209,8 +201,7 @@ RSpec.describe Gitlab::Danger::Teammate do
floored_offset_hours = subject.__send__(:floored_offset_hours)
utc_offset = floored_offset_hours >= 0 ? "+#{floored_offset_hours}" : floored_offset_hours
expect(subject.markdown_name(timezone_experiment: true, author: author)).to eq("#{options['markdown_name']} (UTC#{utc_offset}, #{diff_text})")
end
expect(subject.markdown_name(author: author)).to eq("#{options['markdown_name']} (UTC#{utc_offset}, #{diff_text})")
end
end
end
......
......@@ -3,14 +3,19 @@
require 'fast_spec_helper'
RSpec.describe Gitlab::Regex do
shared_examples_for 'project/group name regex' do
shared_examples_for 'project/group name chars regex' do
it { is_expected.to match('gitlab-ce') }
it { is_expected.to match('GitLab CE') }
it { is_expected.to match('100 lines') }
it { is_expected.to match('gitlab.git') }
it { is_expected.to match('Český název') }
it { is_expected.to match('Dash – is this') }
end
shared_examples_for 'project/group name regex' do
it_behaves_like 'project/group name chars regex'
it { is_expected.not_to match('?gitlab') }
it { is_expected.not_to match("Users's something") }
end
describe '.project_name_regex' do
......@@ -33,6 +38,16 @@ RSpec.describe Gitlab::Regex do
end
end
describe '.group_name_regex_chars' do
subject { described_class.group_name_regex_chars }
it_behaves_like 'project/group name chars regex'
it 'allows partial matches' do
is_expected.to match(',Valid name wrapped in invalid chars&')
end
end
describe '.project_name_regex_message' do
subject { described_class.project_name_regex_message }
......
......@@ -45,6 +45,32 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
it 'raise error if metrics of unknown aggregation' do
expect { described_class.track_event(entity1, 'unknown', Date.current) } .to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
it 'sets the keys in Redis to expire automatically after 12 weeks' do
described_class.track_event(entity1, "g_analytics_contribution")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{analytics}_contribution-*").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(12.weeks)
end
end
end
it 'sets the keys in Redis to expire automatically after 6 weeks by default' do
described_class.track_event(entity1, "g_compliance_dashboard")
Gitlab::Redis::SharedState.with do |redis|
keys = redis.scan_each(match: "g_{compliance}_dashboard-*").to_a
expect(keys).not_to be_empty
keys.each do |key|
expect(redis.ttl(key)).to be_within(5.seconds).of(6.weeks)
end
end
end
end
describe '.unique_events' do
......
......@@ -91,4 +91,22 @@ RSpec.describe Ci::PipelineArtifact, type: :model do
end
end
end
describe '.find_with_code_coverage' do
subject { Ci::PipelineArtifact.find_with_code_coverage }
context 'when pipeline artifact has a coverage report' do
let!(:coverage_report) { create(:ci_pipeline_artifact) }
it 'returns a pipeline artifact with a code coverage' do
expect(subject.file_type).to eq('code_coverage')
end
end
context 'when pipeline artifact does not have a coverage report' do
it 'returns nil' do
expect(subject).to be_nil
end
end
end
end
......@@ -2,12 +2,13 @@
require 'spec_helper'
RSpec.describe Ci::Pipeline, :mailer do
RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
include ProjectForksHelper
include StubRequests
let(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:namespace) { create_default(:namespace) }
let_it_be(:project) { create_default(:project, :repository) }
let(:pipeline) do
create(:ci_empty_pipeline, status: :created, project: project)
......@@ -1436,8 +1437,6 @@ RSpec.describe Ci::Pipeline, :mailer do
context 'when repository exists' do
using RSpec::Parameterized::TableSyntax
let(:project) { create(:project, :repository) }
where(:tag, :ref, :result) do
false | 'master' | true
false | 'non-existent-branch' | false
......@@ -1457,6 +1456,7 @@ RSpec.describe Ci::Pipeline, :mailer do
end
context 'when repository does not exist' do
let(:project) { create(:project) }
let(:pipeline) do
create(:ci_empty_pipeline, project: project, ref: 'master')
end
......@@ -1468,8 +1468,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
context 'with non-empty project' do
let(:project) { create(:project, :repository) }
let(:pipeline) do
create(:ci_pipeline,
project: project,
......@@ -1596,8 +1594,6 @@ RSpec.describe Ci::Pipeline, :mailer do
describe '#modified_paths' do
context 'when old and new revisions are set' do
let(:project) { create(:project, :repository) }
before do
pipeline.update(before_sha: '1234abcd', sha: '2345bcde')
end
......@@ -1866,8 +1862,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
describe '.latest_pipeline_per_commit' do
let(:project) { create(:project) }
let!(:commit_123_ref_master) do
create(
:ci_empty_pipeline,
......@@ -1962,7 +1956,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
describe '.last_finished_for_ref_id' do
let(:project) { create(:project, :repository) }
let(:branch) { project.default_branch }
let(:ref) { project.ci_refs.take }
let(:config_source) { Enums::Ci::Pipeline.config_sources[:parameter_source] }
......@@ -2452,7 +2445,6 @@ RSpec.describe Ci::Pipeline, :mailer do
end
describe "#merge_requests_as_head_pipeline" do
let(:project) { create(:project) }
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: project, ref: 'master', sha: 'a288a022a53a5a944fae87bcec6efc87b7061808') }
it "returns merge requests whose `diff_head_sha` matches the pipeline's SHA" do
......@@ -2685,7 +2677,8 @@ RSpec.describe Ci::Pipeline, :mailer do
end
describe 'notifications when pipeline success or failed' do
let(:project) { create(:project, :repository) }
let(:namespace) { create(:namespace) }
let(:project) { create(:project, :repository, namespace: namespace) }
let(:pipeline) do
create(:ci_pipeline,
......@@ -3260,7 +3253,8 @@ RSpec.describe Ci::Pipeline, :mailer do
end
describe '#parent_pipeline' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
context 'when pipeline is triggered by a pipeline from the same project' do
......@@ -3315,7 +3309,7 @@ RSpec.describe Ci::Pipeline, :mailer do
end
describe '#child_pipelines' do
let(:project) { create(:project) }
let_it_be(:project) { create(:project) }
let(:pipeline) { create(:ci_pipeline, project: project) }
context 'when pipeline triggered other pipelines on same project' do
......
......@@ -1890,12 +1890,6 @@ RSpec.describe MergeRequest do
subject { merge_request.find_coverage_reports }
context 'when head pipeline has coverage reports' do
let!(:job) do
create(:ci_build, options: { artifacts: { reports: { cobertura: ['cobertura-coverage.xml'] } } }, pipeline: pipeline)
end
let!(:artifacts_metadata) { create(:ci_job_artifact, :metadata, job: job) }
context 'when reactive cache worker is parsing results asynchronously' do
it 'returns status' do
expect(subject[:status]).to eq(:parsing)
......
......@@ -588,6 +588,21 @@ RSpec.describe Namespace do
end
end
describe ".clean_name" do
context "when the name complies with the group name regex" do
it "returns the name as is" do
valid_name = "Hello - World _ (Hi.)"
expect(described_class.clean_name(valid_name)).to eq(valid_name)
end
end
context "when the name does not comply with the group name regex" do
it "sanitizes the name by replacing all invalid char sequences with a space" do
expect(described_class.clean_name("Green'! Test~~~")).to eq("Green Test")
end
end
end
describe "#default_branch_protection" do
let(:namespace) { create(:namespace) }
let(:default_branch_protection) { nil }
......
......@@ -15,7 +15,11 @@ RSpec.describe Ci::GenerateCoverageReportsService do
let!(:head_pipeline) { merge_request.head_pipeline }
let!(:base_pipeline) { nil }
it 'returns status and data' do
it 'returns status and data', :aggregate_failures do
expect_next_instance_of(Gitlab::Ci::Pipeline::Artifact::CodeCoverage) do |instance|
expect(instance).to receive(:for_files).with(merge_request.new_paths).and_call_original
end
expect(subject[:status]).to eq(:parsed)
expect(subject[:data]).to eq(files: {})
end
......@@ -28,8 +32,7 @@ RSpec.describe Ci::GenerateCoverageReportsService do
let!(:base_pipeline) { nil }
before do
build = create(:ci_build, pipeline: head_pipeline, project: head_pipeline.project)
create(:ci_job_artifact, :coverage_with_corrupted_data, job: build, project: project)
head_pipeline.pipeline_artifacts.destroy_all # rubocop: disable Cop/DestroyAll
end
it 'returns status and error message' do
......
......@@ -48,15 +48,6 @@ RSpec.describe Projects::UpdatePagesConfigurationService do
expect(subject).to include(status: :success)
end
end
context 'when an error occurs' do
it 'returns an error object' do
e = StandardError.new("Failure")
allow(service).to receive(:reload_daemon).and_raise(e)
expect(subject).to eq(status: :error, message: "Failure", exception: e)
end
end
end
context 'when pages are not deployed' do
......
......@@ -15,6 +15,7 @@ require 'rspec/retry'
require 'rspec-parameterized'
require 'shoulda/matchers'
require 'test_prof/recipes/rspec/let_it_be'
require 'test_prof/factory_default'
rspec_profiling_is_configured =
ENV['RSPEC_PROFILING_POSTGRES_URL'].present? ||
......@@ -360,3 +361,6 @@ Rugged::Settings['search_path_global'] = Rails.root.join('tmp/tests').to_s
# Disable timestamp checks for invisible_captcha
InvisibleCaptcha.timestamp_enabled = false
# Initialize FactoryDefault to use create_default helper
TestProf::FactoryDefault.init
# frozen_string_literal: true
RSpec.configure do |config|
config.after do |ex|
TestProf::FactoryDefault.reset unless ex.metadata[:factory_default] == :keep
end
config.after(:all) do
TestProf::FactoryDefault.reset
end
end
......@@ -17,14 +17,6 @@ RSpec.describe PagesUpdateConfigurationWorker do
subject.perform(project.id)
end
it "raises an exception if the service returned an error" do
allow_next_instance_of(Projects::UpdatePagesConfigurationService) do |service|
allow(service).to receive(:execute).and_return({ exception: ":boom:" })
end
expect { subject.perform(project.id) }.to raise_error(":boom:")
end
it_behaves_like "an idempotent worker" do
let(:job_args) { [project.id] }
let(:pages_dir) { Dir.mktmpdir }
......
......@@ -5954,6 +5954,11 @@ immediate@~3.0.5:
resolved "https://registry.yarnpkg.com/immediate/-/immediate-3.0.6.tgz#9db1dbd0faf8de6fbe0f5dd5e56bb606280de69b"
integrity sha1-nbHb0Pr43m++D13V5Wu2BigN5ps=
immer@^7.0.7:
version "7.0.7"
resolved "https://registry.yarnpkg.com/immer/-/immer-7.0.7.tgz#9dfe713d49bf871cc59aedfce59b1992fa37a977"
integrity sha512-Q8yYwVADJXrNfp1ZUAh4XDHkcoE3wpdpb4mC5abDSajs2EbW8+cGdPyAnglMyLnm7EF6ojD2xBFX7L5i4TIytw==
import-fresh@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-2.0.0.tgz#d81355c15612d386c61f9ddd3922d4304822a546"
......