Commit be130b7b authored by Michael Kozono's avatar Michael Kozono

Merge branch 'master' into...

Merge branch 'master' into '217477-remove-feature-flags-to-make-registry-table-ssot-for-job-artifacts'

# Conflicts:
#   ee/spec/workers/geo/secondary/registry_consistency_worker_spec.rb
parents 3b4eb406 389ff4da
...@@ -2,7 +2,7 @@ import { slugify } from '~/lib/utils/text_utility'; ...@@ -2,7 +2,7 @@ import { slugify } from '~/lib/utils/text_utility';
import createGqClient, { fetchPolicies } from '~/lib/graphql'; import createGqClient, { fetchPolicies } from '~/lib/graphql';
import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format'; import { SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
import { getIdFromGraphQLId } from '~/graphql_shared/utils'; import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import { parseTemplatingVariables } from './variable_mapping'; import { mergeURLVariables, parseTemplatingVariables } from './variable_mapping';
import { DATETIME_RANGE_TYPES } from '~/lib/utils/constants'; import { DATETIME_RANGE_TYPES } from '~/lib/utils/constants';
import { timeRangeToParams, getRangeType } from '~/lib/utils/datetime_range'; import { timeRangeToParams, getRangeType } from '~/lib/utils/datetime_range';
import { isSafeURL, mergeUrlParams } from '~/lib/utils/url_utility'; import { isSafeURL, mergeUrlParams } from '~/lib/utils/url_utility';
...@@ -289,7 +289,7 @@ export const mapToDashboardViewModel = ({ ...@@ -289,7 +289,7 @@ export const mapToDashboardViewModel = ({
}) => { }) => {
return { return {
dashboard, dashboard,
variables: parseTemplatingVariables(templating), variables: mergeURLVariables(parseTemplatingVariables(templating)),
links: links.map(mapLinksToViewModel), links: links.map(mapLinksToViewModel),
panelGroups: panel_groups.map(mapToPanelGroupViewModel), panelGroups: panel_groups.map(mapToPanelGroupViewModel),
}; };
......
import { isString } from 'lodash'; import { isString } from 'lodash';
import { templatingVariablesFromUrl } from '../utils';
import { VARIABLE_TYPES } from '../constants'; import { VARIABLE_TYPES } from '../constants';
/** /**
...@@ -164,4 +165,39 @@ export const parseTemplatingVariables = ({ variables = {} } = {}) => ...@@ -164,4 +165,39 @@ export const parseTemplatingVariables = ({ variables = {} } = {}) =>
return acc; return acc;
}, {}); }, {});
/**
 * Merges dashboard yml template variables with values supplied
 * through the URL.
 *
 * Custom variables are declared in the dashboard yml file; their
 * values may be overridden via URL params. Only params that match
 * a variable declared in the yml file are kept, so the dashboard
 * definition stays the single source of truth for which variables
 * exist.
 *
 * This method can be improved further. See the below issue
 * https://gitlab.com/gitlab-org/gitlab/-/issues/217713
 *
 * @param {Object} varsFromYML template variables from yml file
 * @returns {Object} yml variables, with URL-provided values applied
 */
export const mergeURLVariables = (varsFromYML = {}) => {
  const varsFromURL = templatingVariablesFromUrl();
  return Object.keys(varsFromYML).reduce((merged, name) => {
    const ymlVariable = varsFromYML[name];
    const hasURLValue = Object.prototype.hasOwnProperty.call(varsFromURL, name);
    // A URL value overrides only the variable's `value`; the rest of
    // the yml definition (type, label, options, ...) is preserved.
    merged[name] = hasURLValue ? { ...ymlVariable, value: varsFromURL[name] } : ymlVariable;
    return merged;
  }, {});
};
export default {}; export default {};
...@@ -170,11 +170,10 @@ export const convertVariablesForURL = variables => ...@@ -170,11 +170,10 @@ export const convertVariablesForURL = variables =>
* begin with a constant prefix so that it doesn't collide with * begin with a constant prefix so that it doesn't collide with
* other URL params. * other URL params.
* *
* @param {String} New URL * @param {String} search URL
* @returns {Object} The custom variables defined by the user in the URL * @returns {Object} The custom variables defined by the user in the URL
*/ */
export const templatingVariablesFromUrl = (search = window.location.search) => {
export const getPromCustomVariablesFromUrl = (search = window.location.search) => {
const params = queryToObject(search); const params = queryToObject(search);
// pick the params with variable prefix // pick the params with variable prefix
const paramsWithVars = pickBy(params, (val, key) => key.startsWith(VARIABLE_PREFIX)); const paramsWithVars = pickBy(params, (val, key) => key.startsWith(VARIABLE_PREFIX));
...@@ -353,39 +352,4 @@ export const barChartsDataParser = (data = []) => ...@@ -353,39 +352,4 @@ export const barChartsDataParser = (data = []) =>
{}, {},
); );
/**
 * Merges dashboard yml template variables with values supplied
 * through the URL.
 *
 * Custom variables are declared in the dashboard yml file; their
 * values may be overridden via URL params. Only params that match
 * a variable declared in the yml file are kept, so the dashboard
 * definition stays the single source of truth for which variables
 * exist.
 *
 * This method can be improved further. See the below issue
 * https://gitlab.com/gitlab-org/gitlab/-/issues/217713
 *
 * @param {Object} varsFromYML template variables from yml file
 * @returns {Object} yml variables, with URL-provided values applied
 */
export const mergeURLVariables = (varsFromYML = {}) => {
  const varsFromURL = getPromCustomVariablesFromUrl();
  const merged = {};
  for (const [name, ymlVariable] of Object.entries(varsFromYML)) {
    // A URL value overrides only the variable's `value`; the rest of
    // the yml definition is preserved.
    merged[name] = Object.prototype.hasOwnProperty.call(varsFromURL, name)
      ? { ...ymlVariable, value: varsFromURL[name] }
      : ymlVariable;
  }
  return merged;
};
export default {}; export default {};
...@@ -25,6 +25,11 @@ export default { ...@@ -25,6 +25,11 @@ export default {
containerClasses: ['dag-graph-container', 'gl-display-flex', 'gl-flex-direction-column'].join( containerClasses: ['dag-graph-container', 'gl-display-flex', 'gl-flex-direction-column'].join(
' ', ' ',
), ),
hoverFadeClasses: [
'gl-cursor-pointer',
'gl-transition-duration-slow',
'gl-transition-timing-function-ease',
].join(' '),
}, },
gitLabColorRotation: [ gitLabColorRotation: [
'#e17223', '#e17223',
...@@ -230,7 +235,10 @@ export default { ...@@ -230,7 +235,10 @@ export default {
.attr('id', d => { .attr('id', d => {
return this.createAndAssignId(d, 'uid', LINK_SELECTOR); return this.createAndAssignId(d, 'uid', LINK_SELECTOR);
}) })
.classed(`${LINK_SELECTOR} gl-cursor-pointer`, true); .classed(
`${LINK_SELECTOR} gl-transition-property-stroke-opacity ${this.$options.viewOptions.hoverFadeClasses}`,
true,
);
}, },
generateNodes(svg, nodeData) { generateNodes(svg, nodeData) {
...@@ -242,7 +250,10 @@ export default { ...@@ -242,7 +250,10 @@ export default {
.data(nodeData) .data(nodeData)
.enter() .enter()
.append('line') .append('line')
.classed(`${NODE_SELECTOR} gl-cursor-pointer`, true) .classed(
`${NODE_SELECTOR} gl-transition-property-stroke ${this.$options.viewOptions.hoverFadeClasses}`,
true,
)
.attr('id', d => { .attr('id', d => {
return this.createAndAssignId(d, 'uid', NODE_SELECTOR); return this.createAndAssignId(d, 'uid', NODE_SELECTOR);
}) })
......
<script>
import { __ } from '~/locale';
import { GlIcon, GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import flash from '~/flash';
import Poll from '~/lib/utils/poll';
// MR widget that polls a terraform-report endpoint and renders the
// first plan's add/change/delete resource counts.
export default {
  name: 'MRWidgetTerraformPlan',
  components: {
    GlIcon,
    GlLink,
    GlLoadingIcon,
    GlSprintf,
  },
  props: {
    // URL of the terraform reports endpoint polled for plan data.
    endpoint: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      loading: true,
      plans: {},
      // Poll instance is kept on the component so it can be stopped
      // on error and on teardown (previously it leaked).
      poll: null,
    };
  },
  computed: {
    // Resource counts; Number(undefined) yields NaN when a field is absent.
    addNum() {
      return Number(this.plan.create);
    },
    changeNum() {
      return Number(this.plan.update);
    },
    deleteNum() {
      return Number(this.plan.delete);
    },
    logUrl() {
      return this.plan.job_path;
    },
    // Only the first reported plan is displayed by this widget.
    plan() {
      const firstPlanKey = Object.keys(this.plans)[0];
      return this.plans[firstPlanKey] ?? {};
    },
    // NaN propagates through the sum, so any missing/non-numeric count
    // makes this false and the "Changes are unknown" copy is shown.
    validPlanValues() {
      return this.addNum + this.changeNum + this.deleteNum >= 0;
    },
  },
  created() {
    this.fetchPlans();
  },
  beforeDestroy() {
    // Fix: stop the request loop when the widget is removed from the
    // DOM; the poll previously kept running after destroy.
    this.poll?.stop();
  },
  methods: {
    // Polls `endpoint` until a plan is returned or an error occurs.
    fetchPlans() {
      this.loading = true;
      this.poll = new Poll({
        resource: {
          fetchPlans: () => axios.get(this.endpoint),
        },
        data: this.endpoint,
        method: 'fetchPlans',
        successCallback: ({ data }) => {
          this.plans = data;
          if (Object.keys(this.plan).length) {
            this.loading = false;
            this.poll.stop();
          }
        },
        errorCallback: () => {
          this.plans = {};
          this.loading = false;
          // Fix: stop polling on error; previously the poll kept firing
          // and re-flashing the error message indefinitely.
          this.poll.stop();
          flash(__('An error occurred while loading terraform report'));
        },
      });
      this.poll.makeRequest();
    },
  },
};
</script>
<template>
<section class="mr-widget-section">
<div class="mr-widget-body media d-flex flex-row">
<span class="append-right-default align-self-start align-self-lg-center">
<gl-icon name="status_warning" :size="24" />
</span>
<div class="d-flex flex-fill flex-column flex-md-row">
<div class="terraform-mr-plan-text normal d-flex flex-column flex-lg-row">
<p class="m-0 pr-1">{{ __('A terraform report was generated in your pipelines.') }}</p>
<gl-loading-icon v-if="loading" size="md" />
<p v-else-if="validPlanValues" class="m-0">
<gl-sprintf
:message="
__(
'Reported Resource Changes: %{addNum} to add, %{changeNum} to change, %{deleteNum} to delete',
)
"
>
<template #addNum>
<strong>{{ addNum }}</strong>
</template>
<template #changeNum>
<strong>{{ changeNum }}</strong>
</template>
<template #deleteNum>
<strong>{{ deleteNum }}</strong>
</template>
</gl-sprintf>
</p>
<p v-else class="m-0">{{ __('Changes are unknown') }}</p>
</div>
<div class="terraform-mr-plan-actions">
<gl-link
v-if="logUrl"
:href="logUrl"
target="_blank"
data-track-event="click_terraform_mr_plan_button"
data-track-label="mr_widget_terraform_mr_plan_button"
data-track-property="terraform_mr_plan_button"
class="btn btn-sm js-terraform-report-link"
rel="noopener"
>
{{ __('View full log') }}
<gl-icon name="external-link" />
</gl-link>
</div>
</div>
</div>
</section>
</template>
<script>
import { GlSkeletonLoading } from '@gitlab/ui';
import axios from '~/lib/utils/axios_utils';
import Poll from '~/lib/utils/poll';
import TerraformPlan from './terraform_plan.vue';
// Container widget that polls a terraform-report endpoint and renders
// one TerraformPlan row per returned plan.
export default {
  name: 'MRWidgetTerraformContainer',
  components: {
    GlSkeletonLoading,
    TerraformPlan,
  },
  props: {
    // URL of the terraform reports endpoint that is polled for plan data.
    endpoint: {
      type: String,
      required: true,
    },
  },
  data() {
    return {
      loading: true, // show skeleton until the first plan arrives or polling errors
      plans: {}, // plan reports, keyed by plan id
      poll: null, // Poll instance, kept so it can be stopped on teardown
    };
  },
  created() {
    this.fetchPlans();
  },
  beforeDestroy() {
    // Stop the request loop when the widget is removed from the DOM.
    this.poll.stop();
  },
  methods: {
    // Polls `endpoint` until at least one plan is returned, then stops.
    fetchPlans() {
      this.loading = true;
      this.poll = new Poll({
        resource: {
          fetchPlans: () => axios.get(this.endpoint),
        },
        data: this.endpoint,
        method: 'fetchPlans',
        successCallback: ({ data }) => {
          this.plans = data;
          if (Object.keys(this.plans).length) {
            this.loading = false;
            this.poll.stop();
          }
        },
        errorCallback: () => {
          // Sentinel entry: renders a single TerraformPlan with an empty
          // plan object, whose NaN counts trigger its error copy.
          this.plans = { bad_plan: {} };
          this.loading = false;
          this.poll.stop();
        },
      });
      this.poll.makeRequest();
    },
  },
};
</script>
<template>
<section class="mr-widget-section">
<div v-if="loading" class="mr-widget-body media">
<gl-skeleton-loading />
</div>
<terraform-plan
v-for="(plan, key) in plans"
v-else
:key="key"
:plan="plan"
class="mr-widget-body media"
/>
</section>
</template>
<script>
import { __ } from '~/locale';
import { GlIcon, GlLink, GlSprintf } from '@gitlab/ui';
// Presentational row for a single terraform plan report: shows the
// add/change/delete resource counts and a link to the job log.
export default {
  name: 'TerraformPlan',
  components: {
    GlIcon,
    GlLink,
    GlSprintf,
  },
  props: {
    // One plan report; fields read here: create, update, delete,
    // job_name, job_path.
    plan: {
      required: true,
      type: Object,
    },
  },
  computed: {
    // Resource counts; Number(undefined) yields NaN when a field is absent.
    addNum() {
      return Number(this.plan.create);
    },
    changeNum() {
      return Number(this.plan.update);
    },
    deleteNum() {
      return Number(this.plan.delete);
    },
    reportChangeText() {
      if (this.validPlanValues) {
        return __(
          'Reported Resource Changes: %{addNum} to add, %{changeNum} to change, %{deleteNum} to delete',
        );
      }
      // Shown when any count is missing/non-numeric (see validPlanValues).
      return __('Generating the report caused an error.');
    },
    reportHeaderText() {
      if (this.plan.job_name) {
        return __('The Terraform report %{name} was generated in your pipelines.');
      }
      return __('A Terraform report was generated in your pipelines.');
    },
    // NaN propagates through the sum, so any missing/non-numeric count
    // makes this false and the error copy is displayed instead.
    validPlanValues() {
      return this.addNum + this.changeNum + this.deleteNum >= 0;
    },
  },
};
</script>
<template>
<div class="gl-display-flex">
<span
class="gl-display-flex gl-align-items-center gl-justify-content-center append-right-default gl-align-self-start gl-mt-1"
>
<gl-icon name="status_warning" :size="24" />
</span>
<div class="gl-display-flex gl-flex-fill-1 gl-flex-direction-column flex-md-row">
<div class="terraform-mr-plan-text normal gl-display-flex gl-flex-direction-column">
<p class="gl-m-0 gl-pr-1">
<gl-sprintf :message="reportHeaderText">
<template #name>
<strong>{{ plan.job_name }}</strong>
</template>
</gl-sprintf>
</p>
<p class="gl-m-0">
<gl-sprintf :message="reportChangeText">
<template #addNum>
<strong>{{ addNum }}</strong>
</template>
<template #changeNum>
<strong>{{ changeNum }}</strong>
</template>
<template #deleteNum>
<strong>{{ deleteNum }}</strong>
</template>
</gl-sprintf>
</p>
</div>
<div>
<gl-link
v-if="plan.job_path"
:href="plan.job_path"
target="_blank"
data-track-event="click_terraform_mr_plan_button"
data-track-label="mr_widget_terraform_mr_plan_button"
data-track-property="terraform_mr_plan_button"
class="btn btn-sm js-terraform-report-link"
rel="noopener"
>
{{ __('View full log') }}
<gl-icon name="external-link" />
</gl-link>
</div>
</div>
</div>
</template>
...@@ -36,7 +36,7 @@ import CheckingState from './components/states/mr_widget_checking.vue'; ...@@ -36,7 +36,7 @@ import CheckingState from './components/states/mr_widget_checking.vue';
import eventHub from './event_hub'; import eventHub from './event_hub';
import notify from '~/lib/utils/notify'; import notify from '~/lib/utils/notify';
import SourceBranchRemovalStatus from './components/source_branch_removal_status.vue'; import SourceBranchRemovalStatus from './components/source_branch_removal_status.vue';
import TerraformPlan from './components/mr_widget_terraform_plan.vue'; import TerraformPlan from './components/terraform/mr_widget_terraform_container.vue';
import GroupedTestReportsApp from '../reports/components/grouped_test_reports_app.vue'; import GroupedTestReportsApp from '../reports/components/grouped_test_reports_app.vue';
import { setFaviconOverlay } from '../lib/utils/common_utils'; import { setFaviconOverlay } from '../lib/utils/common_utils';
import GroupedAccessibilityReportsApp from '../reports/accessibility_report/grouped_accessibility_reports_app.vue'; import GroupedAccessibilityReportsApp from '../reports/accessibility_report/grouped_accessibility_reports_app.vue';
......
import { __ } from '~/locale'; import { __ } from '~/locale';
import { generateToolbarItem } from './editor_service'; import { generateToolbarItem } from './editor_service';
import buildCustomHTMLRenderer from './services/build_custom_renderer';
export const CUSTOM_EVENTS = { export const CUSTOM_EVENTS = {
openAddImageModal: 'gl_openAddImageModal', openAddImageModal: 'gl_openAddImageModal',
...@@ -31,6 +32,7 @@ const TOOLBAR_ITEM_CONFIGS = [ ...@@ -31,6 +32,7 @@ const TOOLBAR_ITEM_CONFIGS = [
export const EDITOR_OPTIONS = { export const EDITOR_OPTIONS = {
toolbarItems: TOOLBAR_ITEM_CONFIGS.map(config => generateToolbarItem(config)), toolbarItems: TOOLBAR_ITEM_CONFIGS.map(config => generateToolbarItem(config)),
customHTMLRenderer: buildCustomHTMLRenderer(),
}; };
export const EDITOR_TYPES = { export const EDITOR_TYPES = {
......
import renderKramdownList from './renderers/render_kramdown_list';
import renderKramdownText from './renderers/render_kramdown_text';
// Built-in renderers; custom renderers of the same type run before these.
const listRenderers = [renderKramdownList];
const textRenderers = [renderKramdownText];
// Runs the first renderer in `renderers` that claims it can handle
// `node`; otherwise falls back to the editor's default rendering via
// `context.origin()`.
const executeRenderer = (renderers, node, context) => {
  const match = renderers.find(renderer => renderer.canRender(node, context));
  if (match) {
    return match.render(context);
  }
  return context.origin();
};
// Builds renderer functions for every custom node type that does NOT
// already have a default handler; default-handled types are merged in
// separately by the caller.
const buildCustomRendererFunctions = (customRenderers, defaults) => {
  const entries = Object.entries(customRenderers)
    .filter(([type]) => !defaults[type])
    .map(([type, renderers]) => [
      type,
      (node, context) => executeRenderer(renderers, node, context),
    ]);
  return Object.fromEntries(entries);
};
/**
 * Builds the customHTMLRenderer map consumed by the Toast UI editor.
 *
 * `list` and `text` nodes always run the built-in kramdown renderers
 * after any caller-supplied ones; other node types are handled purely
 * by the caller's custom renderers.
 *
 * @param {Object} customRenderers renderer lists keyed by node type
 * @returns {Object} renderer functions keyed by node type
 */
const buildCustomHTMLRenderer = (customRenderers = { list: [], text: [] }) => {
  const defaults = {
    // Custom list renderers take precedence over the built-in ones.
    list: (node, context) =>
      executeRenderer([...customRenderers.list, ...listRenderers], node, context),
    // Custom text renderers take precedence over the built-in ones.
    text: (node, context) =>
      executeRenderer([...customRenderers.text, ...textRenderers], node, context),
  };

  return {
    ...buildCustomRendererFunctions(customRenderers, defaults),
    ...defaults,
  };
};

export default buildCustomHTMLRenderer;
// Helpers that wrap Toast UI editor tokens in a non-editable <div>
// so content the WYSIWYG editor cannot round-trip stays read-only.
const buildToken = (type, tagName, props) => ({ type, tagName, ...props });

// Opens a contenteditable=false wrapper and emits it ahead of `token`.
export const buildUneditableOpenTokens = token => [
  buildToken('openTag', 'div', {
    attributes: { contenteditable: false },
    classNames: [
      'gl-px-4 gl-py-2 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',
    ],
  }),
  token,
];

// Closes the wrapper opened by buildUneditableOpenTokens.
export const buildUneditableCloseToken = () => buildToken('closeTag', 'div');

// Wraps a single token in the full open/close uneditable pair.
export const buildUneditableTokens = token => {
  const openTokens = buildUneditableOpenTokens(token);
  return openTokens.concat(buildUneditableCloseToken());
};
import { buildUneditableOpenTokens, buildUneditableCloseToken } from './build_uneditable_token';
// A kramdown table-of-contents marker is a text node whose literal is
// exactly 'TOC'.
const isKramdownTOC = node => node.type === 'text' && node.literal === 'TOC';

// Walks down the left-most spine of the node tree; renders custom
// output only when the left-most leaf is a kramdown TOC marker.
const canRender = node => {
  for (let current = node; current !== null; current = current.firstChild) {
    if (current.firstChild === null) {
      return isKramdownTOC(current);
    }
  }
  return false;
};

// Wraps the default-rendered TOC list in an uneditable container.
const render = ({ entering, origin }) => {
  if (entering) {
    return buildUneditableOpenTokens(origin());
  }
  return buildUneditableCloseToken();
};

export default { canRender, render };
import { buildUneditableTokens } from './build_uneditable_token';
// Detects kramdown attribute-list syntax (e.g. `{: .class}`) occupying
// a whole line; such text cannot be represented in the WYSIWYG editor.
const canRender = ({ literal }) => {
  // A fresh regex per call means the `g` flag carries no lastIndex state.
  const kramdownRegex = /(^{:.+}$)/gm;
  return kramdownRegex.test(literal);
};

// Wraps the default-rendered text in an uneditable container.
const render = ({ origin }) => buildUneditableTokens(origin());

export default { canRender, render };
...@@ -100,3 +100,11 @@ ...@@ -100,3 +100,11 @@
.gl-pl-7 { .gl-pl-7 {
padding-left: $gl-spacing-scale-7; padding-left: $gl-spacing-scale-7;
} }
// Utility: transition only the SVG `stroke-opacity` property
// (applied to DAG graph links for the hover fade effect).
.gl-transition-property-stroke-opacity {
transition-property: stroke-opacity;
}
// Utility: transition only the SVG `stroke` property
// (applied to DAG graph nodes for the hover fade effect).
.gl-transition-property-stroke {
transition-property: stroke;
}
...@@ -2,19 +2,34 @@ ...@@ -2,19 +2,34 @@
module KnownSignIn module KnownSignIn
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
include CookiesHelper
# Encrypted cookie that marks a browser as a known sign-in device.
KNOWN_SIGN_IN_COOKIE = :known_sign_in
# The cookie is reissued with this expiry on each verified sign-in.
KNOWN_SIGN_IN_COOKIE_EXPIRY = 14.days
private private
def verify_known_sign_in def verify_known_sign_in
return unless current_user return unless current_user
notify_user unless known_remote_ip? notify_user unless known_device? || known_remote_ip?
update_cookie
end end
def known_remote_ip? def known_remote_ip?
known_ip_addresses.include?(request.remote_ip) known_ip_addresses.include?(request.remote_ip)
end end
# The device is known when the encrypted cookie holds the current user's id.
def known_device?
cookies.encrypted[KNOWN_SIGN_IN_COOKIE] == current_user.id
end
# (Re)issues the encrypted known-sign-in cookie, extending its expiry window.
def update_cookie
set_secure_cookie(KNOWN_SIGN_IN_COOKIE, current_user.id,
type: COOKIE_TYPE_ENCRYPTED, httponly: true, expires: KNOWN_SIGN_IN_COOKIE_EXPIRY)
end
def sessions def sessions
strong_memoize(:session) do strong_memoize(:session) do
ActiveSession.list(current_user).reject(&:is_impersonated) ActiveSession.list(current_user).reject(&:is_impersonated)
......
...@@ -82,7 +82,7 @@ class Projects::ApplicationController < ApplicationController ...@@ -82,7 +82,7 @@ class Projects::ApplicationController < ApplicationController
end end
def apply_diff_view_cookie! def apply_diff_view_cookie!
set_secure_cookie(:diff_view, params.delete(:view), permanent: true) if params[:view].present? set_secure_cookie(:diff_view, params.delete(:view), type: COOKIE_TYPE_PERMANENT) if params[:view].present?
end end
def require_pages_enabled! def require_pages_enabled!
......
...@@ -8,6 +8,7 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic ...@@ -8,6 +8,7 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
before_action :commit before_action :commit
before_action :define_diff_vars before_action :define_diff_vars
before_action :define_diff_comment_vars, except: [:diffs_batch, :diffs_metadata] before_action :define_diff_comment_vars, except: [:diffs_batch, :diffs_metadata]
before_action :update_diff_discussion_positions!
around_action :allow_gitaly_ref_name_caching around_action :allow_gitaly_ref_name_caching
...@@ -171,4 +172,12 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic ...@@ -171,4 +172,12 @@ class Projects::MergeRequests::DiffsController < Projects::MergeRequests::Applic
@notes.concat(draft_notes) @notes.concat(draft_notes)
end end
# Captures diff note positions on demand, gated behind two feature flags
# and skipped when positions have already been captured.
def update_diff_discussion_positions!
return unless Feature.enabled?(:merge_ref_head_comments, @merge_request.target_project, default_enabled: true)
# NOTE(review): flag name reads `merge_red_...` — likely a typo of
# `merge_ref_...`; confirm against the flag definition before renaming,
# since the lookup key is load-bearing.
return unless Feature.enabled?(:merge_red_head_comments_position_on_demand, @merge_request.target_project, default_enabled: true)
return if @merge_request.has_any_diff_note_positions?
Discussions::CaptureDiffNotePositionsService.new(@merge_request).execute
end
end end
# frozen_string_literal: true # frozen_string_literal: true
module CookiesHelper module CookiesHelper
def set_secure_cookie(key, value, httponly: false, permanent: false) COOKIE_TYPE_PERMANENT = :permanent
cookie_jar = permanent ? cookies.permanent : cookies COOKIE_TYPE_ENCRYPTED = :encrypted
cookie_jar[key] = { value: value, secure: Gitlab.config.gitlab.https, httponly: httponly } def set_secure_cookie(key, value, httponly: false, expires: nil, type: nil)
cookie_jar = case type
when COOKIE_TYPE_PERMANENT
cookies.permanent
when COOKIE_TYPE_ENCRYPTED
cookies.encrypted
else
cookies
end
cookie_jar[key] = { value: value, secure: Gitlab.config.gitlab.https, httponly: httponly, expires: expires }
end end
end end
...@@ -67,6 +67,10 @@ module Noteable ...@@ -67,6 +67,10 @@ module Noteable
false false
end end
def has_any_diff_note_positions?
notes.any? && DiffNotePosition.where(note: notes).exists?
end
def discussion_notes def discussion_notes
notes notes
end end
......
...@@ -97,29 +97,6 @@ class IssuableBaseService < BaseService ...@@ -97,29 +97,6 @@ class IssuableBaseService < BaseService
params.delete(label_key) if params[label_key].nil? params.delete(label_key) if params[label_key].nil?
end end
def filter_labels_in_param(key)
return if params[key].to_a.empty?
params[key] = available_labels.id_in(params[key]).pluck_primary_key
end
def find_or_create_label_ids
labels = params.delete(:labels)
return unless labels
params[:label_ids] = labels.map do |label_name|
label = Labels::FindOrCreateService.new(
current_user,
parent,
title: label_name.strip,
available_labels: available_labels
).execute
label.try(:id)
end.compact
end
def labels_service def labels_service
@labels_service ||= ::Labels::AvailableLabelsService.new(current_user, parent, params) @labels_service ||= ::Labels::AvailableLabelsService.new(current_user, parent, params)
end end
......
---
title: Display Multiple Terraform Reports in MR Widget
merge_request: 34392
author:
type: added
---
title: "Prevents editing of non-markdown kramdown content in the Static Site Editor's WYSIWYG mode"
merge_request: 34185
author:
type: changed
---
title: Use IP or cookie in known sign-in check
merge_request: 34102
author:
type: changed
---
title: Fix missing templating vars set from URL in metrics dashboard
merge_request: 34668
author:
type: fixed
---
title: Update diff discussion positions on demand
merge_request: 34148
author:
type: added
...@@ -61,6 +61,7 @@ export default { ...@@ -61,6 +61,7 @@ export default {
<gl-icon <gl-icon
name="issues" name="issues"
:size="24" :size="24"
class="class-name"
/> />
</template> </template>
``` ```
...@@ -68,7 +69,7 @@ export default { ...@@ -68,7 +69,7 @@ export default {
- **name** Name of the Icon in the SVG Sprite ([Overview is available here](https://gitlab-org.gitlab.io/gitlab-svgs)). - **name** Name of the Icon in the SVG Sprite ([Overview is available here](https://gitlab-org.gitlab.io/gitlab-svgs)).
- **size (optional)** Number value for the size which is then mapped to a specific CSS class - **size (optional)** Number value for the size which is then mapped to a specific CSS class
(Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` CSS classes) (Available Sizes: 8, 12, 16, 18, 24, 32, 48, 72 are mapped to `sXX` CSS classes)
- **css-classes (optional)** Additional CSS Classes to add to the SVG tag. - **class (optional)** Additional CSS Classes to add to the SVG tag.
### Usage in HTML/JS ### Usage in HTML/JS
......
...@@ -12,7 +12,7 @@ Some gems may not include their license information in their `gemspec` file, and ...@@ -12,7 +12,7 @@ Some gems may not include their license information in their `gemspec` file, and
### License Finder commands ### License Finder commands
> Note: License Finder currently uses GitLab misused terms of whitelist and blacklist. As a result, the commands below references those terms. We've created an [issue on their project](https://github.com/pivotal/LicenseFinder/issues/745) to propose that they rename their commands. > Note: License Finder currently uses GitLab misused terms of `whitelist` and `blacklist`. As a result, the commands below reference those terms. We've created an [issue on their project](https://github.com/pivotal/LicenseFinder/issues/745) to propose that they rename their commands.
There are a few basic commands License Finder provides that you'll need in order to manage license detection. There are a few basic commands License Finder provides that you'll need in order to manage license detection.
......
This source diff could not be displayed because it is too large. You can view the blob instead.
...@@ -242,7 +242,7 @@ a `before_script` execution to prepare your scan job. ...@@ -242,7 +242,7 @@ a `before_script` execution to prepare your scan job.
To pass your project's dependencies as artifacts, the dependencies must be included To pass your project's dependencies as artifacts, the dependencies must be included
in the project's working directory and specified using the `artifacts:path` configuration. in the project's working directory and specified using the `artifacts:path` configuration.
If all dependencies are present, the `-compile=false` flag can be provided to the If all dependencies are present, the `COMPILE=false` variable can be provided to the
analyzer and compilation will be skipped: analyzer and compilation will be skipped:
```yaml ```yaml
...@@ -267,10 +267,9 @@ build: ...@@ -267,10 +267,9 @@ build:
spotbugs-sast: spotbugs-sast:
dependencies: dependencies:
- build - build
script:
- /analyzer run -compile=false
variables: variables:
MAVEN_REPO_PATH: ./.m2/repository MAVEN_REPO_PATH: ./.m2/repository
COMPILE: false
artifacts: artifacts:
reports: reports:
sast: gl-sast-report.json sast: gl-sast-report.json
...@@ -339,6 +338,7 @@ Some analyzers can be customized with environment variables. ...@@ -339,6 +338,7 @@ Some analyzers can be customized with environment variables.
| `SCAN_KUBERNETES_MANIFESTS` | Kubesec | Set to `"true"` to scan Kubernetes manifests. | | `SCAN_KUBERNETES_MANIFESTS` | Kubesec | Set to `"true"` to scan Kubernetes manifests. |
| `KUBESEC_HELM_CHARTS_PATH` | Kubesec | Optional path to Helm charts that `helm` will use to generate a Kubernetes manifest that `kubesec` will scan. If dependencies are defined, `helm dependency build` should be ran in a `before_script` to fetch the necessary dependencies. | | `KUBESEC_HELM_CHARTS_PATH` | Kubesec | Optional path to Helm charts that `helm` will use to generate a Kubernetes manifest that `kubesec` will scan. If dependencies are defined, `helm dependency build` should be ran in a `before_script` to fetch the necessary dependencies. |
| `KUBESEC_HELM_OPTIONS` | Kubesec | Additional arguments for the `helm` executable. | | `KUBESEC_HELM_OPTIONS` | Kubesec | Additional arguments for the `helm` executable. |
| `COMPILE` | SpotBugs | Set to `"false"` to disable project compilation and dependency fetching |
| `ANT_HOME` | SpotBugs | The `ANT_HOME` environment variable. | | `ANT_HOME` | SpotBugs | The `ANT_HOME` environment variable. |
| `ANT_PATH` | SpotBugs | Path to the `ant` executable. | | `ANT_PATH` | SpotBugs | Path to the `ant` executable. |
| `GRADLE_PATH` | SpotBugs | Path to the `gradle` executable. | | `GRADLE_PATH` | SpotBugs | Path to the `gradle` executable. |
......
...@@ -821,6 +821,16 @@ user's home location (in this case the user is `root` since it runs in a ...@@ -821,6 +821,16 @@ user's home location (in this case the user is `root` since it runs in a
Docker container), and Maven will use the configured CI Docker container), and Maven will use the configured CI
[environment variables](../../../ci/variables/README.md#predefined-environment-variables). [environment variables](../../../ci/variables/README.md#predefined-environment-variables).
### Version validation
The version string is validated with the following regular expression:
```ruby
\A(\.?[\w\+-]+\.?)+\z
```
You can play around with the regex and try your version strings on [this regular expression editor](https://rubular.com/r/rrLQqUXjfKEoL6).
## Troubleshooting ## Troubleshooting
### Useful Maven command line options ### Useful Maven command line options
......
...@@ -22,7 +22,7 @@ See the [authentication topic](../../topics/authentication/index.md) for more de ...@@ -22,7 +22,7 @@ See the [authentication topic](../../topics/authentication/index.md) for more de
### Unknown sign-in ### Unknown sign-in
GitLab will notify you if a sign-in occurs that is from an unknown IP address. GitLab will notify you if a sign-in occurs that is from an unknown IP address or device.
See [Unknown Sign-In Notification](unknown_sign_in_notification.md) for more details. See [Unknown Sign-In Notification](unknown_sign_in_notification.md) for more details.
## User profile ## User profile
......
...@@ -9,16 +9,19 @@ info: To determine the technical writer assigned to the Stage/Group associated w ...@@ -9,16 +9,19 @@ info: To determine the technical writer assigned to the Stage/Group associated w
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/27211) in GitLab 13.0. > [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/27211) in GitLab 13.0.
When a user successfully signs in from a previously unknown IP address, When a user successfully signs in from a previously unknown IP address or device,
GitLab notifies the user by email. In this way, GitLab proactively alerts users of potentially GitLab notifies the user by email. In this way, GitLab proactively alerts users of potentially
malicious or unauthorized sign-ins. malicious or unauthorized sign-ins.
There are two methods used to identify a known sign-in: There are several methods used to identify a known sign-in. All methods must fail
for a notification email to be sent.
- Last sign-in IP: The current sign-in IP address is checked against the last sign-in - Last sign-in IP: The current sign-in IP address is checked against the last sign-in
IP address. IP address.
- Current active sessions: If the user has an existing active session from the - Current active sessions: If the user has an existing active session from the
same IP address. See [Active Sessions](active_sessions.md). same IP address. See [Active Sessions](active_sessions.md).
- Cookie: After successful sign in, an encrypted cookie is stored in the browser.
This cookie is set to expire 14 days after the last successful sign in.
## Example email ## Example email
......
...@@ -173,6 +173,24 @@ Read through the documentation on [project settings](settings/index.md). ...@@ -173,6 +173,24 @@ Read through the documentation on [project settings](settings/index.md).
- [Export a project from GitLab](settings/import_export.md#exporting-a-project-and-its-data) - [Export a project from GitLab](settings/import_export.md#exporting-a-project-and-its-data)
- [Importing and exporting projects between GitLab instances](settings/import_export.md) - [Importing and exporting projects between GitLab instances](settings/import_export.md)
## Remove a project
To remove a project, start from that project's home page:
1. Navigate to **Settings > General**.
1. Expand the **Advanced** section.
1. Scroll down to the **Remove project** section.
1. Click **Remove project**.
1. Confirm this action by typing in the expected text.
### Delayed removal **(PREMIUM)**
By default, removing a project is followed by a seven-day delay, during which admins can restore the project.
This delay [may be changed by an admin](../admin_area/settings/visibility_and_access_controls.md#default-deletion-adjourned-period-premium-only).
Admins can view all projects pending deletion. If you're an administrator, go to the top navigation bar, click **Projects > Your projects**, and then select the **Removed projects** tab.
From this tab an admin can restore any project.
## CI/CD for external repositories **(PREMIUM)** ## CI/CD for external repositories **(PREMIUM)**
Instead of importing a repository directly to GitLab, you can connect your repository Instead of importing a repository directly to GitLab, you can connect your repository
......
<script> <script>
import { mapActions, mapState } from 'vuex';
import AuditEventsFilter from './audit_events_filter.vue'; import AuditEventsFilter from './audit_events_filter.vue';
import DateRangeField from './date_range_field.vue'; import DateRangeField from './date_range_field.vue';
import SortingField from './sorting_field.vue'; import SortingField from './sorting_field.vue';
...@@ -12,10 +13,6 @@ export default { ...@@ -12,10 +13,6 @@ export default {
AuditEventsTable, AuditEventsTable,
}, },
props: { props: {
formPath: {
type: String,
required: true,
},
events: { events: {
type: Array, type: Array,
required: false, required: false,
...@@ -41,16 +38,11 @@ export default { ...@@ -41,16 +38,11 @@ export default {
default: undefined, default: undefined,
}, },
}, },
data() { computed: {
return { ...mapState(['filterValue', 'startDate', 'endDate', 'sortBy']),
formElement: null,
};
}, },
mounted() { methods: {
// Passing the form to child components is only temporary ...mapActions(['setDateRange', 'setFilterValue', 'setSortBy', 'searchForAuditEvents']),
// and should be changed when this issue is completed:
// https://gitlab.com/gitlab-org/gitlab/-/issues/217759
this.formElement = this.$refs.form;
}, },
}; };
</script> </script>
...@@ -58,25 +50,34 @@ export default { ...@@ -58,25 +50,34 @@ export default {
<template> <template>
<div> <div>
<div class="row-content-block second-block pb-0"> <div class="row-content-block second-block pb-0">
<form <div class="d-flex justify-content-between audit-controls row">
ref="form"
method="GET"
:path="formPath"
class="filter-form d-flex justify-content-between audit-controls row"
>
<div class="col-lg-auto flex-fill form-group align-items-lg-center pr-lg-8"> <div class="col-lg-auto flex-fill form-group align-items-lg-center pr-lg-8">
<audit-events-filter v-bind="{ enabledTokenTypes, qaSelector: filterQaSelector }" /> <audit-events-filter
:enabled-token-types="enabledTokenTypes"
:qa-selector="filterQaSelector"
:value="filterValue"
@selected="setFilterValue"
@submit="searchForAuditEvents"
/>
</div> </div>
<div class="d-flex col-lg-auto flex-wrap pl-lg-0"> <div class="d-flex col-lg-auto flex-wrap pl-lg-0">
<div <div
class="audit-controls d-flex align-items-lg-center flex-column flex-lg-row col-lg-auto px-0" class="audit-controls d-flex align-items-lg-center flex-column flex-lg-row col-lg-auto px-0"
> >
<date-range-field v-if="formElement" :form-element="formElement" /> <date-range-field
<sorting-field /> :start-date="startDate"
:end-date="endDate"
@selected="setDateRange"
/>
<sorting-field :sort-by="sortBy" @selected="setSortBy" />
</div> </div>
</div> </div>
</form> </div>
</div> </div>
<audit-events-table v-bind="{ events, isLastPage, qaSelector: tableQaSelector }" /> <audit-events-table
:events="events"
:is-last-page="isLastPage"
:qa-selector="tableQaSelector"
/>
</div> </div>
</template> </template>
<script> <script>
import { GlFilteredSearch } from '@gitlab/ui'; import { GlFilteredSearch } from '@gitlab/ui';
import { queryToObject } from '~/lib/utils/url_utility';
import { FILTER_TOKENS, AVAILABLE_TOKEN_TYPES } from '../constants'; import { FILTER_TOKENS, AVAILABLE_TOKEN_TYPES } from '../constants';
import { availableTokensValidator } from '../validators'; import { availableTokensValidator } from '../validators';
...@@ -9,6 +8,11 @@ export default { ...@@ -9,6 +8,11 @@ export default {
GlFilteredSearch, GlFilteredSearch,
}, },
props: { props: {
value: {
type: Array,
required: false,
default: () => [],
},
enabledTokenTypes: { enabledTokenTypes: {
type: Array, type: Array,
required: false, required: false,
...@@ -21,14 +25,9 @@ export default { ...@@ -21,14 +25,9 @@ export default {
default: undefined, default: undefined,
}, },
}, },
data() {
return {
searchTerms: [],
};
},
computed: { computed: {
searchTerm() { searchTerm() {
return this.searchTerms.find(term => AVAILABLE_TOKEN_TYPES.includes(term.type)); return this.value.find(term => AVAILABLE_TOKEN_TYPES.includes(term.type));
}, },
enabledTokens() { enabledTokens() {
return FILTER_TOKENS.filter(token => this.enabledTokenTypes.includes(token.type)); return FILTER_TOKENS.filter(token => this.enabledTokenTypes.includes(token.type));
...@@ -36,39 +35,23 @@ export default { ...@@ -36,39 +35,23 @@ export default {
filterTokens() { filterTokens() {
// This limits the user to search by only one of the available tokens // This limits the user to search by only one of the available tokens
const { enabledTokens, searchTerm } = this; const { enabledTokens, searchTerm } = this;
if (searchTerm?.type) { if (searchTerm?.type) {
return enabledTokens.map(token => ({ return enabledTokens.map(token => ({
...token, ...token,
disabled: searchTerm.type !== token.type, disabled: searchTerm.type !== token.type,
})); }));
} }
return enabledTokens; return enabledTokens;
}, },
id() {
return this.searchTerm?.value?.data;
},
type() {
return this.searchTerm?.type;
},
},
created() {
this.setSearchTermsFromQuery();
}, },
methods: { methods: {
// The form logic here will be removed once all the audit onSubmit() {
// components are migrated into a single Vue application. this.$emit('submit');
// https://gitlab.com/gitlab-org/gitlab/-/issues/215363
getFormElement() {
return this.$refs.input.form;
}, },
setSearchTermsFromQuery() { onInput(val) {
const { entity_type: type, entity_id: value } = queryToObject(window.location.search); this.$emit('selected', val);
if (type && value) {
this.searchTerms = [{ type, value: { data: value, operator: '=' } }];
}
},
filteredSearchSubmit() {
this.getFormElement().submit();
}, },
}, },
}; };
...@@ -81,16 +64,14 @@ export default { ...@@ -81,16 +64,14 @@ export default {
:data-qa-selector="qaSelector" :data-qa-selector="qaSelector"
> >
<gl-filtered-search <gl-filtered-search
v-model="searchTerms" :value="value"
:placeholder="__('Search')" :placeholder="__('Search')"
:clear-button-title="__('Clear')" :clear-button-title="__('Clear')"
:close-button-title="__('Close')" :close-button-title="__('Close')"
:available-tokens="filterTokens" :available-tokens="filterTokens"
class="gl-h-32 w-100" class="gl-h-32 w-100"
@submit="filteredSearchSubmit" @submit="onSubmit"
@input="onInput"
/> />
<input ref="input" v-model="type" type="hidden" name="entity_type" />
<input v-model="id" type="hidden" name="entity_id" />
</div> </div>
</template> </template>
<script> <script>
import { GlDaterangePicker } from '@gitlab/ui'; import { GlDaterangePicker } from '@gitlab/ui';
import { parsePikadayDate, pikadayToString } from '~/lib/utils/datetime_utility';
import { queryToObject } from '~/lib/utils/url_utility';
export default { export default {
components: { components: {
GlDaterangePicker, GlDaterangePicker,
}, },
props: { props: {
formElement: { startDate: {
type: HTMLFormElement, type: Date,
required: true, required: false,
default: null,
}, },
}, endDate: {
data() { type: Date,
const data = { required: false,
startDate: null, default: null,
endDate: null,
};
const { created_after: initialStartDate, created_before: initialEndDate } = queryToObject(
window.location.search,
);
if (initialStartDate) {
data.startDate = parsePikadayDate(initialStartDate);
}
if (initialEndDate) {
data.endDate = parsePikadayDate(initialEndDate);
}
return data;
},
computed: {
createdAfter() {
return this.startDate ? pikadayToString(this.startDate) : '';
},
createdBefore() {
return this.endDate ? pikadayToString(this.endDate) : '';
}, },
}, },
methods: { methods: {
handleInput(dates) { onInput(dates) {
this.startDate = dates.startDate; this.$emit('selected', dates);
this.endDate = dates.endDate;
this.$nextTick(() => this.formElement.submit());
}, },
}, },
}; };
</script> </script>
<template> <template>
<div> <gl-daterange-picker
<gl-daterange-picker class="d-flex flex-wrap flex-sm-nowrap"
class="d-flex flex-wrap flex-sm-nowrap" :default-start-date="startDate"
:default-start-date="startDate" :default-end-date="endDate"
:default-end-date="endDate" start-picker-class="form-group align-items-lg-center mr-0 mr-sm-1 d-flex flex-column flex-lg-row"
start-picker-class="form-group align-items-lg-center mr-0 mr-sm-1 d-flex flex-column flex-lg-row" end-picker-class="form-group align-items-lg-center mr-0 mr-sm-2 d-flex flex-column flex-lg-row"
end-picker-class="form-group align-items-lg-center mr-0 mr-sm-2 d-flex flex-column flex-lg-row" @input="onInput"
@input="handleInput" />
/>
<input type="hidden" name="created_after" :value="createdAfter" />
<input type="hidden" name="created_before" :value="createdBefore" />
</div>
</template> </template>
<script> <script>
import { GlNewDropdown, GlNewDropdownHeader, GlNewDropdownItem } from '@gitlab/ui'; import { GlNewDropdown, GlNewDropdownHeader, GlNewDropdownItem } from '@gitlab/ui';
import { setUrlParams, queryToObject } from '~/lib/utils/url_utility';
import { s__ } from '~/locale'; import { s__ } from '~/locale';
const SORTING_TITLE = s__('SortOptions|Sort by:'); const SORTING_TITLE = s__('SortOptions|Sort by:');
...@@ -22,24 +20,24 @@ export default { ...@@ -22,24 +20,24 @@ export default {
GlNewDropdownHeader, GlNewDropdownHeader,
GlNewDropdownItem, GlNewDropdownItem,
}, },
data() { props: {
const { sort: selectedOption } = queryToObject(window.location.search); sortBy: {
type: String,
return { required: false,
selectedOption: selectedOption || SORTING_OPTIONS[0].key, default: null,
}; },
}, },
computed: { computed: {
selectedOptionText() { selectedOption() {
return SORTING_OPTIONS.find(option => option.key === this.selectedOption).text; return SORTING_OPTIONS.find(option => option.key === this.sortBy) || SORTING_OPTIONS[0];
}, },
}, },
methods: { methods: {
getItemLink(key) { onItemClick(option) {
return setUrlParams({ sort: key }); this.$emit('selected', option);
}, },
isChecked(key) { isChecked(key) {
return key === this.selectedOption; return key === this.selectedOption.key;
}, },
}, },
SORTING_TITLE, SORTING_TITLE,
...@@ -49,23 +47,17 @@ export default { ...@@ -49,23 +47,17 @@ export default {
<template> <template>
<div> <div>
<gl-new-dropdown <gl-new-dropdown :text="selectedOption.text" class="w-100 flex-column flex-lg-row form-group">
v-model="selectedOption"
:text="selectedOptionText"
class="w-100 flex-column flex-lg-row form-group"
>
<gl-new-dropdown-header> {{ $options.SORTING_TITLE }}</gl-new-dropdown-header> <gl-new-dropdown-header> {{ $options.SORTING_TITLE }}</gl-new-dropdown-header>
<gl-new-dropdown-item <gl-new-dropdown-item
v-for="option in $options.SORTING_OPTIONS" v-for="option in $options.SORTING_OPTIONS"
:key="option.key" :key="option.key"
:is-check-item="true" :is-check-item="true"
:is-checked="isChecked(option.key)" :is-checked="isChecked(option.key)"
:href="getItemLink(option.key)" @click="onItemClick(option.key)"
> >
{{ option.text }} {{ option.text }}
</gl-new-dropdown-item> </gl-new-dropdown-item>
</gl-new-dropdown> </gl-new-dropdown>
<input type="hidden" name="sort" :value="selectedOption" />
</div> </div>
</template> </template>
import Vue from 'vue'; import Vue from 'vue';
import { parseBoolean } from '~/lib/utils/common_utils'; import { parseBoolean } from '~/lib/utils/common_utils';
import AuditEventsApp from './components/audit_events_app.vue'; import AuditEventsApp from './components/audit_events_app.vue';
import createStore from './store';
export default selector => { export default selector => {
const el = document.querySelector(selector); const el = document.querySelector(selector);
const { const { events, isLastPage, enabledTokenTypes, filterQaSelector, tableQaSelector } = el.dataset;
events,
isLastPage, const store = createStore();
formPath, store.dispatch('initializeAuditEvents');
enabledTokenTypes,
filterQaSelector,
tableQaSelector,
} = el.dataset;
return new Vue({ return new Vue({
el, el,
store,
render: createElement => render: createElement =>
createElement(AuditEventsApp, { createElement(AuditEventsApp, {
props: { props: {
events: JSON.parse(events), events: JSON.parse(events),
isLastPage: parseBoolean(isLastPage), isLastPage: parseBoolean(isLastPage),
enabledTokenTypes: JSON.parse(enabledTokenTypes), enabledTokenTypes: JSON.parse(enabledTokenTypes),
formPath,
filterQaSelector, filterQaSelector,
tableQaSelector, tableQaSelector,
}, },
......
...@@ -18,9 +18,8 @@ export const setDateRange = ({ commit, dispatch }, { startDate, endDate }) => { ...@@ -18,9 +18,8 @@ export const setDateRange = ({ commit, dispatch }, { startDate, endDate }) => {
dispatch('searchForAuditEvents'); dispatch('searchForAuditEvents');
}; };
export const setFilterValue = ({ commit, dispatch }, { id, type }) => { export const setFilterValue = ({ commit }, filterValue) => {
commit(types.SET_FILTER_VALUE, { id, type }); commit(types.SET_FILTER_VALUE, filterValue);
dispatch('searchForAuditEvents');
}; };
export const setSortBy = ({ commit, dispatch }, sortBy) => { export const setSortBy = ({ commit, dispatch }, sortBy) => {
......
...@@ -11,14 +11,14 @@ export default { ...@@ -11,14 +11,14 @@ export default {
sort: sortBy = null, sort: sortBy = null,
} = {}, } = {},
) { ) {
state.filterValue = { id, type }; state.filterValue = type && id ? [{ type, value: { data: id, operator: '=' } }] : [];
state.startDate = startDate; state.startDate = startDate;
state.endDate = endDate; state.endDate = endDate;
state.sortBy = sortBy; state.sortBy = sortBy;
}, },
[types.SET_FILTER_VALUE](state, { id, type }) { [types.SET_FILTER_VALUE](state, filterValue) {
state.filterValue = { id, type }; state.filterValue = filterValue;
}, },
[types.SET_DATE_RANGE](state, { startDate, endDate }) { [types.SET_DATE_RANGE](state, { startDate, endDate }) {
......
export default () => ({ export default () => ({
filterValue: { filterValue: [],
id: null,
type: null,
},
startDate: null, startDate: null,
endDate: null, endDate: null,
......
import { parsePikadayDate, pikadayToString } from '~/lib/utils/datetime_utility'; import { parsePikadayDate, pikadayToString } from '~/lib/utils/datetime_utility';
import { AVAILABLE_TOKEN_TYPES } from './constants';
export const isNumeric = str => { export const isNumeric = str => {
return !Number.isNaN(parseInt(str, 10), 10); return !Number.isNaN(parseInt(str, 10), 10);
...@@ -14,10 +15,16 @@ export const parseAuditEventSearchQuery = ({ ...@@ -14,10 +15,16 @@ export const parseAuditEventSearchQuery = ({
created_before: createdBefore ? parsePikadayDate(createdBefore) : null, created_before: createdBefore ? parsePikadayDate(createdBefore) : null,
}); });
export const createAuditEventSearchQuery = ({ filterValue, startDate, endDate, sortBy }) => ({ export const createAuditEventSearchQuery = ({ filterValue, startDate, endDate, sortBy }) => {
entity_id: filterValue.id, const entityValue = filterValue.find(value => AVAILABLE_TOKEN_TYPES.includes(value.type));
entity_type: filterValue.type,
created_after: startDate ? pikadayToString(startDate) : null, return {
created_before: endDate ? pikadayToString(endDate) : null, created_after: startDate ? pikadayToString(startDate) : null,
sort: sortBy, created_before: endDate ? pikadayToString(endDate) : null,
}); sort: sortBy,
entity_id: entityValue?.value.data,
entity_type: entityValue?.type,
// When changing the search parameters, we should be resetting to the first page
page: null,
};
};
...@@ -23,7 +23,7 @@ export default { ...@@ -23,7 +23,7 @@ export default {
<section <section
v-if="hasStickySlot" v-if="hasStickySlot"
data-testid="sticky-section" data-testid="sticky-section"
class="position-sticky gl-z-index-2 security_dashboard_filters" class="position-sticky gl-z-index-2 security-dashboard-filters"
> >
<slot name="sticky"></slot> <slot name="sticky"></slot>
</section> </section>
......
...@@ -70,10 +70,13 @@ export default { ...@@ -70,10 +70,13 @@ export default {
return this.shouldShowSelection && Boolean(this.numOfSelectedVulnerabilities); return this.shouldShowSelection && Boolean(this.numOfSelectedVulnerabilities);
}, },
checkboxClass() { checkboxClass() {
return this.shouldShowSelection ? '' : 'd-none'; return this.shouldShowSelection ? '' : 'gl-display-none';
},
theadClass() {
return this.shouldShowSelectionSummary ? 'below-selection-summary' : '';
}, },
fields() { fields() {
const commonThClass = ['table-th-transparent', 'original-gl-th'].join(' '); const commonThClass = ['table-th-transparent', 'original-gl-th', 'gl-bg-white!'].join(' ');
return [ return [
{ {
key: 'checkbox', key: 'checkbox',
...@@ -154,7 +157,7 @@ export default { ...@@ -154,7 +157,7 @@ export default {
</script> </script>
<template> <template>
<div> <div class="vulnerability-list">
<selection-summary <selection-summary
v-if="shouldShowSelectionSummary" v-if="shouldShowSelectionSummary"
:selected-vulnerabilities="Object.values(selectedVulnerabilities)" :selected-vulnerabilities="Object.values(selectedVulnerabilities)"
...@@ -165,6 +168,7 @@ export default { ...@@ -165,6 +168,7 @@ export default {
:busy="isLoading" :busy="isLoading"
:fields="fields" :fields="fields"
:items="vulnerabilities" :items="vulnerabilities"
:thead-class="theadClass"
stacked="sm" stacked="sm"
show-empty show-empty
responsive responsive
......
$security-filter-height: 90px;
$selection-summary-height: 68px;
@mixin sticky-top-positioning($extra: 0) {
top: $header-height + $extra;
.with-performance-bar & {
top: $header-height + $performance-bar-height + $extra;
}
}
.vulnerabilities-row { .vulnerabilities-row {
&.dismissed .table-mobile-content:not(.action-buttons) { &.dismissed .table-mobile-content:not(.action-buttons) {
opacity: 0.5; opacity: 0.5;
...@@ -23,10 +34,26 @@ ...@@ -23,10 +34,26 @@
} }
} }
.security_dashboard_filters { .security-dashboard-filters {
top: $header-height; @include sticky-top-positioning();
}
.with-performance-bar & { // Due to position: sticky not being supported on Chrome (https://caniuse.com/#feat=css-sticky),
top: $header-height + $performance-bar-height; // the property is assigned to the th element as a workaround
.vulnerability-list {
.card,
thead th {
position: -webkit-sticky;
position: sticky;
z-index: 1;
@include sticky-top-positioning($security-filter-height);
}
thead th {
box-shadow: 0 1px $gray-100;
}
thead.below-selection-summary th {
@include sticky-top-positioning($security-filter-height + $selection-summary-height);
} }
} }
# frozen_string_literal: true
module Geo
class ProjectRegistryFinder
# Returns ProjectRegistry records that have never been synced.
#
# Does not care about selective sync, because it considers the Registry
# table to be the single source of truth. The contract is that other
# processes need to ensure that the table only contains records that should
# be synced.
#
# Any registries that have ever been synced that currently need to be
# resynced will be handled by other find methods (like
# #find_retryable_dirty_registries)
#
# You can pass a list with `except_ids:` so you can exclude items you
# already scheduled but haven't finished and aren't persisted to the database yet
#
# @param [Integer] batch_size used to limit the results returned
# @param [Array<Integer>] except_ids ids that will be ignored from the query
# rubocop:disable CodeReuse/ActiveRecord
def find_never_synced_registries(batch_size:, except_ids: [])
Geo::ProjectRegistry
.never_synced
.model_id_not_in(except_ids)
.limit(batch_size)
end
# rubocop:enable CodeReuse/ActiveRecord
# rubocop:disable CodeReuse/ActiveRecord
def find_retryable_dirty_registries(batch_size:, except_ids: [])
Geo::ProjectRegistry
.dirty
.retry_due
.model_id_not_in(except_ids)
.order(Gitlab::Database.nulls_first_order(:last_repository_synced_at))
.limit(batch_size)
end
# rubocop:enable CodeReuse/ActiveRecord
end
end
...@@ -11,6 +11,10 @@ class Geo::BaseRegistry < Geo::TrackingBase ...@@ -11,6 +11,10 @@ class Geo::BaseRegistry < Geo::TrackingBase
where(self::MODEL_FOREIGN_KEY => range).pluck(self::MODEL_FOREIGN_KEY) where(self::MODEL_FOREIGN_KEY => range).pluck(self::MODEL_FOREIGN_KEY)
end end
def self.pluck_model_foreign_key
where(nil).pluck(self::MODEL_FOREIGN_KEY)
end
def self.model_id_in(ids) def self.model_id_in(ids)
where(self::MODEL_FOREIGN_KEY => ids) where(self::MODEL_FOREIGN_KEY => ids)
end end
......
# frozen_string_literal: true # frozen_string_literal: true
class Geo::DeletedProject class Geo::DeletedProject
attr_reader :id, :name, :disk_path include ActiveModel::Validations
attr_accessor :id, :name, :disk_path
validates :id, :name, :disk_path, presence: true
def initialize(id:, name:, disk_path:, repository_storage:) def initialize(id:, name:, disk_path:, repository_storage:)
@id = id @id = id
......
...@@ -3,6 +3,9 @@ ...@@ -3,6 +3,9 @@
class Geo::DesignRegistry < Geo::BaseRegistry class Geo::DesignRegistry < Geo::BaseRegistry
include ::Delay include ::Delay
MODEL_CLASS = ::Project
MODEL_FOREIGN_KEY = :project_id
RETRIES_BEFORE_REDOWNLOAD = 5 RETRIES_BEFORE_REDOWNLOAD = 5
belongs_to :project belongs_to :project
......
...@@ -5,6 +5,9 @@ class Geo::ProjectRegistry < Geo::BaseRegistry ...@@ -5,6 +5,9 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
include ::EachBatch include ::EachBatch
include ::ShaAttribute include ::ShaAttribute
MODEL_CLASS = ::Project
MODEL_FOREIGN_KEY = :project_id
REGISTRY_TYPES = %i{repository wiki}.freeze REGISTRY_TYPES = %i{repository wiki}.freeze
RETRIES_BEFORE_REDOWNLOAD = 5 RETRIES_BEFORE_REDOWNLOAD = 5
...@@ -39,6 +42,34 @@ class Geo::ProjectRegistry < Geo::BaseRegistry ...@@ -39,6 +42,34 @@ class Geo::ProjectRegistry < Geo::BaseRegistry
where(nil).pluck(:project_id) where(nil).pluck(:project_id)
end end
def self.registry_consistency_worker_enabled?
Feature.enabled?(:geo_project_registry_ssot_sync)
end
def self.has_create_events?
true
end
def self.find_registry_differences(range)
source_ids = Gitlab::Geo.current_node.projects.id_in(range).pluck_primary_key
tracked_ids = self.pluck_model_ids_in_range(range)
untracked_ids = source_ids - tracked_ids
unused_tracked_ids = tracked_ids - source_ids
[untracked_ids, unused_tracked_ids]
end
def self.delete_worker_class
::GeoRepositoryDestroyWorker
end
def self.delete_for_model_ids(project_ids)
project_ids.map do |project_id|
delete_worker_class.perform_async(project_id)
end
end
def self.failed def self.failed
repository_sync_failed = arel_table[:repository_retry_count].gt(0) repository_sync_failed = arel_table[:repository_retry_count].gt(0)
wiki_sync_failed = arel_table[:wiki_retry_count].gt(0) wiki_sync_failed = arel_table[:wiki_retry_count].gt(0)
......
...@@ -36,6 +36,7 @@ class Packages::Package < ApplicationRecord ...@@ -36,6 +36,7 @@ class Packages::Package < ApplicationRecord
validates :version, format: { with: Gitlab::Regex.semver_regex }, if: -> { npm? || nuget? } validates :version, format: { with: Gitlab::Regex.semver_regex }, if: -> { npm? || nuget? }
validates :name, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan? validates :name, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :version, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan? validates :version, format: { with: Gitlab::Regex.conan_recipe_component_regex }, if: :conan?
validates :version, format: { with: Gitlab::Regex.maven_version_regex }, if: -> { version? && maven? }
enum package_type: { maven: 1, npm: 2, conan: 3, nuget: 4, pypi: 5, composer: 6 } enum package_type: { maven: 1, npm: 2, conan: 3, nuget: 4, pypi: 5, composer: 6 }
......
...@@ -3,10 +3,14 @@ ...@@ -3,10 +3,14 @@
module Geo module Geo
class RepositoryDestroyService class RepositoryDestroyService
include ::Gitlab::Geo::LogHelpers include ::Gitlab::Geo::LogHelpers
include ::Gitlab::Utils::StrongMemoize
attr_reader :id, :name, :disk_path, :repository_storage attr_reader :id, :name, :disk_path, :repository_storage
def initialize(id, name, disk_path, repository_storage) # There is a possibility that the replicable's record does not exist
# anymore. In this case, you need to pass the optional parameters
# explicitly.
def initialize(id, name = nil, disk_path = nil, repository_storage = nil)
@id = id @id = id
@name = name @name = name
@disk_path = disk_path @disk_path = disk_path
...@@ -29,25 +33,36 @@ module Geo ...@@ -29,25 +33,36 @@ module Geo
private private
def destroy_project def destroy_project
::Projects::DestroyService.new(deleted_project, nil).geo_replicate # We should skip if we had to rebuild the project, but we don't
# have the information that our service class requires.
return if project.is_a?(Geo::DeletedProject) && !project.valid?
::Projects::DestroyService.new(project, nil).geo_replicate
end end
# rubocop: disable CodeReuse/ActiveRecord
def destroy_registry_entries def destroy_registry_entries
::Geo::ProjectRegistry.where(project_id: id).delete_all ::Geo::ProjectRegistry.model_id_in(id).delete_all
::Geo::DesignRegistry.where(project_id: id).delete_all ::Geo::DesignRegistry.model_id_in(id).delete_all
log_info("Registry entries removed", project_id: id) log_info('Registry entries removed', project_id: id)
end end
# rubocop: enable CodeReuse/ActiveRecord
def project
def deleted_project strong_memoize(:project) do
# We don't have access to the original model anymore, so we are Project.find(id)
# rebuilding only what our service class requires rescue ActiveRecord::RecordNotFound => e
::Geo::DeletedProject.new(id: id, # When cleaning up project/registries, there are some cases where
name: name, # the replicable record does not exist anymore. So, we try to
disk_path: disk_path, # rebuild it with only what our service class requires.
repository_storage: repository_storage) log_error('Could not find project', e.message)
::Geo::DeletedProject.new(
id: id,
name: name,
disk_path: disk_path,
repository_storage: repository_storage
)
end
end end
end end
end end
...@@ -10,15 +10,15 @@ module Geo ...@@ -10,15 +10,15 @@ module Geo
{ project_id: project_id, job_id: job_id } if job_id { project_id: project_id, job_id: job_id } if job_id
end end
def find_project_ids_not_synced(batch_size:) def find_project_ids_not_synced(except_ids:, batch_size:)
Geo::DesignUnsyncedFinder Geo::DesignUnsyncedFinder
.new(scheduled_project_ids: scheduled_project_ids, shard_name: shard_name, batch_size: batch_size) .new(scheduled_project_ids: except_ids, shard_name: shard_name, batch_size: batch_size)
.execute .execute
end end
def find_project_ids_updated_recently(batch_size:) def find_project_ids_updated_recently(except_ids:, batch_size:)
Geo::DesignUpdatedRecentlyFinder Geo::DesignUpdatedRecentlyFinder
.new(scheduled_project_ids: scheduled_project_ids, shard_name: shard_name, batch_size: batch_size) .new(scheduled_project_ids: except_ids, shard_name: shard_name, batch_size: batch_size)
.execute .execute
end end
end end
......
...@@ -62,22 +62,31 @@ module Geo ...@@ -62,22 +62,31 @@ module Geo
end end
def load_pending_resources def load_pending_resources
resources = find_project_ids_not_synced(batch_size: db_retrieve_batch_size) return [] unless valid_shard?
resources = find_project_ids_not_synced(except_ids: scheduled_project_ids, batch_size: db_retrieve_batch_size)
remaining_capacity = db_retrieve_batch_size - resources.size remaining_capacity = db_retrieve_batch_size - resources.size
if remaining_capacity.zero? if remaining_capacity.zero?
resources resources
else else
resources + find_project_ids_updated_recently(batch_size: remaining_capacity) resources + find_project_ids_updated_recently(except_ids: scheduled_project_ids + resources, batch_size: remaining_capacity)
end end
end end
# rubocop: disable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord
def find_project_ids_not_synced(batch_size:) def find_project_ids_not_synced(except_ids:, batch_size:)
find_unsynced_projects(batch_size: batch_size) if Geo::ProjectRegistry.registry_consistency_worker_enabled?
.id_not_in(scheduled_project_ids) project_ids =
.reorder(last_repository_updated_at: :desc) find_never_synced_project_ids(batch_size: batch_size, except_ids: except_ids)
.pluck_primary_key
find_project_ids_within_shard(project_ids, direction: :desc)
else
find_unsynced_projects(batch_size: batch_size)
.id_not_in(except_ids)
.reorder(last_repository_updated_at: :desc)
.pluck_primary_key
end
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
...@@ -88,11 +97,18 @@ module Geo ...@@ -88,11 +97,18 @@ module Geo
end end
# rubocop: disable CodeReuse/ActiveRecord # rubocop: disable CodeReuse/ActiveRecord
def find_project_ids_updated_recently(batch_size:) def find_project_ids_updated_recently(except_ids:, batch_size:)
find_projects_updated_recently(batch_size: batch_size) if Geo::ProjectRegistry.registry_consistency_worker_enabled?
.id_not_in(scheduled_project_ids) project_ids =
.order('project_registry.last_repository_synced_at ASC NULLS FIRST, projects.last_repository_updated_at ASC') find_retryable_dirty_project_ids(batch_size: batch_size, except_ids: except_ids)
.pluck_primary_key
find_project_ids_within_shard(project_ids, direction: :asc)
else
find_projects_updated_recently(batch_size: batch_size)
.id_not_in(except_ids)
.order('project_registry.last_repository_synced_at ASC NULLS FIRST, projects.last_repository_updated_at ASC')
.pluck_primary_key
end
end end
# rubocop: enable CodeReuse/ActiveRecord # rubocop: enable CodeReuse/ActiveRecord
...@@ -101,5 +117,37 @@ module Geo ...@@ -101,5 +117,37 @@ module Geo
.new(current_node: current_node, shard_name: shard_name, batch_size: batch_size) .new(current_node: current_node, shard_name: shard_name, batch_size: batch_size)
.execute .execute
end end
def valid_shard?
return true unless current_node.selective_sync_by_shards?
current_node.selective_sync_shards.include?(shard_name)
end
def find_never_synced_project_ids(batch_size:, except_ids:)
registry_finder
.find_never_synced_registries(batch_size: batch_size, except_ids: except_ids)
.pluck_model_foreign_key
end
def find_retryable_dirty_project_ids(batch_size:, except_ids:)
registry_finder
.find_retryable_dirty_registries(batch_size: batch_size, except_ids: except_ids)
.pluck_model_foreign_key
end
# rubocop:disable CodeReuse/ActiveRecord
def find_project_ids_within_shard(project_ids, direction:)
Project
.id_in(project_ids)
.within_shards(shard_name)
.reorder(last_repository_updated_at: direction)
.pluck_primary_key
end
# rubocop:enable CodeReuse/ActiveRecord
def registry_finder
@registry_finder ||= Geo::ProjectRegistryFinder.new
end
end end
end end
...@@ -18,8 +18,9 @@ module Geo ...@@ -18,8 +18,9 @@ module Geo
REGISTRY_CLASSES = [ REGISTRY_CLASSES = [
Geo::JobArtifactRegistry, Geo::JobArtifactRegistry,
Geo::LfsObjectRegistry, Geo::LfsObjectRegistry,
Geo::UploadRegistry, Geo::PackageFileRegistry,
Geo::PackageFileRegistry Geo::ProjectRegistry,
Geo::UploadRegistry
].freeze ].freeze
BATCH_SIZE = 1000 BATCH_SIZE = 1000
......
...@@ -3,10 +3,13 @@ ...@@ -3,10 +3,13 @@
class GeoRepositoryDestroyWorker # rubocop:disable Scalability/IdempotentWorker class GeoRepositoryDestroyWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker include ApplicationWorker
include GeoQueue include GeoQueue
include ::Gitlab::Geo::LogHelpers
loggable_arguments 1, 2, 3 loggable_arguments 1, 2, 3
def perform(id, name, disk_path, storage_name) def perform(id, name = nil, disk_path = nil, storage_name = nil)
log_info('Executing Geo::RepositoryDestroyService', id: id, name: name, disk_path: disk_path, storage_name: storage_name)
Geo::RepositoryDestroyService.new(id, name, disk_path, storage_name).execute Geo::RepositoryDestroyService.new(id, name, disk_path, storage_name).execute
end end
end end
---
title: Pin selection summary/list header to the page top
merge_request: 33875
author:
type: added
---
title: Add validation to maven package version
merge_request: 32925
author: Bola Ahmed Buari
type: added
...@@ -162,9 +162,10 @@ RSpec.describe 'Admin::AuditLogs', :js do ...@@ -162,9 +162,10 @@ RSpec.describe 'Admin::AuditLogs', :js do
end end
def filter_for(type, name) def filter_for(type, name)
within '[data-qa-selector="admin_audit_log_filter"]' do filter_container = '[data-testid="audit-events-filter"]'
find('input').click
find(filter_container).click
within filter_container do
click_link type click_link type
click_link name click_link name
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Geo::ProjectRegistryFinder, :geo do
# Six projects with registries in distinct sync states:
# - project_1: synced (should never appear in "dirty"/"never synced" results)
# - project_2: sync failed
# - project_3: plain registry (never synced)
# - project_4: repository dirty, last synced 2 days ago
# - project_5: wiki dirty, last synced 5 days ago
# - project_6: plain registry (never synced)
let_it_be(:project_1) { create(:project) }
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
let_it_be(:project_4) { create(:project) }
let_it_be(:project_5) { create(:project) }
let_it_be(:project_6) { create(:project) }
let_it_be(:registry_project_1) { create(:geo_project_registry, :synced, project_id: project_1.id) }
let_it_be(:registry_project_2) { create(:geo_project_registry, :sync_failed, project_id: project_2.id) }
let_it_be(:registry_project_3) { create(:geo_project_registry, project_id: project_3.id) }
let_it_be(:registry_project_4) { create(:geo_project_registry, :repository_dirty, project_id: project_4.id, last_repository_synced_at: 2.days.ago) }
let_it_be(:registry_project_5) { create(:geo_project_registry, :wiki_dirty, project_id: project_5.id, last_repository_synced_at: 5.days.ago) }
let_it_be(:registry_project_6) { create(:geo_project_registry, project_id: project_6.id) }
describe '#find_never_synced_registries' do
# Only the plain (never synced) registries qualify: 3 and 6.
it 'returns registries for projects that have never been synced' do
registries = subject.find_never_synced_registries(batch_size: 10)
expect(registries).to match_ids(registry_project_3, registry_project_6)
end
# except_ids takes *project* IDs, not registry IDs — excluding project_3
# drops registry_project_3 from the result.
it 'excludes except_ids' do
registries = subject.find_never_synced_registries(batch_size: 10, except_ids: [project_3.id])
expect(registries).to match_ids(registry_project_6)
end
end
describe '#find_retryable_dirty_registries' do
# "Dirty" covers failed, repository-dirty, wiki-dirty, and never-synced
# registries — everything except the fully synced registry_project_1.
it 'returns registries for projects that have been recently updated or that have never been synced' do
registries = subject.find_retryable_dirty_registries(batch_size: 10)
expect(registries).to match_ids(registry_project_2, registry_project_3, registry_project_4, registry_project_5, registry_project_6)
end
# As above, except_ids filters by project ID.
it 'excludes except_ids' do
registries = subject.find_retryable_dirty_registries(batch_size: 10, except_ids: [project_4.id, project_5.id, project_6.id])
expect(registries).to match_ids(registry_project_2, registry_project_3)
end
end
end
...@@ -5,10 +5,8 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = ` ...@@ -5,10 +5,8 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = `
<div <div
class="row-content-block second-block pb-0" class="row-content-block second-block pb-0"
> >
<form <div
class="filter-form d-flex justify-content-between audit-controls row" class="d-flex justify-content-between audit-controls row"
method="GET"
path="form/path"
> >
<div <div
class="col-lg-auto flex-fill form-group align-items-lg-center pr-lg-8" class="col-lg-auto flex-fill form-group align-items-lg-center pr-lg-8"
...@@ -24,17 +22,7 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = ` ...@@ -24,17 +22,7 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = `
clearbuttontitle="Clear" clearbuttontitle="Clear"
close-button-title="Close" close-button-title="Close"
placeholder="Search" placeholder="Search"
value="" value="[object Object]"
/>
<input
name="entity_type"
type="hidden"
/>
<input
name="entity_id"
type="hidden"
/> />
</div> </div>
</div> </div>
...@@ -46,13 +34,16 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = ` ...@@ -46,13 +34,16 @@ exports[`AuditEventsApp when initialized matches the snapshot 1`] = `
class="audit-controls d-flex align-items-lg-center flex-column flex-lg-row col-lg-auto px-0" class="audit-controls d-flex align-items-lg-center flex-column flex-lg-row col-lg-auto px-0"
> >
<date-range-field-stub <date-range-field-stub
formelement="[object HTMLFormElement]" enddate="Sun Feb 02 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
startdate="Wed Jan 01 2020 00:00:00 GMT+0000 (Greenwich Mean Time)"
/> />
<sorting-field-stub /> <sorting-field-stub
sortby="created_asc"
/>
</div> </div>
</div> </div>
</form> </div>
</div> </div>
<audit-events-table-stub <audit-events-table-stub
......
...@@ -2,12 +2,20 @@ import { shallowMount } from '@vue/test-utils'; ...@@ -2,12 +2,20 @@ import { shallowMount } from '@vue/test-utils';
import AuditEventsApp from 'ee/audit_events/components/audit_events_app.vue'; import AuditEventsApp from 'ee/audit_events/components/audit_events_app.vue';
import DateRangeField from 'ee/audit_events/components/date_range_field.vue'; import DateRangeField from 'ee/audit_events/components/date_range_field.vue';
import SortingField from 'ee/audit_events/components/sorting_field.vue';
import AuditEventsTable from 'ee/audit_events/components/audit_events_table.vue'; import AuditEventsTable from 'ee/audit_events/components/audit_events_table.vue';
import AuditEventsFilter from 'ee/audit_events/components/audit_events_filter.vue'; import AuditEventsFilter from 'ee/audit_events/components/audit_events_filter.vue';
import { AVAILABLE_TOKEN_TYPES } from 'ee/audit_events/constants'; import { AVAILABLE_TOKEN_TYPES } from 'ee/audit_events/constants';
import createStore from 'ee/audit_events/store';
const TEST_SORT_BY = 'created_asc';
const TEST_START_DATE = new Date('2020-01-01');
const TEST_END_DATE = new Date('2020-02-02');
const TEST_FILTER_VALUE = [{ id: 50, type: 'User' }];
describe('AuditEventsApp', () => { describe('AuditEventsApp', () => {
let wrapper; let wrapper;
let store;
const events = [{ foo: 'bar' }]; const events = [{ foo: 'bar' }];
const enabledTokenTypes = AVAILABLE_TOKEN_TYPES; const enabledTokenTypes = AVAILABLE_TOKEN_TYPES;
...@@ -16,8 +24,8 @@ describe('AuditEventsApp', () => { ...@@ -16,8 +24,8 @@ describe('AuditEventsApp', () => {
const initComponent = (props = {}) => { const initComponent = (props = {}) => {
wrapper = shallowMount(AuditEventsApp, { wrapper = shallowMount(AuditEventsApp, {
store,
propsData: { propsData: {
formPath: 'form/path',
isLastPage: true, isLastPage: true,
filterQaSelector, filterQaSelector,
tableQaSelector, tableQaSelector,
...@@ -31,9 +39,20 @@ describe('AuditEventsApp', () => { ...@@ -31,9 +39,20 @@ describe('AuditEventsApp', () => {
}); });
}; };
beforeEach(() => {
store = createStore();
Object.assign(store.state, {
startDate: TEST_START_DATE,
endDate: TEST_END_DATE,
sortBy: TEST_SORT_BY,
filterValue: TEST_FILTER_VALUE,
});
});
afterEach(() => { afterEach(() => {
wrapper.destroy(); wrapper.destroy();
wrapper = null; wrapper = null;
store = null;
}); });
describe('when initialized', () => { describe('when initialized', () => {
...@@ -45,25 +64,51 @@ describe('AuditEventsApp', () => { ...@@ -45,25 +64,51 @@ describe('AuditEventsApp', () => {
expect(wrapper.element).toMatchSnapshot(); expect(wrapper.element).toMatchSnapshot();
}); });
it('sets the form element on the date range field', () => { it('renders audit events table', () => {
const { element } = wrapper.find('form'); expect(wrapper.find(AuditEventsTable).props()).toEqual({
expect(wrapper.find(DateRangeField).props('formElement')).toEqual(element); events,
qaSelector: tableQaSelector,
isLastPage: true,
});
});
it('renders audit events filter', () => {
expect(wrapper.find(AuditEventsFilter).props()).toEqual({
enabledTokenTypes,
qaSelector: filterQaSelector,
value: TEST_FILTER_VALUE,
});
}); });
it('passes its events property to the logs table', () => { it('renders date range field', () => {
expect(wrapper.find(AuditEventsTable).props('events')).toEqual(events); expect(wrapper.find(DateRangeField).props()).toEqual({
startDate: TEST_START_DATE,
endDate: TEST_END_DATE,
});
}); });
it('passes the tables QA selector to the logs table', () => { it('renders sorting field', () => {
expect(wrapper.find(AuditEventsTable).props('qaSelector')).toEqual(tableQaSelector); expect(wrapper.find(SortingField).props()).toEqual({ sortBy: TEST_SORT_BY });
}); });
});
it('passes its available token types to the logs filter', () => { describe('when a field is selected', () => {
expect(wrapper.find(AuditEventsFilter).props('enabledTokenTypes')).toEqual(enabledTokenTypes); beforeEach(() => {
jest.spyOn(store, 'dispatch').mockImplementation();
initComponent();
}); });
it('passes the filters QA selector to the logs filter', () => { it.each`
expect(wrapper.find(AuditEventsFilter).props('qaSelector')).toEqual(filterQaSelector); name | field | action | payload
${'date range'} | ${DateRangeField} | ${'setDateRange'} | ${'test'}
${'sort by'} | ${SortingField} | ${'setSortBy'} | ${'test'}
${'events filter'} | ${AuditEventsFilter} | ${'setFilterValue'} | ${'test'}
`('for $name, it calls $handler', ({ field, action, payload }) => {
expect(store.dispatch).not.toHaveBeenCalled();
wrapper.find(field).vm.$emit('selected', payload);
expect(store.dispatch).toHaveBeenCalledWith(action, payload);
}); });
}); });
}); });
...@@ -6,9 +6,8 @@ import { AVAILABLE_TOKEN_TYPES } from 'ee/audit_events/constants'; ...@@ -6,9 +6,8 @@ import { AVAILABLE_TOKEN_TYPES } from 'ee/audit_events/constants';
describe('AuditEventsFilter', () => { describe('AuditEventsFilter', () => {
let wrapper; let wrapper;
const formElement = document.createElement('form');
formElement.submit = jest.fn();
const value = [{ type: 'Project', value: { data: 1, operator: '=' } }];
const findFilteredSearch = () => wrapper.find(GlFilteredSearch); const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
const getAvailableTokens = () => findFilteredSearch().props('availableTokens'); const getAvailableTokens = () => findFilteredSearch().props('availableTokens');
const getAvailableTokenProps = type => const getAvailableTokenProps = type =>
...@@ -19,9 +18,6 @@ describe('AuditEventsFilter', () => { ...@@ -19,9 +18,6 @@ describe('AuditEventsFilter', () => {
propsData: { propsData: {
...props, ...props,
}, },
methods: {
getFormElement: () => formElement,
},
}); });
}; };
...@@ -46,74 +42,59 @@ describe('AuditEventsFilter', () => { ...@@ -46,74 +42,59 @@ describe('AuditEventsFilter', () => {
}); });
}); });
describe('when the URL query has a search term', () => { describe('when the default token value is set', () => {
const type = 'User';
const id = '1';
beforeEach(() => { beforeEach(() => {
delete window.location; initComponent({ value });
window.location = { search: `entity_type=${type}&entity_id=${id}` };
initComponent();
}); });
it('sets the filtered searched token', () => { it('sets the filtered searched token', () => {
expect(findFilteredSearch().props('value')).toMatchObject([ expect(findFilteredSearch().props('value')).toEqual(value);
{
type,
value: {
data: id,
},
},
]);
}); });
});
describe('when the URL query is empty', () => { it('only one token matching the selected token type is enabled', () => {
beforeEach(() => { expect(getAvailableTokenProps('Project').disabled).toEqual(false);
delete window.location; expect(getAvailableTokenProps('Group').disabled).toEqual(true);
window.location = { search: '' }; expect(getAvailableTokenProps('User').disabled).toEqual(true);
initComponent();
}); });
it('has an empty search value', () => { describe('and the user submits the search field', () => {
expect(findFilteredSearch().vm.value).toEqual([]); beforeEach(() => {
findFilteredSearch().vm.$emit('submit');
});
it('should emit the "submit" event', () => {
expect(wrapper.emitted().submit).toHaveLength(1);
});
}); });
}); });
describe('when submitting the filtered search', () => { describe('when the default token value is not set', () => {
beforeEach(() => { beforeEach(() => {
initComponent(); initComponent();
findFilteredSearch().vm.$emit('submit');
}); });
it("calls submit on this component's FORM element", () => { it('has an empty search value', () => {
expect(formElement.submit).toHaveBeenCalledWith(); expect(findFilteredSearch().vm.value).toEqual([]);
}); });
});
describe('when a search token has been selected', () => { describe('and the user inputs nothing into the search field', () => {
const searchTerm = { beforeEach(() => {
value: { data: '1' }, findFilteredSearch().vm.$emit('input', []);
type: 'Project',
};
beforeEach(() => {
initComponent();
wrapper.setData({
searchTerms: [searchTerm],
}); });
});
it('only one token matching the selected type is available', () => { it('should emit the "selected" event with empty values', () => {
expect(getAvailableTokenProps('Project').disabled).toEqual(false); expect(wrapper.emitted().selected[0]).toEqual([[]]);
expect(getAvailableTokenProps('Group').disabled).toEqual(true); });
expect(getAvailableTokenProps('User').disabled).toEqual(true);
}); describe('and the user submits the search field', () => {
beforeEach(() => {
findFilteredSearch().vm.$emit('submit');
});
it('sets the input values according to the search term', () => { it('should emit the "submit" event', () => {
expect(wrapper.find('input[name="entity_type"]').attributes().value).toEqual(searchTerm.type); expect(wrapper.emitted().submit).toHaveLength(1);
expect(wrapper.find('input[name="entity_id"]').attributes().value).toEqual( });
searchTerm.value.data, });
);
}); });
}); });
......
...@@ -5,81 +5,61 @@ import DateRangeField from 'ee/audit_events/components/date_range_field.vue'; ...@@ -5,81 +5,61 @@ import DateRangeField from 'ee/audit_events/components/date_range_field.vue';
import { parsePikadayDate } from '~/lib/utils/datetime_utility'; import { parsePikadayDate } from '~/lib/utils/datetime_utility';
describe('DateRangeField component', () => { describe('DateRangeField component', () => {
const DATE = '1970-01-01';
let wrapper; let wrapper;
const createComponent = (props = {}) => { const startDate = parsePikadayDate('2020-03-13');
const formElement = document.createElement('form'); const endDate = parsePikadayDate('2020-03-14');
document.body.appendChild(formElement);
return shallowMount(DateRangeField, { const createComponent = (props = {}) => {
propsData: { formElement, ...props }, wrapper = shallowMount(DateRangeField, {
propsData: { ...props },
}); });
}; };
beforeEach(() => {
delete window.location;
window.location = { search: '' };
});
afterEach(() => { afterEach(() => {
document.querySelector('form').remove();
wrapper.destroy(); wrapper.destroy();
wrapper = null;
}); });
it('should populate the initial start date if passed in the query string', () => { it('passes the startDate to the date picker as defaultStartDate', () => {
window.location.search = `?created_after=${DATE}`; createComponent({ startDate });
wrapper = createComponent();
expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({ expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({
defaultStartDate: parsePikadayDate(DATE), defaultStartDate: startDate,
defaultEndDate: null, defaultEndDate: null,
}); });
}); });
it('should populate the initial end date if passed in the query string', () => { it('passes the endDate to the date picker as defaultEndDate', () => {
window.location.search = `?created_before=${DATE}`; createComponent({ endDate });
wrapper = createComponent();
expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({ expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({
defaultStartDate: null, defaultStartDate: null,
defaultEndDate: parsePikadayDate(DATE), defaultEndDate: endDate,
}); });
}); });
it('should populate both the initial start and end dates if passed in the query string', () => { it('passes both startDate and endDate to the date picker as default dates', () => {
window.location.search = `?created_after=${DATE}&created_before=${DATE}`; createComponent({ startDate, endDate });
wrapper = createComponent();
expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({ expect(wrapper.find(GlDaterangePicker).props()).toMatchObject({
defaultStartDate: parsePikadayDate(DATE), defaultStartDate: startDate,
defaultEndDate: parsePikadayDate(DATE), defaultEndDate: endDate,
}); });
}); });
it('should populate the date hidden fields on input', () => { it('should emit the "selected" event with startDate and endDate on input change', () => {
wrapper = createComponent(); createComponent();
wrapper.find(GlDaterangePicker).vm.$emit('input', { startDate, endDate });
wrapper
.find(GlDaterangePicker) return wrapper.vm.$nextTick(() => {
.vm.$emit('input', { startDate: parsePikadayDate(DATE), endDate: parsePikadayDate(DATE) }); expect(wrapper.emitted().selected).toBeTruthy();
expect(wrapper.emitted().selected[0]).toEqual([
return wrapper.vm.$nextTick().then(() => { {
expect(wrapper.find('input[name="created_after"]').attributes().value).toEqual(DATE); startDate,
expect(wrapper.find('input[name="created_before"]').attributes().value).toEqual(DATE); endDate,
}); },
}); ]);
it('should submit the form on input change', () => {
wrapper = createComponent();
const spy = jest.spyOn(wrapper.props().formElement, 'submit');
wrapper
.find(GlDaterangePicker)
.vm.$emit('input', { startDate: parsePikadayDate(DATE), endDate: parsePikadayDate(DATE) });
return wrapper.vm.$nextTick().then(() => {
expect(spy).toHaveBeenCalledTimes(1);
}); });
}); });
}); });
import { shallowMount } from '@vue/test-utils'; import { shallowMount } from '@vue/test-utils';
import { GlNewDropdownItem } from '@gitlab/ui'; import { GlNewDropdownItem } from '@gitlab/ui';
import * as urlUtils from '~/lib/utils/url_utility';
import SortingField from 'ee/audit_events/components/sorting_field.vue'; import SortingField from 'ee/audit_events/components/sorting_field.vue';
describe('SortingField component', () => { describe('SortingField component', () => {
let wrapper; let wrapper;
const DUMMY_URL = 'https://localhost'; const initComponent = (props = {}) => {
const createComponent = () => wrapper = shallowMount(SortingField, {
shallowMount(SortingField, { stubs: { GlNewDropdown: true, GlNewDropdownItem: true } }); propsData: { ...props },
stubs: {
GlNewDropdown: true,
GlNewDropdownItem: true,
},
});
};
const getCheckedOptions = () => const getCheckedOptions = () =>
wrapper.findAll(GlNewDropdownItem).filter(item => item.props().isChecked); wrapper.findAll(GlNewDropdownItem).filter(item => item.props().isChecked);
const getCheckedOptionHref = () => {
return getCheckedOptions()
.at(0)
.attributes().href;
};
beforeEach(() => { beforeEach(() => {
urlUtils.setUrlParams = jest.fn(({ sort }) => `${DUMMY_URL}/?sort=${sort}`); initComponent();
wrapper = createComponent();
}); });
afterEach(() => { afterEach(() => {
wrapper.destroy(); wrapper.destroy();
wrapper = null;
}); });
describe('Sorting behaviour', () => { describe('when initialized', () => {
it('should have sorting options', () => { it('should have sorting options', () => {
expect(wrapper.findAll(GlNewDropdownItem)).toHaveLength(2); expect(wrapper.findAll(GlNewDropdownItem)).toHaveLength(2);
}); });
it('should set the sorting option to `created_desc` by default', () => { it('should set the sorting option to `created_desc` by default', () => {
expect(getCheckedOptions()).toHaveLength(1); expect(getCheckedOptions()).toHaveLength(1);
expect(getCheckedOptionHref()).toBe(`${DUMMY_URL}/?sort=created_desc`);
}); });
it('should get the sorting option from the URL', () => { describe('with a sortBy value', () => {
urlUtils.queryToObject = jest.fn(() => ({ sort: 'created_asc' })); beforeEach(() => {
wrapper = createComponent(); initComponent({
sortBy: 'created_asc',
});
});
expect(getCheckedOptions()).toHaveLength(1); it('should set the sorting option accordingly', () => {
expect(getCheckedOptionHref()).toBe(`${DUMMY_URL}/?sort=created_asc`); expect(getCheckedOptions()).toHaveLength(1);
expect(
getCheckedOptions()
.at(0)
.text(),
).toEqual('Oldest created');
});
}); });
});
it('should retain other params when creating the option URL', () => { describe('when the user clicks on a option', () => {
urlUtils.setUrlParams = jest.fn(({ sort }) => `${DUMMY_URL}/?abc=defg&sort=${sort}`); beforeEach(() => {
urlUtils.queryToObject = jest.fn(() => ({ sort: 'created_desc', abc: 'defg' })); initComponent();
wrapper
wrapper = createComponent(); .findAll(GlNewDropdownItem)
.at(1)
.vm.$emit('click');
});
expect(getCheckedOptionHref()).toBe(`${DUMMY_URL}/?abc=defg&sort=created_desc`); it('should emit the "selected" event with clicked option', () => {
expect(wrapper.emitted().selected).toBeTruthy();
expect(wrapper.emitted().selected[0]).toEqual(['created_asc']);
}); });
}); });
}); });
...@@ -18,10 +18,9 @@ describe('Audit Event actions', () => { ...@@ -18,10 +18,9 @@ describe('Audit Event actions', () => {
}); });
it.each` it.each`
action | type | payload action | type | payload
${'setDateRange'} | ${types.SET_DATE_RANGE} | ${{ startDate, endDate }} ${'setDateRange'} | ${types.SET_DATE_RANGE} | ${{ startDate, endDate }}
${'setFilterValue'} | ${types.SET_FILTER_VALUE} | ${{ id: '1', type: 'user' }} ${'setSortBy'} | ${types.SET_SORT_BY} | ${'created_asc'}
${'setSortBy'} | ${types.SET_SORT_BY} | ${'created_asc'}
`( `(
'$action should commit $type with $payload and dispatches "searchForAuditEvents"', '$action should commit $type with $payload and dispatches "searchForAuditEvents"',
({ action, type, payload }) => { ({ action, type, payload }) => {
...@@ -40,6 +39,11 @@ describe('Audit Event actions', () => { ...@@ -40,6 +39,11 @@ describe('Audit Event actions', () => {
}, },
); );
it('setFilterValue action should commit to the store', () => {
const payload = [{ type: 'User', value: { data: 1, operator: '=' } }];
testAction(actions.setFilterValue, payload, state, [{ type: types.SET_FILTER_VALUE, payload }]);
});
describe('searchForAuditEvents', () => { describe('searchForAuditEvents', () => {
let spy; let spy;
......
...@@ -15,10 +15,10 @@ describe('Audit Event mutations', () => { ...@@ -15,10 +15,10 @@ describe('Audit Event mutations', () => {
}); });
it.each` it.each`
mutation | payload | expectedState mutation | payload | expectedState
${types.SET_FILTER_VALUE} | ${{ id: '1', type: 'user' }} | ${{ filterValue: { id: '1', type: 'user' } }} ${types.SET_FILTER_VALUE} | ${[{ type: 'User', value: { data: 1, operator: '=' } }]} | ${{ filterValue: [{ type: 'User', value: { data: 1, operator: '=' } }] }}
${types.SET_DATE_RANGE} | ${{ startDate, endDate }} | ${{ startDate, endDate }} ${types.SET_DATE_RANGE} | ${{ startDate, endDate }} | ${{ startDate, endDate }}
${types.SET_SORT_BY} | ${'created_asc'} | ${{ sortBy: 'created_asc' }} ${types.SET_SORT_BY} | ${'created_asc'} | ${{ sortBy: 'created_asc' }}
`( `(
'$mutation with payload $payload will update state with $expectedState', '$mutation with payload $payload will update state with $expectedState',
({ mutation, payload, expectedState }) => { ({ mutation, payload, expectedState }) => {
...@@ -32,7 +32,7 @@ describe('Audit Event mutations', () => { ...@@ -32,7 +32,7 @@ describe('Audit Event mutations', () => {
describe(`${types.INITIALIZE_AUDIT_EVENTS}`, () => { describe(`${types.INITIALIZE_AUDIT_EVENTS}`, () => {
const payload = { const payload = {
entity_id: '1', entity_id: '1',
entity_type: 'user', entity_type: 'User',
created_after: startDate, created_after: startDate,
created_before: endDate, created_before: endDate,
sort: 'created_asc', sort: 'created_asc',
...@@ -40,7 +40,7 @@ describe('Audit Event mutations', () => { ...@@ -40,7 +40,7 @@ describe('Audit Event mutations', () => {
it.each` it.each`
stateKey | expectedState stateKey | expectedState
${'filterValue'} | ${{ id: payload.entity_id, type: payload.entity_type }} ${'filterValue'} | ${[{ type: payload.entity_type, value: { data: payload.entity_id, operator: '=' } }]}
${'startDate'} | ${payload.created_after} ${'startDate'} | ${payload.created_after}
${'endDate'} | ${payload.created_before} ${'endDate'} | ${payload.created_before}
${'sortBy'} | ${payload.sort} ${'sortBy'} | ${payload.sort}
......
...@@ -8,6 +8,7 @@ describe('Audit Event Utils', () => { ...@@ -8,6 +8,7 @@ describe('Audit Event Utils', () => {
created_before: '2020-04-13', created_before: '2020-04-13',
sortBy: 'created_asc', sortBy: 'created_asc',
}; };
expect(parseAuditEventSearchQuery(input)).toEqual({ expect(parseAuditEventSearchQuery(input)).toEqual({
created_after: new Date('2020-03-13'), created_after: new Date('2020-03-13'),
created_before: new Date('2020-04-13'), created_before: new Date('2020-04-13'),
...@@ -19,20 +20,19 @@ describe('Audit Event Utils', () => { ...@@ -19,20 +20,19 @@ describe('Audit Event Utils', () => {
describe('createAuditEventSearchQuery', () => { describe('createAuditEventSearchQuery', () => {
it('returns a query object with remapped keys and stringified dates', () => { it('returns a query object with remapped keys and stringified dates', () => {
const input = { const input = {
filterValue: { filterValue: [{ type: 'User', value: { data: '1', operator: '=' } }],
id: '1',
type: 'user',
},
startDate: new Date('2020-03-13'), startDate: new Date('2020-03-13'),
endDate: new Date('2020-04-13'), endDate: new Date('2020-04-13'),
sortBy: 'bar', sortBy: 'bar',
}; };
expect(createAuditEventSearchQuery(input)).toEqual({ expect(createAuditEventSearchQuery(input)).toEqual({
entity_id: '1', entity_id: '1',
entity_type: 'user', entity_type: 'User',
created_after: '2020-03-13', created_after: '2020-03-13',
created_before: '2020-04-13', created_before: '2020-04-13',
sort: 'bar', sort: 'bar',
page: null,
}); });
}); });
}); });
......
...@@ -74,8 +74,8 @@ describe('Vulnerability list component', () => { ...@@ -74,8 +74,8 @@ describe('Vulnerability list component', () => {
}); });
it('should not show the checkboxes if shouldShowSelection is passed in', () => { it('should not show the checkboxes if shouldShowSelection is passed in', () => {
expect(findCheckAllCheckboxCell().classes()).toContain('d-none'); expect(findCheckAllCheckboxCell().classes()).toContain('gl-display-none');
expect(findFirstCheckboxCell().classes()).toContain('d-none'); expect(findFirstCheckboxCell().classes()).toContain('gl-display-none');
}); });
}); });
......
...@@ -18,9 +18,13 @@ RSpec.describe Gitlab::Auth::GroupSaml::FailureHandler do ...@@ -18,9 +18,13 @@ RSpec.describe Gitlab::Auth::GroupSaml::FailureHandler do
'omniauth.error.strategy' => strategy, 'omniauth.error.strategy' => strategy,
'devise.mapping' => Devise.mappings[:user], 'devise.mapping' => Devise.mappings[:user],
'warden' => warden, 'warden' => warden,
'action_dispatch.key_generator' => ActiveSupport::KeyGenerator.new('b2efbaccbdb9548217eebc73a896db73'), # necessary for setting signed cookies in lib/gitlab/experimentation.rb # The following are necessary for setting signed/encrypted cookies such as in
'action_dispatch.signed_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3', # necessary for setting signed cookies in lib/gitlab/experimentation.rb # lib/gitlab/experimentation.rb or app/controllers/concerns/known_sign_in.rb
'action_dispatch.cookies_rotations' => OpenStruct.new(signed: []) # necessary for setting signed cookies in lib/gitlab/experimentation.rb 'action_dispatch.key_generator' => ActiveSupport::KeyGenerator.new('b2efbaccbdb9548217eebc73a896db73'),
'action_dispatch.signed_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3',
'action_dispatch.encrypted_signed_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3',
'action_dispatch.encrypted_cookie_salt' => 'a4fb52b0ccb302eaef92bda18fedf5c3',
'action_dispatch.cookies_rotations' => OpenStruct.new(signed: [], encrypted: [])
} }
Rack::MockRequest.env_for(path, params) Rack::MockRequest.env_for(path, params)
end end
......
...@@ -2,9 +2,11 @@ ...@@ -2,9 +2,11 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Geo::DeletedProject, type: :model do RSpec.describe Geo::DeletedProject, :geo, type: :model do
include StubConfiguration include StubConfiguration
subject { described_class.new(id: 1, name: 'sample', disk_path: 'root/sample', repository_storage: 'foo') }
before do before do
storages = { storages = {
'foo' => { 'path' => 'tmp/tests/storage_foo' }, 'foo' => { 'path' => 'tmp/tests/storage_foo' },
...@@ -14,11 +16,23 @@ RSpec.describe Geo::DeletedProject, type: :model do ...@@ -14,11 +16,23 @@ RSpec.describe Geo::DeletedProject, type: :model do
stub_storage_settings(storages) stub_storage_settings(storages)
end end
subject { described_class.new(id: 1, name: 'sample', disk_path: 'root/sample', repository_storage: 'foo') } describe 'attributes' do
it { is_expected.to respond_to(:id) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:disk_path) }
end
it { is_expected.to respond_to(:id) } describe 'validations' do
it { is_expected.to respond_to(:name) } it { is_expected.to validate_presence_of(:id) }
it { is_expected.to respond_to(:disk_path) } it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_presence_of(:disk_path) }
end
describe 'attributes' do
it { is_expected.to respond_to(:id) }
it { is_expected.to respond_to(:name) }
it { is_expected.to respond_to(:disk_path) }
end
describe '#full_path' do describe '#full_path' do
it 'is an alias for disk_path' do it 'is an alias for disk_path' do
......
...@@ -25,6 +25,154 @@ RSpec.describe Geo::ProjectRegistry, :geo_fdw do ...@@ -25,6 +25,154 @@ RSpec.describe Geo::ProjectRegistry, :geo_fdw do
it { is_expected.to validate_uniqueness_of(:project) } it { is_expected.to validate_uniqueness_of(:project) }
end end
describe '.find_registry_differences' do
let!(:secondary) { create(:geo_node) }
let!(:synced_group) { create(:group) }
let!(:nested_group) { create(:group, parent: synced_group) }
let!(:project_1) { create(:project, group: synced_group) }
let!(:project_2) { create(:project, group: nested_group) }
let!(:project_3) { create(:project) }
let!(:project_4) { create(:project) }
let!(:project_5) { create(:project, :broken_storage) }
let!(:project_6) { create(:project, :broken_storage) }
before do
stub_current_geo_node(secondary)
end
context 'untracked IDs' do
before do
create(:geo_project_registry, project_id: project_1.id)
create(:geo_project_registry, :sync_failed, project_id: project_3.id)
create(:geo_project_registry, project_id: project_5.id)
end
it 'includes project IDs without an entry on the tracking database' do
range = Project.minimum(:id)..Project.maximum(:id)
untracked_ids, _ = described_class.find_registry_differences(range)
expect(untracked_ids).to match_array([project_2.id, project_4.id, project_6.id])
end
it 'excludes projects outside the ID range' do
untracked_ids, _ = described_class.find_registry_differences(project_4.id..project_6.id)
expect(untracked_ids).to match_array([project_4.id, project_6.id])
end
context 'with selective sync by namespace' do
let(:secondary) { create(:geo_node, selective_sync_type: 'namespaces', namespaces: [synced_group]) }
it 'excludes project IDs that are not in selectively synced projects' do
range = Project.minimum(:id)..Project.maximum(:id)
untracked_ids, _ = described_class.find_registry_differences(range)
expect(untracked_ids).to match_array([project_2.id])
end
end
context 'with selective sync by shard' do
let(:secondary) { create(:geo_node, selective_sync_type: 'shards', selective_sync_shards: ['broken']) }
it 'excludes project IDs that are not in selectively synced projects' do
range = Project.minimum(:id)..Project.maximum(:id)
untracked_ids, _ = described_class.find_registry_differences(range)
expect(untracked_ids).to match_array([project_6.id])
end
end
end
context 'unused tracked IDs' do
context 'with an orphaned registry' do
let!(:orphaned) { create(:geo_project_registry, project_id: project_1.id) }
before do
project_1.delete
end
it 'includes tracked IDs that do not exist in the model table' do
range = project_1.id..project_1.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to match_array([project_1.id])
end
it 'excludes IDs outside the ID range' do
range = (project_1.id + 1)..Project.maximum(:id)
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to be_empty
end
end
context 'with selective sync by namespace' do
let(:secondary) { create(:geo_node, selective_sync_type: 'namespaces', namespaces: [synced_group]) }
context 'with a tracked project' do
context 'excluded from selective sync' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_3.id) }
it 'includes tracked project IDs that exist but are not in a selectively synced project' do
range = project_3.id..project_3.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to match_array([project_3.id])
end
end
context 'included in selective sync' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_1.id) }
it 'excludes tracked project IDs that are in selectively synced projects' do
range = project_1.id..project_1.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to be_empty
end
end
end
end
context 'with selective sync by shard' do
let(:secondary) { create(:geo_node, selective_sync_type: 'shards', selective_sync_shards: ['broken']) }
context 'with a tracked project' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_1.id) }
context 'excluded from selective sync' do
it 'includes tracked project IDs that exist but are not in a selectively synced project' do
range = project_1.id..project_1.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to match_array([project_1.id])
end
end
context 'included in selective sync' do
let!(:registry_entry) { create(:geo_project_registry, project_id: project_5.id) }
it 'excludes tracked project IDs that are in selectively synced projects' do
range = project_5.id..project_5.id
_, unused_tracked_ids = described_class.find_registry_differences(range)
expect(unused_tracked_ids).to be_empty
end
end
end
end
end
end
describe '.synced_repos' do describe '.synced_repos' do
it 'returns clean projects where last attempt to sync succeeded' do it 'returns clean projects where last attempt to sync succeeded' do
expected = [] expected = []
......
...@@ -141,6 +141,34 @@ RSpec.describe Packages::Package, type: :model do ...@@ -141,6 +141,34 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) } it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end end
context 'maven package' do
subject { create(:maven_package) }
it { is_expected.to allow_value('0').for(:version) }
it { is_expected.to allow_value('1').for(:version) }
it { is_expected.to allow_value('10').for(:version) }
it { is_expected.to allow_value('1.0').for(:version) }
it { is_expected.to allow_value('1.3.350.v20200505-1744').for(:version) }
it { is_expected.to allow_value('1.1-beta-2').for(:version) }
it { is_expected.to allow_value('1.2-SNAPSHOT').for(:version) }
it { is_expected.to allow_value('12.1.2-2-1').for(:version) }
it { is_expected.to allow_value('1.2.3..beta').for(:version) }
it { is_expected.to allow_value('1.2.3-beta').for(:version) }
it { is_expected.to allow_value('10.2.3-beta').for(:version) }
it { is_expected.to allow_value('2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq').for(:version) }
it { is_expected.to allow_value('1.2-alpha-1-20050205.060708-1').for(:version) }
it { is_expected.to allow_value('703220b4e2cea9592caeb9f3013f6b1e5335c293').for(:version) }
it { is_expected.to allow_value('RELEASE').for(:version) }
it { is_expected.not_to allow_value('..1.2.3').for(:version) }
it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
it { is_expected.not_to allow_value("\r\t 1.2.3").for(:version) }
it { is_expected.not_to allow_value('1.2.3-4/../../').for(:version) }
it { is_expected.not_to allow_value('1.2.3-4%2e%2e%').for(:version) }
it { is_expected.not_to allow_value('../../../../../1.2.3').for(:version) }
it { is_expected.not_to allow_value('%2e%2e%2f1.2.3').for(:version) }
end
it_behaves_like 'validating version to be SemVer compliant for', :npm_package it_behaves_like 'validating version to be SemVer compliant for', :npm_package
it_behaves_like 'validating version to be SemVer compliant for', :nuget_package it_behaves_like 'validating version to be SemVer compliant for', :nuget_package
end end
......
...@@ -46,6 +46,10 @@ RSpec.describe Geo::RegistryConsistencyService, :geo, :use_clean_rails_memory_st ...@@ -46,6 +46,10 @@ RSpec.describe Geo::RegistryConsistencyService, :geo, :use_clean_rails_memory_st
expect(registry_class).to respond_to(:delete_for_model_ids) expect(registry_class).to respond_to(:delete_for_model_ids)
end end
it 'responds to .find_registry_differences' do
expect(registry_class).to respond_to(:find_registry_differences)
end
it 'responds to .has_create_events?' do it 'responds to .has_create_events?' do
expect(registry_class).to respond_to(:has_create_events?) expect(registry_class).to respond_to(:has_create_events?)
end end
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Geo::RepositoryDestroyService do RSpec.describe Geo::RepositoryDestroyService, :geo do
include ::EE::GeoHelpers include ::EE::GeoHelpers
let_it_be(:secondary) { create(:geo_node) } let_it_be(:secondary) { create(:geo_node) }
...@@ -128,5 +128,49 @@ RSpec.describe Geo::RepositoryDestroyService do ...@@ -128,5 +128,49 @@ RSpec.describe Geo::RepositoryDestroyService do
expect(Geo::DesignRegistry.where(project: project)).to be_empty expect(Geo::DesignRegistry.where(project: project)).to be_empty
end end
end end
context 'with an unused registry' do
let!(:project) { create(:project_empty_repo, :legacy_storage) }
let!(:unused_project_registry) { create(:geo_project_registry, project_id: project.id) }
let!(:unused_design_registry) { create(:geo_design_registry, project_id: project.id) }
subject(:service) { described_class.new(project.id) }
context 'when the replicable model does not exist' do
before do
project.delete
end
it 'does not delegate project removal to Projects::DestroyService' do
expect_any_instance_of(EE::Projects::DestroyService).not_to receive(:geo_replicate)
service.execute
end
it 'removes the registry entries' do
service.execute
expect(Geo::ProjectRegistry.where(project: project)).to be_empty
expect(Geo::DesignRegistry.where(project: project)).to be_empty
end
end
context 'when the replicable model exists' do
subject(:service) { described_class.new(project.id) }
it 'delegates project removal to Projects::DestroyService' do
expect_any_instance_of(EE::Projects::DestroyService).to receive(:geo_replicate)
service.execute
end
it 'removes the registry entries' do
service.execute
expect(Geo::ProjectRegistry.where(project: project)).to be_empty
expect(Geo::DesignRegistry.where(project: project)).to be_empty
end
end
end
end end
end end
...@@ -76,13 +76,15 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do ...@@ -76,13 +76,15 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
# Somewhat of an integration test # Somewhat of an integration test
it 'creates missing registries for each registry class' do it 'creates missing registries for each registry class' do
lfs_object = create(:lfs_object)
job_artifact = create(:ci_job_artifact) job_artifact = create(:ci_job_artifact)
lfs_object = create(:lfs_object)
project = create(:project)
upload = create(:upload) upload = create(:upload)
package_file = create(:conan_package_file, :conan_package) package_file = create(:conan_package_file, :conan_package)
expect(Geo::LfsObjectRegistry.where(lfs_object_id: lfs_object.id).count).to eq(0) expect(Geo::LfsObjectRegistry.where(lfs_object_id: lfs_object.id).count).to eq(0)
expect(Geo::JobArtifactRegistry.where(artifact_id: job_artifact.id).count).to eq(0) expect(Geo::JobArtifactRegistry.where(artifact_id: job_artifact.id).count).to eq(0)
expect(Geo::ProjectRegistry.where(project_id: project.id).count).to eq(0)
expect(Geo::UploadRegistry.where(file_id: upload.id).count).to eq(0) expect(Geo::UploadRegistry.where(file_id: upload.id).count).to eq(0)
expect(Geo::PackageFileRegistry.where(package_file_id: package_file.id).count).to eq(0) expect(Geo::PackageFileRegistry.where(package_file_id: package_file.id).count).to eq(0)
...@@ -90,13 +92,12 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do ...@@ -90,13 +92,12 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
expect(Geo::LfsObjectRegistry.where(lfs_object_id: lfs_object.id).count).to eq(1) expect(Geo::LfsObjectRegistry.where(lfs_object_id: lfs_object.id).count).to eq(1)
expect(Geo::JobArtifactRegistry.where(artifact_id: job_artifact.id).count).to eq(1) expect(Geo::JobArtifactRegistry.where(artifact_id: job_artifact.id).count).to eq(1)
expect(Geo::ProjectRegistry.where(project_id: project.id).count).to eq(1)
expect(Geo::UploadRegistry.where(file_id: upload.id).count).to eq(1) expect(Geo::UploadRegistry.where(file_id: upload.id).count).to eq(1)
expect(Geo::PackageFileRegistry.where(package_file_id: package_file.id).count).to eq(1) expect(Geo::PackageFileRegistry.where(package_file_id: package_file.id).count).to eq(1)
end end
context 'when geo_file_registry_ssot_sync is disabled' do context 'when geo_file_registry_ssot_sync is disabled' do
let_it_be(:upload) { create(:upload) }
before do before do
stub_feature_flags(geo_file_registry_ssot_sync: false) stub_feature_flags(geo_file_registry_ssot_sync: false)
end end
...@@ -109,6 +110,7 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do ...@@ -109,6 +110,7 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::JobArtifactRegistry, batch_size: 1000).and_call_original allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::JobArtifactRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::LfsObjectRegistry, batch_size: 1000).and_call_original allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::LfsObjectRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::PackageFileRegistry, batch_size: 1000).and_call_original allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::PackageFileRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::ProjectRegistry, batch_size: 1000).and_call_original
expect(Geo::RegistryConsistencyService).not_to receive(:new).with(Geo::UploadRegistry, batch_size: 1000) expect(Geo::RegistryConsistencyService).not_to receive(:new).with(Geo::UploadRegistry, batch_size: 1000)
...@@ -116,6 +118,27 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do ...@@ -116,6 +118,27 @@ RSpec.describe Geo::Secondary::RegistryConsistencyWorker, :geo, :geo_fdw do
end end
end end
context 'when geo_project_registry_ssot_sync is disabled' do
before do
stub_feature_flags(geo_project_registry_ssot_sync: false)
end
it 'returns false' do
expect(subject.perform).to be_falsey
end
it 'does not execute RegistryConsistencyService for projects' do
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::JobArtifactRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::LfsObjectRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::PackageFileRegistry, batch_size: 1000).and_call_original
allow(Geo::RegistryConsistencyService).to receive(:new).with(Geo::UploadRegistry, batch_size: 1000).and_call_original
expect(Geo::RegistryConsistencyService).not_to receive(:new).with(Geo::ProjectRegistry, batch_size: 1000)
subject.perform
end
end
context 'when the current Geo node is disabled or primary' do context 'when the current Geo node is disabled or primary' do
before do before do
stub_primary_node stub_primary_node
......
...@@ -2,16 +2,31 @@ ...@@ -2,16 +2,31 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe GeoRepositoryDestroyWorker do RSpec.describe GeoRepositoryDestroyWorker, :geo do
describe '#perform' do describe '#perform' do
it 'delegates project removal to Geo::RepositoryDestroyService' do let(:project) { create(:project) }
project = create(:project)
expect_next_instance_of(Geo::RepositoryDestroyService) do |instance| context 'with an existing project' do
expect(instance).to receive(:execute) it 'delegates project removal to Geo::RepositoryDestroyService' do
expect_next_instance_of(Geo::RepositoryDestroyService) do |instance|
expect(instance).to receive(:execute)
end
subject.perform(project.id, project.name, project.path, 'default')
end end
end
context 'with project ID from an orphaned registry' do
it 'delegates project removal to Geo::RepositoryDestroyService' do
registry = create(:geo_project_registry, project_id: project.id)
project.delete
described_class.new.perform(project.id, project.name, project.path, 'default') expect_next_instance_of(Geo::RepositoryDestroyService) do |instance|
expect(instance).to receive(:execute)
end
subject.perform(registry.project_id)
end
end end
end end
end end
...@@ -43,6 +43,10 @@ module Gitlab ...@@ -43,6 +43,10 @@ module Gitlab
@maven_app_name_regex ||= /\A[\w\-\.]+\z/.freeze @maven_app_name_regex ||= /\A[\w\-\.]+\z/.freeze
end end
def maven_version_regex
@maven_version_regex ||= /\A(\.?[\w\+-]+\.?)+\z/.freeze
end
def maven_app_group_regex def maven_app_group_regex
maven_app_name_regex maven_app_name_regex
end end
......
...@@ -972,6 +972,9 @@ msgstr "" ...@@ -972,6 +972,9 @@ msgstr ""
msgid "A Let's Encrypt account will be configured for this GitLab installation using your email address. You will receive emails to warn of expiring certificates." msgid "A Let's Encrypt account will be configured for this GitLab installation using your email address. You will receive emails to warn of expiring certificates."
msgstr "" msgstr ""
msgid "A Terraform report was generated in your pipelines."
msgstr ""
msgid "A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages" msgid "A basic page and serverless function that uses AWS Lambda, AWS API Gateway, and GitLab Pages"
msgstr "" msgstr ""
...@@ -1059,9 +1062,6 @@ msgstr "" ...@@ -1059,9 +1062,6 @@ msgstr ""
msgid "A suggestion is not applicable." msgid "A suggestion is not applicable."
msgstr "" msgstr ""
msgid "A terraform report was generated in your pipelines."
msgstr ""
msgid "A user with write access to the source branch selected this option" msgid "A user with write access to the source branch selected this option"
msgstr "" msgstr ""
...@@ -2392,9 +2392,6 @@ msgstr "" ...@@ -2392,9 +2392,6 @@ msgstr ""
msgid "An error occurred while loading project creation UI" msgid "An error occurred while loading project creation UI"
msgstr "" msgstr ""
msgid "An error occurred while loading terraform report"
msgstr ""
msgid "An error occurred while loading the data. Please try again." msgid "An error occurred while loading the data. Please try again."
msgstr "" msgstr ""
...@@ -4054,9 +4051,6 @@ msgstr "" ...@@ -4054,9 +4051,6 @@ msgstr ""
msgid "Changes are still tracked. Useful for cluster/index migrations." msgid "Changes are still tracked. Useful for cluster/index migrations."
msgstr "" msgstr ""
msgid "Changes are unknown"
msgstr ""
msgid "Changes suppressed. Click to show." msgid "Changes suppressed. Click to show."
msgstr "" msgstr ""
...@@ -10187,6 +10181,9 @@ msgstr "" ...@@ -10187,6 +10181,9 @@ msgstr ""
msgid "Generate new export" msgid "Generate new export"
msgstr "" msgstr ""
msgid "Generating the report caused an error."
msgstr ""
msgid "Geo" msgid "Geo"
msgstr "" msgstr ""
...@@ -22235,6 +22232,9 @@ msgstr "" ...@@ -22235,6 +22232,9 @@ msgstr ""
msgid "The Prometheus server responded with \"bad request\". Please check your queries are correct and are supported in your Prometheus version. %{documentationLink}" msgid "The Prometheus server responded with \"bad request\". Please check your queries are correct and are supported in your Prometheus version. %{documentationLink}"
msgstr "" msgstr ""
msgid "The Terraform report %{name} was generated in your pipelines."
msgstr ""
msgid "The URL defined on the primary node that secondary nodes should use to contact it. Defaults to URL" msgid "The URL defined on the primary node that secondary nodes should use to contact it. Defaults to URL"
msgstr "" msgstr ""
......
...@@ -75,7 +75,7 @@ RSpec.describe SortingPreference do ...@@ -75,7 +75,7 @@ RSpec.describe SortingPreference do
it 'sets the cookie with the right values and flags' do it 'sets the cookie with the right values and flags' do
subject subject
expect(cookies['issue_sort']).to eq(value: 'popularity', secure: false, httponly: false) expect(cookies['issue_sort']).to eq(expires: nil, value: 'popularity', secure: false, httponly: false)
end end
end end
...@@ -86,7 +86,7 @@ RSpec.describe SortingPreference do ...@@ -86,7 +86,7 @@ RSpec.describe SortingPreference do
it 'sets the cookie with the right values and flags' do it 'sets the cookie with the right values and flags' do
subject subject
expect(cookies['issue_sort']).to eq(value: 'created_asc', secure: false, httponly: false) expect(cookies['issue_sort']).to eq(expires: nil, value: 'created_asc', secure: false, httponly: false)
end end
end end
end end
......
...@@ -91,6 +91,17 @@ RSpec.describe Projects::MergeRequests::DiffsController do ...@@ -91,6 +91,17 @@ RSpec.describe Projects::MergeRequests::DiffsController do
end end
end end
shared_examples "diff note on-demand position creation" do
it "updates diff discussion positions" do
service = double("service")
expect(Discussions::CaptureDiffNotePositionsService).to receive(:new).with(merge_request).and_return(service)
expect(service).to receive(:execute)
go
end
end
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
let(:user) { create(:user) } let(:user) { create(:user) }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) } let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
...@@ -146,6 +157,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do ...@@ -146,6 +157,7 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'persisted preferred diff view cookie' it_behaves_like 'persisted preferred diff view cookie'
it_behaves_like 'cached diff collection' it_behaves_like 'cached diff collection'
it_behaves_like 'diff note on-demand position creation'
end end
describe 'GET diffs_metadata' do describe 'GET diffs_metadata' do
......
...@@ -10,6 +10,7 @@ import { ...@@ -10,6 +10,7 @@ import {
addDashboardMetaDataToLink, addDashboardMetaDataToLink,
normalizeCustomDashboardPath, normalizeCustomDashboardPath,
} from '~/monitoring/stores/utils'; } from '~/monitoring/stores/utils';
import * as urlUtils from '~/lib/utils/url_utility';
import { annotationsData } from '../mock_data'; import { annotationsData } from '../mock_data';
import { NOT_IN_DB_PREFIX } from '~/monitoring/constants'; import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
...@@ -399,6 +400,118 @@ describe('mapToDashboardViewModel', () => { ...@@ -399,6 +400,118 @@ describe('mapToDashboardViewModel', () => {
}); });
}); });
}); });
describe('templating variables mapping', () => {
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
});
afterEach(() => {
urlUtils.queryToObject.mockRestore();
});
it('sets variables as-is from yml file if URL has no variables', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce();
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'kubernetes',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'kubernetes-2',
},
},
});
});
it('sets variables as-is from yml file if URL has no matching variables', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce({
'var-environment': 'POD',
});
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'kubernetes',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'kubernetes-2',
},
},
});
});
it('merges variables from URL with the ones from yml file', () => {
const response = {
dashboard: 'Dashboard Name',
links: [],
templating: {
variables: {
pod: 'kubernetes',
pod_2: 'kubernetes-2',
},
},
};
urlUtils.queryToObject.mockReturnValueOnce({
'var-environment': 'POD',
'var-pod': 'POD1',
'var-pod_2': 'POD2',
});
expect(mapToDashboardViewModel(response)).toMatchObject({
dashboard: 'Dashboard Name',
links: [],
variables: {
pod: {
label: 'pod',
type: 'text',
value: 'POD1',
},
pod_2: {
label: 'pod_2',
type: 'text',
value: 'POD2',
},
},
});
});
});
}); });
describe('uniqMetricsId', () => { describe('uniqMetricsId', () => {
......
import { parseTemplatingVariables } from '~/monitoring/stores/variable_mapping'; import { parseTemplatingVariables, mergeURLVariables } from '~/monitoring/stores/variable_mapping';
import * as urlUtils from '~/lib/utils/url_utility';
import { mockTemplatingData, mockTemplatingDataResponses } from '../mock_data'; import { mockTemplatingData, mockTemplatingDataResponses } from '../mock_data';
describe('parseTemplatingVariables', () => { describe('parseTemplatingVariables', () => {
...@@ -21,3 +22,73 @@ describe('parseTemplatingVariables', () => { ...@@ -21,3 +22,73 @@ describe('parseTemplatingVariables', () => {
expect(parseTemplatingVariables(input?.dashboard?.templating)).toEqual(expected); expect(parseTemplatingVariables(input?.dashboard?.templating)).toEqual(expected);
}); });
}); });
describe('mergeURLVariables', () => {
beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject');
});
afterEach(() => {
urlUtils.queryToObject.mockRestore();
});
it('returns empty object if variables are not defined in yml or URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
expect(mergeURLVariables({})).toEqual({});
});
it('returns empty object if variables are defined in URL but not in yml', () => {
urlUtils.queryToObject.mockReturnValueOnce({
'var-env': 'one',
'var-instance': 'localhost',
});
expect(mergeURLVariables({})).toEqual({});
});
it('returns yml variables if variables defined in yml but not in the URL', () => {
urlUtils.queryToObject.mockReturnValueOnce({});
const params = {
env: 'one',
instance: 'localhost',
};
expect(mergeURLVariables(params)).toEqual(params);
});
it('returns yml variables if variables defined in URL do not match with yml variables', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost',
};
const ymlParams = {
pod: { value: 'one' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(mergeURLVariables(ymlParams)).toEqual(ymlParams);
});
it('returns merged yml and URL variables if there is some match', () => {
const urlParams = {
'var-env': 'one',
'var-instance': 'localhost:8080',
};
const ymlParams = {
instance: { value: 'localhost' },
service: { value: 'database' },
};
const merged = {
instance: { value: 'localhost:8080' },
service: { value: 'database' },
};
urlUtils.queryToObject.mockReturnValueOnce(urlParams);
expect(mergeURLVariables(ymlParams)).toEqual(merged);
});
});
...@@ -169,8 +169,8 @@ describe('monitoring/utils', () => { ...@@ -169,8 +169,8 @@ describe('monitoring/utils', () => {
}); });
}); });
describe('getPromCustomVariablesFromUrl', () => { describe('templatingVariablesFromUrl', () => {
const { getPromCustomVariablesFromUrl } = monitoringUtils; const { templatingVariablesFromUrl } = monitoringUtils;
beforeEach(() => { beforeEach(() => {
jest.spyOn(urlUtils, 'queryToObject'); jest.spyOn(urlUtils, 'queryToObject');
...@@ -195,7 +195,7 @@ describe('monitoring/utils', () => { ...@@ -195,7 +195,7 @@ describe('monitoring/utils', () => {
'var-pod': 'POD', 'var-pod': 'POD',
}); });
expect(getPromCustomVariablesFromUrl()).toEqual(expect.objectContaining({ pod: 'POD' })); expect(templatingVariablesFromUrl()).toEqual(expect.objectContaining({ pod: 'POD' }));
}); });
it('returns an empty object when no custom variables are present', () => { it('returns an empty object when no custom variables are present', () => {
...@@ -203,7 +203,7 @@ describe('monitoring/utils', () => { ...@@ -203,7 +203,7 @@ describe('monitoring/utils', () => {
dashboard: '.gitlab/dashboards/custom_dashboard.yml', dashboard: '.gitlab/dashboards/custom_dashboard.yml',
}); });
expect(getPromCustomVariablesFromUrl()).toStrictEqual({}); expect(templatingVariablesFromUrl()).toStrictEqual({});
}); });
}); });
...@@ -427,76 +427,6 @@ describe('monitoring/utils', () => { ...@@ -427,76 +427,6 @@ describe('monitoring/utils', () => {
}); });
}); });
describe('mergeURLVariables', () => {
  // Stubs the parsed URL query string for the next call only.
  const stubUrlQuery = query => urlUtils.queryToObject.mockReturnValueOnce(query);

  beforeEach(() => {
    jest.spyOn(urlUtils, 'queryToObject');
  });

  afterEach(() => {
    urlUtils.queryToObject.mockRestore();
  });

  it('returns empty object if variables are not defined in yml or URL', () => {
    stubUrlQuery({});

    expect(monitoringUtils.mergeURLVariables({})).toEqual({});
  });

  it('returns empty object if variables are defined in URL but not in yml', () => {
    stubUrlQuery({ 'var-env': 'one', 'var-instance': 'localhost' });

    expect(monitoringUtils.mergeURLVariables({})).toEqual({});
  });

  it('returns yml variables if variables defined in yml but not in the URL', () => {
    const ymlVariables = { env: 'one', instance: 'localhost' };
    stubUrlQuery({});

    expect(monitoringUtils.mergeURLVariables(ymlVariables)).toEqual(ymlVariables);
  });

  it('returns yml variables if variables defined in URL do not match with yml variables', () => {
    const queryVariables = { 'var-env': 'one', 'var-instance': 'localhost' };
    const ymlVariables = { pod: { value: 'one' }, service: { value: 'database' } };
    stubUrlQuery(queryVariables);

    expect(monitoringUtils.mergeURLVariables(ymlVariables)).toEqual(ymlVariables);
  });

  it('returns merged yml and URL variables if there is some match', () => {
    const queryVariables = { 'var-env': 'one', 'var-instance': 'localhost:8080' };
    const ymlVariables = { instance: { value: 'localhost' }, service: { value: 'database' } };
    const expectedMerge = {
      instance: { value: 'localhost:8080' },
      service: { value: 'database' },
    };
    stubUrlQuery(queryVariables);

    expect(monitoringUtils.mergeURLVariables(ymlVariables)).toEqual(expectedMerge);
  });
});
describe('convertVariablesForURL', () => { describe('convertVariablesForURL', () => {
it.each` it.each`
input | expected input | expected
......
// Fixtures for the Terraform MR widget specs.

// A report missing every expected field — exercises the error/fallback UI.
export const invalidPlan = {};

// A fully populated Terraform plan report.
export const validPlan = {
  create: 10,
  update: 20,
  delete: 30,
  job_name: 'Plan Changes',
  job_path: '/path/to/ci/logs/1',
};

// A second, smaller valid report (not exported on its own).
const smallPlan = {
  create: 1,
  update: 2,
  delete: 3,
  job_name: 'Plan 3',
  job_path: '/path/to/ci/logs/3',
};

// Keyed-by-id collection mixing valid and invalid reports,
// mirroring the polling endpoint's response shape.
export const plans = {
  '1': validPlan,
  '2': invalidPlan,
  '3': smallPlan,
};
import { GlLink, GlLoadingIcon, GlSprintf } from '@gitlab/ui'; import { GlSkeletonLoading } from '@gitlab/ui';
import { plans } from './mock_data';
import { shallowMount } from '@vue/test-utils'; import { shallowMount } from '@vue/test-utils';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import MockAdapter from 'axios-mock-adapter'; import MockAdapter from 'axios-mock-adapter';
import MrWidgetTerraformPlan from '~/vue_merge_request_widget/components/mr_widget_terraform_plan.vue'; import MrWidgetTerraformContainer from '~/vue_merge_request_widget/components/terraform/mr_widget_terraform_container.vue';
import Poll from '~/lib/utils/poll'; import Poll from '~/lib/utils/poll';
import TerraformPlan from '~/vue_merge_request_widget/components/terraform/terraform_plan.vue';
const plan = { describe('MrWidgetTerraformConainer', () => {
create: 10,
update: 20,
delete: 30,
job_path: '/path/to/ci/logs',
};
describe('MrWidgetTerraformPlan', () => {
let mock; let mock;
let wrapper; let wrapper;
const propsData = { endpoint: '/path/to/terraform/report.json' }; const propsData = { endpoint: '/path/to/terraform/report.json' };
const findPlans = () => wrapper.findAll(TerraformPlan).wrappers.map(x => x.props('plan'));
const mockPollingApi = (response, body, header) => { const mockPollingApi = (response, body, header) => {
mock.onGet(propsData.endpoint).reply(response, body, header); mock.onGet(propsData.endpoint).reply(response, body, header);
}; };
const mountWrapper = () => { const mountWrapper = () => {
wrapper = shallowMount(MrWidgetTerraformPlan, { propsData }); wrapper = shallowMount(MrWidgetTerraformContainer, { propsData });
return axios.waitForAll(); return axios.waitForAll();
}; };
...@@ -36,9 +33,9 @@ describe('MrWidgetTerraformPlan', () => { ...@@ -36,9 +33,9 @@ describe('MrWidgetTerraformPlan', () => {
mock.restore(); mock.restore();
}); });
describe('loading poll', () => { describe('when data is loading', () => {
beforeEach(() => { beforeEach(() => {
mockPollingApi(200, { '123': plan }, {}); mockPollingApi(200, plans, {});
return mountWrapper().then(() => { return mountWrapper().then(() => {
wrapper.setData({ loading: true }); wrapper.setData({ loading: true });
...@@ -46,28 +43,20 @@ describe('MrWidgetTerraformPlan', () => { ...@@ -46,28 +43,20 @@ describe('MrWidgetTerraformPlan', () => {
}); });
}); });
it('Diplays loading icon when loading is true', () => { it('diplays loading skeleton', () => {
expect(wrapper.find(GlLoadingIcon).exists()).toBe(true); expect(wrapper.find(GlSkeletonLoading).exists()).toBe(true);
expect(wrapper.find(GlSprintf).exists()).toBe(false);
expect(wrapper.text()).not.toContain( expect(findPlans()).toEqual([]);
'A terraform report was generated in your pipelines. Changes are unknown',
);
}); });
}); });
describe('successful poll', () => { describe('polling', () => {
let pollRequest; let pollRequest;
let pollStop; let pollStop;
beforeEach(() => { beforeEach(() => {
pollRequest = jest.spyOn(Poll.prototype, 'makeRequest'); pollRequest = jest.spyOn(Poll.prototype, 'makeRequest');
pollStop = jest.spyOn(Poll.prototype, 'stop'); pollStop = jest.spyOn(Poll.prototype, 'stop');
mockPollingApi(200, { '123': plan }, {});
return mountWrapper();
}); });
afterEach(() => { afterEach(() => {
...@@ -75,33 +64,43 @@ describe('MrWidgetTerraformPlan', () => { ...@@ -75,33 +64,43 @@ describe('MrWidgetTerraformPlan', () => {
pollStop.mockRestore(); pollStop.mockRestore();
}); });
it('content change text', () => { describe('successful poll', () => {
expect(wrapper.find(GlSprintf).exists()).toBe(true); beforeEach(() => {
}); mockPollingApi(200, plans, {});
it('renders button when url is found', () => { return mountWrapper();
expect(wrapper.find(GlLink).exists()).toBe(true); });
});
it('does not make additional requests after poll is successful', () => { it('diplays terraform components and stops loading', () => {
expect(pollRequest).toHaveBeenCalledTimes(1); expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
expect(pollStop).toHaveBeenCalledTimes(1);
});
});
describe('polling fails', () => { expect(findPlans()).toEqual(Object.values(plans));
beforeEach(() => { });
mockPollingApi(500, null, {});
return mountWrapper(); it('does not make additional requests after poll is successful', () => {
expect(pollRequest).toHaveBeenCalledTimes(1);
expect(pollStop).toHaveBeenCalledTimes(1);
});
}); });
it('does not display changes text when api fails', () => { describe('polling fails', () => {
expect(wrapper.text()).toContain( beforeEach(() => {
'A terraform report was generated in your pipelines. Changes are unknown', mockPollingApi(500, null, {});
); return mountWrapper();
});
it('stops loading', () => {
expect(wrapper.find(GlSkeletonLoading).exists()).toBe(false);
});
expect(wrapper.find('.js-terraform-report-link').exists()).toBe(false); it('generates one broken plan', () => {
expect(wrapper.find(GlLink).exists()).toBe(false); expect(findPlans()).toEqual([{}]);
});
it('does not make additional requests after poll is unsuccessful', () => {
expect(pollRequest).toHaveBeenCalledTimes(1);
expect(pollStop).toHaveBeenCalledTimes(1);
});
}); });
}); });
}); });
import { invalidPlan, validPlan } from './mock_data';
import { GlLink, GlSprintf } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import TerraformPlan from '~/vue_merge_request_widget/components/terraform/terraform_plan.vue';
describe('TerraformPlan', () => {
  let wrapper;

  const findLogButton = () => wrapper.find('.js-terraform-report-link');

  // Mounts with real GlLink/GlSprintf (not stubs) so rendered text can be asserted.
  const mountWrapper = propsData => {
    wrapper = shallowMount(TerraformPlan, { stubs: { GlLink, GlSprintf }, propsData });
  };

  afterEach(() => {
    wrapper.destroy();
  });

  describe('validPlan', () => {
    beforeEach(() => {
      mountWrapper({ plan: validPlan });
    });

    // Fixed typo in test descriptions: "diplays" -> "displays".
    it('displays the plan job_name', () => {
      expect(wrapper.text()).toContain(
        `The Terraform report ${validPlan.job_name} was generated in your pipelines.`,
      );
    });

    it('displays the reported changes', () => {
      expect(wrapper.text()).toContain(
        `Reported Resource Changes: ${validPlan.create} to add, ${validPlan.update} to change, ${validPlan.delete} to delete`,
      );
    });

    it('renders button when url is found', () => {
      expect(findLogButton().exists()).toBe(true);
      expect(findLogButton().text()).toEqual('View full log');
    });
  });

  describe('invalidPlan', () => {
    beforeEach(() => {
      mountWrapper({ plan: invalidPlan });
    });

    it('displays generic header since job_name is missing', () => {
      expect(wrapper.text()).toContain('A Terraform report was generated in your pipelines.');
    });

    it('displays generic error since report values are missing', () => {
      expect(wrapper.text()).toContain('Generating the report caused an error.');
    });

    it('does not render button because url is missing', () => {
      expect(findLogButton().exists()).toBe(false);
    });
  });
});
// Minimal Toast UI editor AST text node carrying the given literal content.
const buildMockTextNode = literal => ({
  firstChild: null,
  literal,
  type: 'text',
});

// Minimal list > item > paragraph > text AST chain wrapping the literal.
const buildMockListNode = literal => ({
  firstChild: {
    firstChild: {
      firstChild: buildMockTextNode(literal),
      type: 'paragraph',
    },
    type: 'item',
  },
  type: 'list',
});

export const kramdownListNode = buildMockListNode('TOC');
export const normalListNode = buildMockListNode('Just another bullet point');

export const kramdownTextNode = buildMockTextNode('{:toc}');
export const normalTextNode = buildMockTextNode('This is just normal text.');
// The text token that the uneditable wrappers are built around.
export const originToken = {
  type: 'text',
  content: '{:.no_toc .hidden-md .hidden-lg}',
};

// Opening <div contenteditable=false> token that renders content read-only.
const uneditableOpenToken = {
  type: 'openTag',
  tagName: 'div',
  attributes: { contenteditable: false },
  classNames: [
    'gl-px-4 gl-py-2 gl-opacity-5 gl-bg-gray-100 gl-user-select-none gl-cursor-not-allowed',
  ],
};

export const uneditableCloseToken = { type: 'closeTag', tagName: 'div' };

// Open wrapper + content, and the fully wrapped open/content/close triple.
export const uneditableOpenTokens = [uneditableOpenToken, originToken];
export const uneditableTokens = [...uneditableOpenTokens, uneditableCloseToken];
import buildCustomHTMLRenderer from '~/vue_shared/components/rich_content_editor/services/build_custom_renderer';
describe('Build Custom Renderer Service', () => {
  describe('buildCustomHTMLRenderer', () => {
    it('should return an object with the default renderer functions when lacking arguments', () => {
      const rendererMap = buildCustomHTMLRenderer();

      expect(rendererMap).toEqual(
        expect.objectContaining({
          list: expect.any(Function),
          text: expect.any(Function),
        }),
      );
    });

    it('should return an object with both custom and default renderer functions when passed customRenderers', () => {
      const customRenderers = { html: [jest.fn()] };
      const rendererMap = buildCustomHTMLRenderer(customRenderers);

      expect(rendererMap).toEqual(
        expect.objectContaining({
          html: expect.any(Function),
          list: expect.any(Function),
          text: expect.any(Function),
        }),
      );
    });
  });
});
import {
buildUneditableOpenTokens,
buildUneditableCloseToken,
buildUneditableTokens,
} from '~/vue_shared/components/rich_content_editor/services/renderers//build_uneditable_token';
import {
originToken,
uneditableOpenTokens,
uneditableCloseToken,
uneditableTokens,
} from '../../mock_data';
describe('Build Uneditable Token renderer helper', () => {
  describe('buildUneditableOpenTokens', () => {
    it('returns a 2-item array of tokens with the originToken appended to an open token', () => {
      const tokens = buildUneditableOpenTokens(originToken);

      expect(tokens).toHaveLength(2);
      expect(tokens).toStrictEqual(uneditableOpenTokens);
    });
  });

  describe('buildUneditableCloseToken', () => {
    it('returns an object literal representing the uneditable close token', () => {
      expect(buildUneditableCloseToken()).toStrictEqual(uneditableCloseToken);
    });
  });

  describe('buildUneditableTokens', () => {
    it('returns a 3-item array of tokens with the originToken wrapped in the middle', () => {
      const tokens = buildUneditableTokens(originToken);

      expect(tokens).toHaveLength(3);
      expect(tokens).toStrictEqual(uneditableTokens);
    });
  });
});
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_list';
import {
buildUneditableOpenTokens,
buildUneditableCloseToken,
} from '~/vue_shared/components/rich_content_editor/services/renderers//build_uneditable_token';
import { kramdownListNode, normalListNode } from '../../mock_data';
describe('Render Kramdown List renderer', () => {
  describe('canRender', () => {
    it('should return true when the argument is a special kramdown TOC ordered/unordered list', () => {
      expect(renderer.canRender(kramdownListNode)).toBe(true);
    });

    it('should return false when the argument is a normal ordered/unordered list', () => {
      expect(renderer.canRender(normalListNode)).toBe(false);
    });
  });

  describe('render', () => {
    const origin = jest.fn();

    it('should return uneditable open tokens when entering', () => {
      expect(renderer.render({ entering: true, origin })).toStrictEqual(
        buildUneditableOpenTokens(origin()),
      );
    });

    it('should return an uneditable close tokens when exiting', () => {
      expect(renderer.render({ entering: false, origin })).toStrictEqual(
        buildUneditableCloseToken(origin()),
      );
    });
  });
});
import renderer from '~/vue_shared/components/rich_content_editor/services/renderers/render_kramdown_text';
import { buildUneditableTokens } from '~/vue_shared/components/rich_content_editor/services/renderers//build_uneditable_token';
import { kramdownTextNode, normalTextNode } from '../../mock_data';
describe('Render Kramdown Text renderer', () => {
  describe('canRender', () => {
    it('should return true when the argument `literal` has kramdown syntax', () => {
      expect(renderer.canRender(kramdownTextNode)).toBe(true);
    });

    it('should return false when the argument `literal` lacks kramdown syntax', () => {
      expect(renderer.canRender(normalTextNode)).toBe(false);
    });
  });

  describe('render', () => {
    const origin = jest.fn();

    it('should return uneditable tokens', () => {
      expect(renderer.render({ origin })).toStrictEqual(buildUneditableTokens(origin()));
    });
  });
});
# frozen_string_literal: true

require 'spec_helper'

# Verifies that CookiesHelper#set_secure_cookie selects the correct cookie jar
# (encrypted / permanent / plain) for each cookie type and forwards the
# expected attributes to the jar's []= writer.
RSpec.describe CookiesHelper do
  describe '#set_secure_cookie' do
    it 'creates an encrypted cookie with expected attributes' do
      # HTTPS must be enabled for the helper to mark the cookie `secure: true`.
      stub_config_setting(https: true)
      expiration = 1.month.from_now
      key = :secure_cookie
      value = 'secure value'

      expect_next_instance_of(ActionDispatch::Cookies::EncryptedKeyRotatingCookieJar) do |instance|
        expect(instance).to receive(:[]=).with(key, httponly: true, secure: true, expires: expiration, value: value)
      end

      helper.set_secure_cookie(key, value, httponly: true, expires: expiration, type: CookiesHelper::COOKIE_TYPE_ENCRYPTED)
    end

    it 'creates a permanent cookie with expected attributes' do
      key = :permanent_cookie
      value = 'permanent value'

      # No HTTPS stub here, so the helper is expected to write secure: false;
      # the permanent jar manages its own long-lived expiry (expires: nil).
      expect_next_instance_of(ActionDispatch::Cookies::PermanentCookieJar) do |instance|
        expect(instance).to receive(:[]=).with(key, httponly: false, secure: false, expires: nil, value: value)
      end

      helper.set_secure_cookie(key, value, type: CookiesHelper::COOKIE_TYPE_PERMANENT)
    end

    it 'creates a regular cookie with expected attributes' do
      key = :regular_cookie
      value = 'regular value'

      # With no :type argument the helper falls through to the plain CookieJar.
      expect_next_instance_of(ActionDispatch::Cookies::CookieJar) do |instance|
        expect(instance).to receive(:[]=).with(key, httponly: false, secure: false, expires: nil, value: value)
      end

      helper.set_secure_cookie(key, value)
    end
  end
end
...@@ -262,6 +262,39 @@ describe Gitlab::Regex do ...@@ -262,6 +262,39 @@ describe Gitlab::Regex do
it { is_expected.not_to match('!!()()') } it { is_expected.not_to match('!!()()') }
end end
describe '.maven_version_regex' do
  subject { described_class.maven_version_regex }

  # Simple numeric and dotted versions (leading zeros allowed).
  it { is_expected.to match('0')}
  it { is_expected.to match('1') }
  it { is_expected.to match('03') }
  it { is_expected.to match('2.0') }
  it { is_expected.to match('01.2') }
  it { is_expected.to match('10.2.3-beta')}
  it { is_expected.to match('1.2-SNAPSHOT') }
  it { is_expected.to match('20') }
  it { is_expected.to match('20.3') }
  it { is_expected.to match('1.2.1') }
  it { is_expected.to match('1.4.2-12') }
  # Qualifier/build-number combinations (e.g. -beta-2, timestamped snapshots).
  it { is_expected.to match('1.2-beta-2') }
  it { is_expected.to match('12.1.2-2-1') }
  it { is_expected.to match('1.1-beta-2') }
  it { is_expected.to match('1.3.350.v20200505-1744') }
  it { is_expected.to match('2.0.0.v200706041905-7C78EK9E_EkMNfNOd2d8qq') }
  it { is_expected.to match('1.2-alpha-1-20050205.060708-1') }
  # Bare commit SHAs and keyword versions are also accepted.
  it { is_expected.to match('703220b4e2cea9592caeb9f3013f6b1e5335c293') }
  it { is_expected.to match('RELEASE') }

  # Rejected: surrounding whitespace and leading dots.
  it { is_expected.not_to match('..1.2.3') }
  it { is_expected.not_to match(' 1.2.3') }
  it { is_expected.not_to match("1.2.3 \r\t") }
  it { is_expected.not_to match("\r\t 1.2.3") }
  # Rejected: path separators and traversal/percent-encoding attempts,
  # which would otherwise enable directory traversal in package paths.
  it { is_expected.not_to match('1./2.3') }
  it { is_expected.not_to match('1.2.3-4/../../') }
  it { is_expected.not_to match('1.2.3-4%2e%2e%') }
  it { is_expected.not_to match('../../../../../1.2.3') }
  it { is_expected.not_to match('%2e%2e%2f1.2.3') }
end
describe '.semver_regex' do describe '.semver_regex' do
subject { described_class.semver_regex } subject { described_class.semver_regex }
......
...@@ -262,4 +262,44 @@ describe Noteable do ...@@ -262,4 +262,44 @@ describe Noteable do
end end
end end
end end
describe "#has_any_diff_note_positions?" do
  let(:source_branch) { "compare-with-merge-head-source" }
  let(:target_branch) { "compare-with-merge-head-target" }
  let(:merge_request) { create(:merge_request, source_branch: source_branch, target_branch: target_branch) }

  # Eagerly created diff note (let!) so every example starts with one note
  # anchored to a line of the MR diff.
  let!(:note) do
    path = "files/markdown/ruby-style-guide.md"
    position = Gitlab::Diff::Position.new(
      old_path: path,
      new_path: path,
      new_line: 508,
      diff_refs: merge_request.diff_refs
    )

    create(:diff_note_on_merge_request, project: merge_request.project, position: position, noteable: merge_request)
  end

  before do
    # Compute the merge-ref diff, then capture the note's position against it —
    # this is what populates DiffNotePosition records for the note above.
    MergeRequests::MergeToRefService.new(merge_request.project, merge_request.author).execute(merge_request)
    Discussions::CaptureDiffNotePositionsService.new(merge_request).execute
  end

  it "returns true when it has diff note positions" do
    expect(merge_request.has_any_diff_note_positions?).to be(true)
  end

  it "returns false when it has notes but no diff note positions" do
    # Delete only the captured positions; the note itself remains.
    DiffNotePosition.where(note: note).find_each(&:delete)

    expect(merge_request.has_any_diff_note_positions?).to be(false)
  end

  it "returns false when it has no notes" do
    merge_request.notes.find_each(&:destroy)

    expect(merge_request.has_any_diff_note_positions?).to be(false)
  end
end
end end
...@@ -9,13 +9,38 @@ RSpec.shared_examples 'known sign in' do ...@@ -9,13 +9,38 @@ RSpec.shared_examples 'known sign in' do
user.update!(current_sign_in_ip: ip) user.update!(current_sign_in_ip: ip)
end end
context 'with a valid post' do def stub_cookie(value = user.id)
context 'when remote IP does not match user last sign in IP' do cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE] = {
before do value: value, expires: KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY
stub_user_ip('127.0.0.1') }
stub_remote_ip('169.0.0.1') end
end
context 'when the remote IP and the last sign in IP match' do
before do
stub_user_ip('169.0.0.1')
stub_remote_ip('169.0.0.1')
end
it 'does not notify the user' do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
it 'sets/updates the encrypted cookie' do
post_action
expect(cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE]).to eq(user.id)
end
end
context 'when the remote IP and the last sign in IP do not match' do
before do
stub_user_ip('127.0.0.1')
stub_remote_ip('169.0.0.1')
end
context 'when the cookie is not previously set' do
it 'notifies the user' do it 'notifies the user' do
expect_next_instance_of(NotificationService) do |instance| expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in) expect(instance).to receive(:unknown_sign_in)
...@@ -23,37 +48,50 @@ RSpec.shared_examples 'known sign in' do ...@@ -23,37 +48,50 @@ RSpec.shared_examples 'known sign in' do
post_action post_action
end end
end
context 'when remote IP matches an active session' do it 'sets the encrypted cookie' do
before do post_action
existing_sessions = ActiveSession.session_ids_for_user(user.id)
existing_sessions.each { |sessions| ActiveSession.destroy(user, sessions) }
stub_user_ip('169.0.0.1')
stub_remote_ip('127.0.0.1')
ActiveSession.set(user, request) expect(cookies.encrypted[KnownSignIn::KNOWN_SIGN_IN_COOKIE]).to eq(user.id)
end end
end
it 'notifies the user when the cookie is expired' do
stub_cookie
it 'does not notify the user' do Timecop.freeze((KnownSignIn::KNOWN_SIGN_IN_COOKIE_EXPIRY + 1.day).from_now) do
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in) expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end
post_action post_action
end end
end end
context 'when remote IP address matches last sign in IP' do it 'notifies the user when the cookie is for another user' do
before do stub_cookie(create(:user).id)
stub_user_ip('127.0.0.1')
stub_remote_ip('127.0.0.1') expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:unknown_sign_in)
end end
it 'does not notify the user' do post_action
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in) end
post_action it 'does not notify the user when remote IP matches an active session' do
end ActiveSession.set(user, request)
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end
it 'does not notify the user when the cookie is present and not expired' do
stub_cookie
expect_any_instance_of(NotificationService).not_to receive(:unknown_sign_in)
post_action
end end
end end
end end
...@@ -1138,20 +1138,20 @@ ...@@ -1138,20 +1138,20 @@
dependencies: dependencies:
defer-to-connect "^1.0.1" defer-to-connect "^1.0.1"
"@toast-ui/editor@^2.0.1": "@toast-ui/editor@2.1.2", "@toast-ui/editor@^2.1.2":
version "2.0.1" version "2.1.2"
resolved "https://registry.yarnpkg.com/@toast-ui/editor/-/editor-2.0.1.tgz#749e5be1f02f42ded51488d1575ab1c19ca59952" resolved "https://registry.yarnpkg.com/@toast-ui/editor/-/editor-2.1.2.tgz#0472431bd039ae70882d77910e83f0ad222d0b1c"
integrity sha512-TC481O/zP37boY6H6oVN6KLVMY7yrU8zQu+3xqZ71V3Sr6D2XyaGb2Xub9XqTdqzBmzsf7y4Gi+EXO0IQ3rGVA== integrity sha512-yoWRVyp2m1dODH+bmzJaILUgl2L57GCQJ8c8+XRgJMwfxb/TFz5U+oT8JGAU5VwozIzKF0SyVMs8AEePwwhIIA==
dependencies: dependencies:
"@types/codemirror" "0.0.71" "@types/codemirror" "0.0.71"
codemirror "^5.48.4" codemirror "^5.48.4"
"@toast-ui/vue-editor@^2.0.1": "@toast-ui/vue-editor@2.1.2":
version "2.0.1" version "2.1.2"
resolved "https://registry.yarnpkg.com/@toast-ui/vue-editor/-/vue-editor-2.0.1.tgz#c9c8c8da4c0a67b9fbc4240464388c67d72a0c22" resolved "https://registry.yarnpkg.com/@toast-ui/vue-editor/-/vue-editor-2.1.2.tgz#a790e69fcf7fb426e6b8ea190733477c3cc756aa"
integrity sha512-sGsApl0n+GVAZbmPA+tTrq9rmmyh2mRgCgg2/mu1/lN7S4vPv/nQH8KXxLG9Y6hG2+kgelqz6wvbOCdzlM/HmQ== integrity sha512-RK01W6D8FqtNq4MjWsXk6KRzOU/vL6mpiADAnH5l/lFK4G6UQJhLKsMRfmxIqCH+ivm8VtQzGdd9obUfD+XbCw==
dependencies: dependencies:
"@toast-ui/editor" "^2.0.1" "@toast-ui/editor" "^2.1.2"
"@types/anymatch@*": "@types/anymatch@*":
version "1.3.0" version "1.3.0"
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment