Commit f6bb8a5d authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents 494c845f 0cfc8c27
import initSearchSettings from '~/search_settings';
initSearchSettings();
import $ from 'jquery';
import '~/profile/gl_crop';
import Profile from '~/profile/profile';
import initSearchSettings from '~/search_settings';

document.addEventListener('DOMContentLoaded', () => {
  // eslint-disable-next-line func-names
@@ -17,4 +18,6 @@ document.addEventListener('DOMContentLoaded', () => {
  });

  new Profile(); // eslint-disable-line no-new

  initSearchSettings();
});
import axios from '~/lib/utils/axios_utils';

function showCount(el, count) {
  el.textContent = count;
  el.classList.remove('hidden');
}

function refreshCount(el) {
  const { url } = el.dataset;

  return axios
    .get(url)
    .then(({ data }) => showCount(el, data.count))
    .catch((e) => {
      // eslint-disable-next-line no-console
      console.error(`Failed to fetch search count from '${url}'.`, e);
    });
}

export default function refreshCounts() {
  const elements = Array.from(document.querySelectorAll('.js-search-count'));
  return Promise.all(elements.map(refreshCount));
}
import setHighlightClass from 'ee_else_ce/search/highlight_blob_search_result';
import Project from '~/pages/projects/project';
import refreshCounts from '~/pages/search/show/refresh_counts';
import { queryToObject } from '~/lib/utils/url_utility';
import createStore from './store';
import { initTopbar } from './topbar';
@@ -20,6 +19,5 @@ export const initSearchApp = () => {
  initSearchSort(store);

  setHighlightClass(query.search); // Code Highlighting
  refreshCounts(); // Other Scope Tab Counts
  Project.initRefSwitcher(); // Code Search Branch Picker
};
import axios from '~/lib/utils/axios_utils';
import Api from '~/api';
import createFlash from '~/flash';
import { __ } from '~/locale';
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
import * as types from './mutation_types';

/* private */
const getCount = ({ params, state, activeCount }) => {
  const globalSearchCountsPath = '/search/count';
  const url = Api.buildUrl(globalSearchCountsPath);

  // count is known for active tab, so return it and skip the Api call
  if (params.scope === state.query?.scope) {
    return { scope: params.scope, count: activeCount };
  }

  return axios
    .get(url, { params })
    .then(({ data }) => {
      return { scope: params.scope, count: data.count };
    })
    .catch((e) => {
      throw e;
    });
};
export const fetchGroups = ({ commit }, search) => {
  commit(types.REQUEST_GROUPS);
  Api.groups(search)
@@ -38,6 +59,21 @@ export const fetchProjects = ({ commit, state }, search) => {
  }
};

export const fetchSearchCounts = ({ commit, state }, { scopeTabs, activeCount }) => {
  commit(types.REQUEST_SEARCH_COUNTS, { scopeTabs, activeCount });
  const promises = scopeTabs.map((scope) =>
    getCount({ params: { ...state.query, scope }, state, activeCount }),
  );

  Promise.all(promises)
    .then((data) => {
      commit(types.RECEIVE_SEARCH_COUNTS_SUCCESS, data);
    })
    .catch(() => {
      createFlash({ message: __('There was an error fetching the Search Counts') });
    });
};

export const setQuery = ({ commit }, { key, value }) => {
  commit(types.SET_QUERY, { key, value });
};
@@ -46,6 +82,22 @@ export const applyQuery = ({ state }) => {
  visitUrl(setUrlParams({ ...state.query, page: null }));
};

export const resetQuery = ({ state }) => {
  visitUrl(setUrlParams({ ...state.query, page: null, state: null, confidential: null }));
};
export const resetQuery = ({ state }, snippets = false) => {
  let defaultQuery = {
    page: null,
    state: null,
    confidential: null,
    nav_source: null,
  };

  if (snippets) {
    defaultQuery = {
      snippets: true,
      group_id: null,
      project_id: null,
      ...defaultQuery,
    };
  }

  visitUrl(setUrlParams({ ...state.query, ...defaultQuery }));
};
@@ -6,4 +6,7 @@ export const REQUEST_PROJECTS = 'REQUEST_PROJECTS';
export const RECEIVE_PROJECTS_SUCCESS = 'RECEIVE_PROJECTS_SUCCESS';
export const RECEIVE_PROJECTS_ERROR = 'RECEIVE_PROJECTS_ERROR';

export const REQUEST_SEARCH_COUNTS = 'REQUEST_SEARCH_COUNTS';
export const RECEIVE_SEARCH_COUNTS_SUCCESS = 'RECEIVE_SEARCH_COUNTS_SUCCESS';

export const SET_QUERY = 'SET_QUERY';
import { ALL_SCOPE_TABS } from '~/search/topbar/constants';
import * as types from './mutation_types';

export default {
@@ -23,6 +24,16 @@ export default {
    state.fetchingProjects = false;
    state.projects = [];
  },
  [types.REQUEST_SEARCH_COUNTS](state, { scopeTabs, activeCount }) {
    state.inflatedScopeTabs = scopeTabs.map((tab) => {
      return { ...ALL_SCOPE_TABS[tab], count: tab === state.query?.scope ? activeCount : '' };
    });
  },
  [types.RECEIVE_SEARCH_COUNTS_SUCCESS](state, data) {
    state.inflatedScopeTabs = data.map((tab) => {
      return { ...ALL_SCOPE_TABS[tab.scope], count: tab.count };
    });
  },
  [types.SET_QUERY](state, { key, value }) {
    state.query[key] = value;
  },
...
@@ -4,5 +4,6 @@ const createState = ({ query }) => ({
  fetchingGroups: false,
  projects: [],
  fetchingProjects: false,
  inflatedScopeTabs: [],
});

export default createState;
@@ -3,6 +3,7 @@ import { mapState, mapActions } from 'vuex';
import { GlForm, GlSearchBoxByType, GlButton } from '@gitlab/ui';
import GroupFilter from './group_filter.vue';
import ProjectFilter from './project_filter.vue';
import ScopeTabs from './scope_tabs.vue';

export default {
  name: 'GlobalSearchTopbar',
@@ -12,6 +13,7 @@ export default {
    GroupFilter,
    ProjectFilter,
    GlButton,
    ScopeTabs,
  },
  props: {
    groupInitialData: {
@@ -24,6 +26,16 @@
      required: false,
      default: () => ({}),
    },
    scopeTabs: {
      type: Array,
      required: false,
      default: () => [],
    },
    count: {
      type: String,
      required: false,
      default: '',
    },
  },
  computed: {
    ...mapState(['query']),
@@ -38,6 +50,9 @@
    showFilters() {
      return !this.query.snippets || this.query.snippets === 'false';
    },
    showScopeTabs() {
      return this.query.search;
    },
  },
  methods: {
    ...mapActions(['applyQuery', 'setQuery']),
@@ -46,28 +61,31 @@
</script>

<template>
  <section>
    <gl-form class="search-page-form" @submit.prevent="applyQuery">
      <section class="gl-lg-display-flex gl-align-items-flex-end">
        <div class="gl-flex-fill-1 gl-mb-4 gl-lg-mb-0 gl-lg-mr-2">
          <label>{{ __('What are you searching for?') }}</label>
          <gl-search-box-by-type
            id="dashboard_search"
            v-model="search"
            name="search"
            :placeholder="__(`Search for projects, issues, etc.`)"
          />
        </div>
        <div v-if="showFilters" class="gl-mb-4 gl-lg-mb-0 gl-lg-mx-2">
          <label class="gl-display-block">{{ __('Group') }}</label>
          <group-filter :initial-data="groupInitialData" />
        </div>
        <div v-if="showFilters" class="gl-mb-4 gl-lg-mb-0 gl-lg-mx-2">
          <label class="gl-display-block">{{ __('Project') }}</label>
          <project-filter :initial-data="projectInitialData" />
        </div>
        <gl-button class="btn-search gl-lg-ml-2" variant="success" type="submit">{{
          __('Search')
        }}</gl-button>
      </section>
    </gl-form>
    <scope-tabs v-if="showScopeTabs" :scope-tabs="scopeTabs" :count="count" />
  </section>
</template>
<script>
import { GlTabs, GlTab, GlBadge } from '@gitlab/ui';
import { mapState, mapActions } from 'vuex';

export default {
  name: 'ScopeTabs',
  components: {
    GlTabs,
    GlTab,
    GlBadge,
  },
  props: {
    scopeTabs: {
      type: Array,
      required: true,
    },
    count: {
      type: String,
      required: false,
      default: '',
    },
  },
  computed: {
    ...mapState(['query', 'inflatedScopeTabs']),
  },
  created() {
    this.fetchSearchCounts({ scopeTabs: this.scopeTabs, activeCount: this.count });
  },
  methods: {
    ...mapActions(['fetchSearchCounts', 'setQuery', 'resetQuery']),
    handleTabChange(scope) {
      this.setQuery({ key: 'scope', value: scope });
      this.resetQuery(scope === 'snippet_titles');
    },
    isTabActive(scope) {
      return scope === this.query.scope;
    },
  },
};
</script>

<template>
  <div>
    <gl-tabs
      content-class="gl-p-0"
      nav-class="search-filter search-nav-tabs gl-display-flex gl-overflow-x-auto"
    >
      <gl-tab
        v-for="tab in inflatedScopeTabs"
        :key="tab.scope"
        class="gl-display-flex"
        :active="isTabActive(tab.scope)"
        :data-testid="`tab-${tab.scope}`"
        :title-link-attributes="{ 'data-qa-selector': tab.qaSelector }"
        title-link-class="gl-white-space-nowrap"
        @click="handleTabChange(tab.scope)"
      >
        <template #title>
          <span data-testid="tab-title"> {{ tab.title }} </span>
          <gl-badge
            v-show="tab.count"
            :data-scope="tab.scope"
            :data-testid="`badge-${tab.scope}`"
            :variant="isTabActive(tab.scope) ? 'neutral' : 'muted'"
            size="sm"
          >
            {{ tab.count }}
          </gl-badge>
        </template>
      </gl-tab>
    </gl-tabs>
  </div>
</template>
@@ -19,3 +19,17 @@ export const PROJECT_DATA = {
  selectedDisplayValue: 'name_with_namespace',
  itemsDisplayValue: 'name_with_namespace',
};

export const ALL_SCOPE_TABS = {
  blobs: { scope: 'blobs', title: __('Code'), qaSelector: 'code_tab' },
  issues: { scope: 'issues', title: __('Issues') },
  merge_requests: { scope: 'merge_requests', title: __('Merge requests') },
  milestones: { scope: 'milestones', title: __('Milestones') },
  notes: { scope: 'notes', title: __('Comments') },
  wiki_blobs: { scope: 'wiki_blobs', title: __('Wiki') },
  commits: { scope: 'commits', title: __('Commits') },
  epics: { scope: 'epics', title: __('Epics') },
  users: { scope: 'users', title: __('Users') },
  snippet_titles: { scope: 'snippet_titles', title: __('Titles and Descriptions') },
  projects: { scope: 'projects', title: __('Projects'), qaSelector: 'projects_tab' },
};
@@ -11,10 +11,12 @@ export const initTopbar = (store) => {
    return false;
  }

  let { groupInitialData, projectInitialData } = el.dataset;
  let { groupInitialData, projectInitialData, scopeTabs } = el.dataset;
  const { count } = el.dataset;

  groupInitialData = JSON.parse(groupInitialData);
  projectInitialData = JSON.parse(projectInitialData);
  scopeTabs = JSON.parse(scopeTabs);

  return new Vue({
    el,
@@ -24,6 +26,8 @@
    props: {
      groupInitialData,
      projectInitialData,
      scopeTabs,
      count,
    },
  });
},
...
@@ -11,7 +11,7 @@ const mountSearch = ({ el }) =>
    ref: 'searchSettings',
    props: {
      searchRoot: document.querySelector('#content-body'),
      sectionSelector: 'section.settings',
      sectionSelector: '.js-search-settings-section, section.settings',
    },
    on: {
      collapse: (section) => closeSection($(section)),
...
@@ -2,6 +2,7 @@ $search-dropdown-max-height: 400px;
$search-avatar-size: 16px;
$search-sidebar-min-width: 240px;
$search-sidebar-max-width: 300px;
$search-topbar-min-height: 111px;

.search-results {
  .search-result-row {
@@ -19,6 +20,12 @@ $search-sidebar-max-width: 300px;
  }
}

.search-topbar {
  @include media-breakpoint-up(md) {
    min-height: $search-topbar-min-height;
  }
}

.search-sidebar {
  @include media-breakpoint-up(md) {
    min-width: $search-sidebar-min-width;
@@ -26,6 +33,11 @@
  }
}

.search-nav-tabs {
  overflow-y: hidden;
  flex-wrap: nowrap;
}

.search form:hover,
.file-finder-input:hover,
.issuable-search-form:hover,
...
@@ -502,13 +502,15 @@ class ProjectsController < Projects::ApplicationController
    render_404 unless Gitlab::CurrentSettings.project_export_enabled?
  end

  # Redirect from localhost/group/project.git to localhost/group/project
  def redirect_git_extension
    # Redirect from
    # localhost/group/project.git
    # to
    # localhost/group/project
    #
    redirect_to request.original_url.sub(%r{\.git/?\Z}, '') if params[:format] == 'git'
    return unless params[:format] == 'git'

    # `project` calls `find_routable!`, so this will trigger the usual not-found
    # behaviour when the user isn't authorized to see the project
    return unless project

    redirect_to(request.original_url.sub(%r{\.git/?\Z}, ''))
  end

  def whitelist_query_limiting
...
@@ -511,7 +511,8 @@ module ProjectsHelper
      commits: :download_code,
      merge_requests: :read_merge_request,
      notes: [:read_merge_request, :download_code, :read_issue, :read_snippet],
      members: :read_project_member
      members: :read_project_member,
      wiki_blobs: :read_wiki
    )
  end
...
# frozen_string_literal: true

module SearchHelper
  SEARCH_GENERIC_PARAMS = [
    :search,
    :scope,
    :project_id,
    :group_id,
    :repository_ref,
    :snippets,
    :sort,
    :force_search_results
  ].freeze
  PROJECT_SEARCH_TABS = %i{blobs issues merge_requests milestones notes wiki_blobs commits}.freeze
  BASIC_SEARCH_TABS = %i{projects issues merge_requests milestones}.freeze

  def search_autocomplete_opts(term)
    return unless current_user
@@ -283,27 +275,19 @@ module SearchHelper
    Sanitize.clean(str)
  end

  def search_filter_link(scope, label, data: {}, search: {})
    search_params = params
      .merge(search)
      .merge({ scope: scope })
      .permit(SEARCH_GENERIC_PARAMS)

    if @scope == scope
      li_class = 'active'
      count = @search_results.formatted_count(scope)
    else
      badge_class = 'js-search-count hidden'
      badge_data = { url: search_count_path(search_params) }
    end

    content_tag :li, class: li_class, data: data do
      link_to search_path(search_params) do
        concat label
        concat ' '
        concat content_tag(:span, count, class: ['badge badge-pill', badge_class], data: badge_data)
      end
    end
  end

  def search_nav_tabs
    return [:snippet_titles] if !@project && @show_snippets

    tabs =
      if @project
        PROJECT_SEARCH_TABS.select { |tab| project_search_tabs?(tab) }
      else
        BASIC_SEARCH_TABS.dup
      end

    tabs << :users if show_user_search_tab?

    tabs
  end

  def search_filter_input_options(type, placeholder = _('Search or filter results...'))
...
@@ -92,7 +92,7 @@ module AlertManagement
    def incoming_payload
      strong_memoize(:incoming_payload) do
        Gitlab::AlertManagement::Payload.parse(project, payload.to_h)
        Gitlab::AlertManagement::Payload.parse(project, payload.to_h, integration: integration)
      end
    end
...
@@ -4,4 +4,5 @@
- nav "profile"
- @left_sidebar = true
- enable_search_settings locals: { container_class: 'gl-my-5' }

= render template: "layouts/application"
- users = capture_haml do
  - if show_user_search_tab?
    = search_filter_link 'users', _("Users")

.scrolling-tabs-container.inner-page-scroll-tabs.is-smaller
  .fade-left= sprite_icon('chevron-lg-left', size: 12)
  .fade-right= sprite_icon('chevron-lg-right', size: 12)
  %ul.nav-links.search-filter.scrolling-tabs.nav.nav-tabs
    - if @project
      - if project_search_tabs?(:blobs)
        = search_filter_link 'blobs', _("Code"), data: { qa_selector: 'code_tab' }
      - if project_search_tabs?(:issues)
        = search_filter_link 'issues', _("Issues")
      - if project_search_tabs?(:merge_requests)
        = search_filter_link 'merge_requests', _("Merge requests")
      - if project_search_tabs?(:milestones)
        = search_filter_link 'milestones', _("Milestones")
      - if project_search_tabs?(:notes)
        = search_filter_link 'notes', _("Comments")
      - if project_search_tabs?(:wiki)
        = search_filter_link 'wiki_blobs', _("Wiki")
      - if project_search_tabs?(:commits)
        = search_filter_link 'commits', _("Commits")
      = users
    - elsif @show_snippets
      = search_filter_link 'snippet_titles', _("Titles and Descriptions"), search: { snippets: true, group_id: nil, project_id: nil }
    - else
      = search_filter_link 'projects', _("Projects"), data: { qa_selector: 'projects_tab' }
      = search_filter_link 'issues', _("Issues")
      = search_filter_link 'merge_requests', _("Merge requests")
      = search_filter_link 'milestones', _("Milestones")
      = render_if_exists 'search/epics_filter_link'
      = render_if_exists 'search/category_elasticsearch'
      = users
@@ -16,7 +16,6 @@
  = render_if_exists 'search/form_elasticsearch', attrs: { class: 'mb-2 mb-sm-0 align-self-center' }

.gl-mt-3
  #js-search-topbar{ data: { "group-initial-data": @group.to_json, "project-initial-data": project_attributes.to_json } }
  #js-search-topbar.search-topbar{ data: { "group-initial-data": @group.to_json, "project-initial-data": project_attributes.to_json, "scope-tabs": search_nav_tabs.to_json, count: @search_results&.formatted_count(@scope) } }
  - if @search_term
    = render 'search/category'
    = render 'search/results'
---
title: Don't expose project existence by redirecting from its .git URL
merge_request: 52818
author:
type: fixed
---
title: Change search tab to Vue component
merge_request: 52018
author:
type: changed
---
name: database_sourced_aggregated_metrics
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/52784
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/300411
milestone: '13.9'
type: development
group: group::product intelligence
default_enabled: false
key_path: counts_monthly.deployments
description: Total deployments count for recent 28 days
value_type: integer
stage: release
product_stage: release
status: data_available
milestone: 13.2
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/35493
group: 'group::ops release'
product_group: 'group::ops release'
time_frame: 28d
data_source: database
distribution: [ee, ce]
...
@@ -2,11 +2,11 @@ key_path: redis_hll_counters.issues_edit.g_project_management_issue_title_change
description: Distinct users count that changed issue title in a group for last recent week
value_type: integer
product_category: issue_tracking
stage: plan
product_stage: plan
status: data_available
milestone: 13.6
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/issues/229918
group: 'group::project management'
product_group: 'group::project management'
time_frame: 7d
data_source: redis_hll
distribution: [ee, ce]
...
key_path: counts.deployments
description: Total deployments count
value_type: integer
stage: release
product_stage: release
status: data_available
milestone: 8.12
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/735
group: 'group::ops release'
product_group: 'group::ops release'
time_frame: all
data_source: database
distribution: [ee, ce]
...
@@ -2,11 +2,11 @@ key_path: recorded_at
description: When the Usage Ping computation was started
value_type: string
product_category: collection
stage: growth
product_stage: growth
status: data_available
milestone: 8.10
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/557
group: group::product analytics
product_group: group::product intelligence
time_frame: none
data_source: ruby
distribution: [ee, ce]
...
@@ -2,11 +2,11 @@ key_path: uuid
description: GitLab instance unique identifier
value_type: string
product_category: collection
stage: growth
product_stage: growth
status: data_available
milestone: 9.1
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1521
group: group::product analytics
product_group: group::product intelligence
time_frame: none
data_source: database
distribution: [ee, ce]
...
{
  "type": "object",
  "required": ["key_path", "description", "value_type", "status", "group", "time_frame", "data_source", "distribution", "tier"],
  "required": ["key_path", "description", "value_type", "status", "product_group", "time_frame", "data_source", "distribution", "tier"],
  "properties": {
    "key_path": {
      "type": "string"
@@ -8,19 +8,25 @@
    "description": {
      "type": "string"
    },
    "value_type": {
      "type": "string",
      "enum": ["integer", "string", "number", "boolean"]
    },
    "product_category": {
      "type": ["string", "null"]
    },
    "stage": {
      "type": "string"
    },
    "product_section": {
      "type": ["string", "null"]
    },
    "product_stage": {
      "type": ["string", "null"]
    },
    "product_group": {
      "type": ["string", "null"]
    },
    "product_category": {
      "type": ["string", "null"]
    },
    "value_type": {
      "type": "string",
      "enum": ["integer", "string", "number", "boolean"]
    },
    "status": {
      "type": ["string"],
      "enum": ["data_available", "planned", "in_progress", "implmented"]
      "enum": ["data_available", "planned", "in_progress", "implemented"]
    },
    "milestone": {
      "type": ["number", "null"]
@@ -31,9 +37,6 @@
    "introduced_by_url": {
      "type": ["string", "null"]
    },
    "group": {
      "type": "string"
    },
    "time_frame": {
      "type": "string",
      "enum": ["7d", "28d", "all", "none"]
...
@@ -2,9 +2,9 @@ key_path: database.adapter
description: This metric only returns a value of PostgreSQL in supported versions of GitLab. It could be removed from the usage ping. Historically MySQL was also supported.
value_type: string
product_category: collection
stage: growth
product_stage: growth
status: data_available
group: group::enablement distribution
product_group: group::enablement distribution
time_frame: none
data_source: database
distribution: [ee, ce]
...
@@ -43,7 +43,7 @@ tracking_files = [
tracking_changed_files = all_changed_files & tracking_files
usage_data_changed_files = all_changed_files.grep(%r{(usage_data)})
metrics_changed_files = all_changed_files.grep(%r{((ee/)?config/metrics/.*\.yml)})
dictionary_changed_file = all_changed_files.grep(%r{(doc/developmet/usage_ping/dictionary.md)})
dictionary_changed_file = all_changed_files.grep(%r{(doc/development/usage_ping/dictionary.md)})

usage_changed_files = usage_data_changed_files + tracking_changed_files + metrics_changed_files + dictionary_changed_file
...
@@ -37,11 +37,11 @@ Total deployments count
| --- | --- |
| `key_path` | **counts.deployments** |
| `value_type` | integer |
| `stage` | release |
| `product_stage` | release |
| `status` | data_available |
| `milestone` | 8.12 |
| `introduced_by_url` | [Introduced by](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/735) |
| `group` | `group::ops release` |
| `product_group` | `group::ops release` |
| `time_frame` | all |
| `data_source` | Database |
| `distribution` | ee, ce |
@@ -56,10 +56,10 @@ Total number of sites in a Geo deployment
| `key_path` | **counts.geo_nodes** |
| `value_type` | integer |
| `product_category` | disaster_recovery |
| `stage` | enablement |
| `product_stage` | enablement |
| `status` | data_available |
| `milestone` | 11.2 |
| `group` | `group::geo` |
| `product_group` | `group::geo` |
| `time_frame` | all |
| `data_source` | Database |
| `distribution` | ee |
@@ -73,11 +73,11 @@ Total deployments count for recent 28 days
| --- | --- |
| `key_path` | **counts_monthly.deployments** |
| `value_type` | integer |
| `stage` | release |
| `product_stage` | release |
| `status` | data_available |
| `milestone` | 13.2 |
| `introduced_by_url` | [Introduced by](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/35493) |
| `group` | `group::ops release` |
| `product_group` | `group::ops release` |
| `time_frame` | 28d |
| `data_source` | Database |
| `distribution` | ee, ce |
@@ -92,9 +92,9 @@ This metric only returns a value of PostgreSQL in supported versions of GitLab.
| `key_path` | **database.adapter** |
| `value_type` | string |
| `product_category` | collection |
| `stage` | growth |
| `product_stage` | growth |
| `status` | data_available |
| `group` | `group::enablement distribution` |
| `product_group` | `group::enablement distribution` |
| `time_frame` | none |
| `data_source` | Database |
| `distribution` | ee, ce |
@@ -109,11 +109,11 @@ When the Usage Ping computation was started
| `key_path` | **recorded_at** |
| `value_type` | string |
| `product_category` | collection |
| `stage` | growth |
| `product_stage` | growth |
| `status` | data_available |
| `milestone` | 8.1 |
| `introduced_by_url` | [Introduced by](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/557) |
| `group` | `group::product analytics` |
| `product_group` | `group::product intelligence` |
| `time_frame` | none |
| `data_source` | Ruby |
| `distribution` | ee, ce |
@@ -128,11 +128,11 @@ Distinct users count that changed issue title in a group for last recent week
| `key_path` | **redis_hll_counters.issues_edit.g_project_management_issue_title_changed_weekly** |
| `value_type` | integer |
| `product_category` | issue_tracking |
| `stage` | plan |
| `product_stage` | plan |
| `status` | data_available |
| `milestone` | 13.6 |
| `introduced_by_url` | [Introduced by](https://gitlab.com/gitlab-org/gitlab/-/issues/229918) |
| `group` | `group::project management` |
| `product_group` | `group::project management` |
| `time_frame` | 7d |
| `data_source` | Redis_hll |
| `distribution` | ee, ce |
@@ -147,11 +147,11 @@ GitLab instance unique identifier
| `key_path` | **uuid** |
| `value_type` | string |
| `product_category` | collection |
| `stage` | growth |
| `product_stage` | growth |
| `status` | data_available |
| `milestone` | 9.1 |
| `introduced_by_url` | [Introduced by](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1521) |
| `group` | `group::product analytics` |
| `product_group` | `group::product intelligence` |
| `time_frame` | none |
| `data_source` | Database |
| `distribution` | ee, ce |
...
@@ -30,13 +30,13 @@ Each metric is defined in a separate YAML file consisting of a number of fields:
| `description` | yes | |
| `value_type` | yes | |
| `status` | yes | |
| `group` | yes | The [group](https://about.gitlab.com/handbook/product/categories/#devops-stages) that owns the metric. |
| `product_group` | yes | The [group](https://about.gitlab.com/handbook/product/categories/#devops-stages) that owns the metric. |
| `time_frame` | yes | `string`; may be set to a value like "7d" |
| `data_source` | yes | `string`: may be set to a value like `database` or `redis_hll`. |
| `distribution` | yes | The [distribution](https://about.gitlab.com/handbook/marketing/strategic-marketing/tiers/#definitions) where the metric applies. |
| `tier` | yes | The [tier]( https://about.gitlab.com/handbook/marketing/strategic-marketing/tiers/) where the metric applies. |
| `product_category` | no | The [product category](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/categories.yml) for the metric. |
| `stage` | no | The [stage](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml) for the metric. |
| `product_stage` | no | The [stage](https://gitlab.com/gitlab-com/www-gitlab-com/blob/master/data/stages.yml) for the metric. |
| `milestone` | no | The milestone when the metric is introduced. |
| `milestone_removed` | no | The milestone when the metric is removed. |
| `introduced_by_url` | no | The URL to the Merge Request that introduced the metric. |
@@ -52,11 +52,11 @@ key_path: uuid
description: GitLab instance unique identifier
value_type: string
product_category: collection
stage: growth
product_stage: growth
status: data_available
milestone: 9.1
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/1521
group: group::product intelligence
product_group: group::product intelligence
time_frame: none
data_source: database
distribution: [ee, ce]
...
@@ -2,7 +2,8 @@
import { visitUrl } from '~/lib/utils/url_utility';
import * as Sentry from '~/sentry/wrapper';
import createComplianceFrameworkMutation from '../graphql/queries/create_compliance_framework.mutation.graphql';
import { initialiseFormData, SAVE_ERROR } from '../constants';
import { SAVE_ERROR } from '../constants';
import { initialiseFormData } from '../utils';
import SharedForm from './shared_form.vue';
import FormStatus from './form_status.vue';
...
@@ -5,7 +5,8 @@ import { convertToGraphQLId } from '~/graphql_shared/utils';
import getComplianceFrameworkQuery from '../graphql/queries/get_compliance_framework.query.graphql';
import updateComplianceFrameworkMutation from '../graphql/queries/update_compliance_framework.mutation.graphql';
import { initialiseFormData, FETCH_ERROR, SAVE_ERROR } from '../constants';
import { FETCH_ERROR, SAVE_ERROR } from '../constants';
import { initialiseFormData } from '../utils';
import SharedForm from './shared_form.vue';
import FormStatus from './form_status.vue';
@@ -35,7 +36,8 @@ export default {
  },
  data() {
    return {
      errorMessage: '',
      initErrorMessage: '',
      saveErrorMessage: '',
      formData: initialiseFormData(),
      saving: false,
    };
@@ -53,7 +55,7 @@
        this.formData = this.extractComplianceFramework(data);
      },
      error(error) {
        this.setError(error, FETCH_ERROR);
        this.setInitError(error, FETCH_ERROR);
      },
    },
  },
@@ -64,8 +66,13 @@
    isLoading() {
      return this.$apollo.loading || this.saving;
    },
    hasFormData() {
      return Boolean(this.formData?.name);
    },
    showForm() {
      return (
        Object.values(this.formData).filter((d) => d !== null).length > 0 && !this.initErrorMessage
      );
    },
    errorMessage() {
      return this.initErrorMessage || this.saveErrorMessage;
    },
  },
  methods: {
@@ -73,7 +80,7 @@
      const complianceFrameworks = data.namespace?.complianceFrameworks?.nodes || [];

      if (!complianceFrameworks.length) {
        this.setError(new Error(FETCH_ERROR), FETCH_ERROR);
        this.setInitError(new Error(FETCH_ERROR), FETCH_ERROR);
        return initialiseFormData();
      }
@@ -86,13 +93,17 @@
        color,
      };
    },
    setError(error, userFriendlyText) {
      this.errorMessage = userFriendlyText;
      Sentry.captureException(error);
    },
    setInitError(error, userFriendlyText) {
      this.initErrorMessage = userFriendlyText;
      Sentry.captureException(error);
    },
    setSavingError(error, userFriendlyText) {
      this.saveErrorMessage = userFriendlyText;
      Sentry.captureException(error);
    },
    async onSubmit() {
      this.saving = true;
      this.errorMessage = '';
      this.saveErrorMessage = '';
      try {
        const { name, description, color } = this.formData;
@@ -113,13 +124,13 @@
        const [error] = data?.updateComplianceFramework?.errors || [];

        if (error) {
          this.setError(new Error(error), error);
          this.setSavingError(new Error(error), error);
        } else {
          this.saving = false;
          visitUrl(this.groupEditPath);
        }
      } catch (e) {
        this.setError(e, SAVE_ERROR);
        this.setSavingError(e, SAVE_ERROR);
      }

      this.saving = false;
@@ -130,7 +141,7 @@
<template>
  <form-status :loading="isLoading" :error="errorMessage">
    <shared-form
      v-if="hasFormData"
      v-if="showForm"
      :group-edit-path="groupEditPath"
      :name.sync="formData.name"
      :description.sync="formData.description"
...
@@ -103,7 +103,12 @@ export default {
      </gl-sprintf>
    </template>
    <gl-form-input :value="name" data-testid="name-input" @input="$emit('update:name', $event)" />
    <gl-form-input
      :value="name"
      :state="isValidName"
      data-testid="name-input"
      @input="$emit('update:name', $event)"
    />
  </gl-form-group>

  <gl-form-group
@@ -114,6 +119,7 @@
  >
    <gl-form-input
      :value="description"
      :state="isValidDescription"
      data-testid="description-input"
      @input="$emit('update:description', $event)"
    />
...
import { s__ } from '~/locale';

export const initialiseFormData = () => ({
  name: null,
  description: null,
  color: null,
});

export const FETCH_ERROR = s__(
  'ComplianceFrameworks|Error fetching compliance frameworks data. Please refresh the page',
);
...
export const initialiseFormData = () => ({
  name: null,
  description: null,
  pipelineConfigurationFullPath: null,
  color: null,
});
@@ -3,8 +3,8 @@ module EE
  module SearchHelper
    extend ::Gitlab::Utils::Override

    SWITCH_TO_BASIC_SEARCHABLE_TABS = %w[projects issues merge_requests milestones users epics].freeze
    PLACEHOLDER = '_PLACEHOLDER_'
    ADVANCED_SEARCH_TABS = %i{notes blobs commits wiki_blobs}.freeze

    override :search_filter_input_options
    def search_filter_input_options(type, placeholder = _('Search or filter results...'))
@@ -130,6 +130,20 @@
      options + super
    end

    override :search_nav_tabs
    def search_nav_tabs
      return super if @project || @show_snippets

      tabs = []
      tabs << :epics if search_service.show_epics?
      tabs.push(*ADVANCED_SEARCH_TABS) if search_service.use_elasticsearch?

      super_tabs = super
      users_index = super_tabs.index(:users) || -1
      super_tabs.insert(users_index, *tabs)
    end

    private

    def recent_epics_autocomplete(term)
...
- if search_service.use_elasticsearch?
  = search_filter_link 'notes', _("Comments")
  = search_filter_link 'blobs', _("Code"), data: { qa_selector: 'code_tab' }
  = search_filter_link 'commits', _("Commits")
  = search_filter_link 'wiki_blobs', _("Wiki")
- if search_service.show_epics?
  = search_filter_link 'epics', _("Epics")
@@ -2,10 +2,10 @@ key_path: counts.geo_nodes
description: Total number of sites in a Geo deployment
value_type: integer
product_category: disaster_recovery
stage: enablement
product_stage: enablement
status: data_available
milestone: 11.2
group: 'group::geo'
product_group: 'group::geo'
time_frame: all
data_source: database
distribution: [ee]
...
@@ -9,6 +9,7 @@ module EE
      extend ::Gitlab::Utils::Override

      EXCLUDED_PAYLOAD_FINGERPRINT_PARAMS = %w(start_time end_time hosts).freeze
      CUSTOM_MAPPING_PATH_KEY = 'path'

      private
@@ -33,6 +34,20 @@
      def generic_alert_fingerprinting_enabled?
        project.feature_available?(:generic_alert_fingerprinting)
      end

      override :value_for_paths
      def value_for_paths(paths)
        custom_mapping_value_for_paths(paths) || super(paths)
      end

      def custom_mapping_value_for_paths(paths)
        return unless ::Gitlab::AlertManagement.custom_mapping_available?(project)
        return unless integration&.active?

        custom_mapping_path = integration.payload_attribute_mapping.dig(*paths.first, CUSTOM_MAPPING_PATH_KEY)
        payload&.dig(*custom_mapping_path) if custom_mapping_path
      end
    end
  end
end
...
@@ -451,10 +451,12 @@ module EE
        pipelines_with_secure_jobs[metric_name.to_sym] =
          if start_id && finish_id
            estimate_batch_distinct_count(relation, :commit_id, batch_size: 1000, start: start_id, finish: finish_id) do |result|
              save_aggregated_metrics(**aggregated_metrics_params.merge({ data: result }))
              ::Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll
                .save_aggregated_metrics(**aggregated_metrics_params.merge({ data: result }))
            end
          else
            save_aggregated_metrics(**aggregated_metrics_params.merge({ data: ::Gitlab::Database::PostgresHll::Buckets.new }))
            ::Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll
              .save_aggregated_metrics(**aggregated_metrics_params.merge({ data: ::Gitlab::Database::PostgresHll::Buckets.new }))
            0
          end
      end
...
@@ -77,7 +77,7 @@ RSpec.describe 'Global elastic search', :elastic, :sidekiq_inline do
    end
  end

  describe 'I search through the issues and I see pagination' do
  describe 'I search through the issues and I see pagination', :js do
    before do
      create_list(:issue, 21, project: project, title: 'initial')
@@ -94,7 +94,7 @@ RSpec.describe 'Global elastic search', :elastic, :sidekiq_inline do
    end
  end

  describe 'I search through the notes and I see pagination' do
  describe 'I search through the notes and I see pagination', :js do
    before do
      issue = create(:issue, project: project, title: 'initial')
      create_list(:note, 21, noteable: issue, project: project, note: 'foo')
@@ -112,7 +112,7 @@ RSpec.describe 'Global elastic search', :elastic, :sidekiq_inline do
    end
  end

  describe 'I search through the blobs' do
  describe 'I search through the blobs', :js do
    let(:project_2) { create(:project, :repository, :wiki_repo) }

    before do
@@ -156,7 +156,7 @@ RSpec.describe 'Global elastic search', :elastic, :sidekiq_inline do
    end
  end

  describe 'I search through the wiki blobs' do
  describe 'I search through the wiki blobs', :js do
    before do
      project.wiki.create_page('test.md', '# term')
      project.wiki.index_wiki_blobs
@@ -175,9 +175,10 @@ RSpec.describe 'Global elastic search', :elastic, :sidekiq_inline do
    end
  end

  describe 'I search through the commits' do
  describe 'I search through the commits', :js do
    before do
      project.repository.index_commits_and_blobs

      ensure_elasticsearch_index!
    end
@@ -187,7 +188,7 @@
      submit_search('add')
      select_search_scope('Commits')

      expect(page).to have_selector('.commit-row-description')
      expect(page).to have_selector('.commit-row-message')
      expect(page).to have_selector('.project-namespace')
    end
@@ -197,7 +198,7 @@
      submit_search('add')
      select_search_scope('Commits')

      expected_message = "Add directory structure for tree_helper spec"
      expected_message = "Merge branch 'tree_helper_spec' into 'master'"

      expect(page).not_to have_content(expected_message)
@@ -231,7 +232,7 @@
  end
end

RSpec.describe 'Global elastic search redactions', :elastic do
RSpec.describe 'Global elastic search redactions', :elastic, :js do
  context 'when block_anonymous_global_searches is disabled' do
    before do
      stub_feature_flags(block_anonymous_global_searches: false)
...
@@ -83,6 +83,7 @@ RSpec.describe 'Group elastic search', :js, :elastic, :sidekiq_might_not_need_in
  describe 'commit search' do
    before do
      project.repository.index_commits_and_blobs
      ensure_elasticsearch_index!
    end
@@ -95,7 +96,7 @@ RSpec.describe 'Group elastic search', :js, :elastic, :sidekiq_might_not_need_in
  end
end

RSpec.describe 'Group elastic search redactions', :elastic do
RSpec.describe 'Group elastic search redactions', :elastic, :js do
  it_behaves_like 'a redacted search results page' do
    let(:search_path) { group_path(public_group) }
  end
...
@@ -10,7 +10,7 @@ RSpec.describe 'Project elastic search', :js, :elastic do
    stub_ee_application_setting(elasticsearch_search: true, elasticsearch_indexing: true)
  end

  describe 'searching' do
  describe 'searching', :sidekiq_inline do
    before do
      project.add_maintainer(user)
      sign_in(user)
@@ -18,7 +18,7 @@ RSpec.describe 'Project elastic search', :js, :elastic do
      visit project_path(project)
    end

    it 'finds issues', :sidekiq_inline do
    it 'finds issues' do
      create(:issue, project: project, title: 'Test searching for an issue')
      ensure_elasticsearch_index!
@@ -28,7 +28,7 @@ RSpec.describe 'Project elastic search', :js, :elastic do
      expect(page).to have_selector('.results', text: 'Test searching for an issue')
    end

    it 'finds merge requests', :sidekiq_inline do
    it 'finds merge requests' do
      create(:merge_request, source_project: project, target_project: project, title: 'Test searching for an MR')
      ensure_elasticsearch_index!
@@ -38,7 +38,7 @@ RSpec.describe 'Project elastic search', :js, :elastic do
      expect(page).to have_selector('.results', text: 'Test searching for an MR')
    end

    it 'finds milestones', :sidekiq_inline do
    it 'finds milestones' do
      create(:milestone, project: project, title: 'Test searching for a milestone')
      ensure_elasticsearch_index!
@@ -48,9 +48,10 @@ RSpec.describe 'Project elastic search', :js, :elastic do
      expect(page).to have_selector('.results', text: 'Test searching for a milestone')
    end

    it 'finds wiki pages', :sidekiq_inline do
    it 'finds wiki pages' do
      project.wiki.create_page('test.md', 'Test searching for a wiki page')
      project.wiki.index_wiki_blobs
      ensure_elasticsearch_index!

      submit_search('Test')
      select_search_scope('Wiki')
@@ -58,7 +59,7 @@ RSpec.describe 'Project elastic search', :js, :elastic do
      expect(page).to have_selector('.results', text: 'Test searching for a wiki page')
    end

    it 'finds notes', :sidekiq_inline do
    it 'finds notes' do
      create(:note, project: project, note: 'Test searching for a comment')
      ensure_elasticsearch_index!
@@ -68,8 +69,9 @@ RSpec.describe 'Project elastic search', :js, :elastic do
      expect(page).to have_selector('.results', text: 'Test searching for a comment')
    end

    it 'finds commits', :sidekiq_inline do
    it 'finds commits' do
      project.repository.index_commits_and_blobs
      ensure_elasticsearch_index!

      submit_search('initial')
      select_search_scope('Commits')
@@ -77,8 +79,9 @@ RSpec.describe 'Project elastic search', :js, :elastic do
      expect(page).to have_selector('.results', text: 'Initial commit')
    end

    it 'finds blobs', :sidekiq_inline do
    it 'finds blobs' do
      project.repository.index_commits_and_blobs
      ensure_elasticsearch_index!

      submit_search('def')
      select_search_scope('Code')
@@ -126,7 +129,7 @@ RSpec.describe 'Project elastic search', :js, :elastic do
  end
end

RSpec.describe 'Project elastic search redactions', :elastic do
RSpec.describe 'Project elastic search redactions', :elastic, :js do
  it_behaves_like 'a redacted search results page' do
    let(:search_path) { project_path(public_restricted_project) }
  end
...
...@@ -21,14 +21,15 @@ jest.mock('~/lib/utils/url_utility'); ...@@ -21,14 +21,15 @@ jest.mock('~/lib/utils/url_utility');
describe('CreateForm', () => { describe('CreateForm', () => {
let wrapper; let wrapper;
const sentryError = new Error('Network error');
const sentrySaveError = new Error('Invalid values given');
const propsData = { const propsData = {
groupPath: 'group-1', groupPath: 'group-1',
groupEditPath: 'group-1/edit', groupEditPath: 'group-1/edit',
scopedLabelsHelpPath: 'help/scoped-labels',
}; };
const sentryError = new Error('Network error');
const sentrySaveError = new Error('Invalid values given');
const create = jest.fn().mockResolvedValue(validCreateResponse); const create = jest.fn().mockResolvedValue(validCreateResponse);
const createWithNetworkErrors = jest.fn().mockRejectedValue(sentryError); const createWithNetworkErrors = jest.fn().mockRejectedValue(sentryError);
const createWithErrors = jest.fn().mockResolvedValue(errorCreateResponse); const createWithErrors = jest.fn().mockResolvedValue(errorCreateResponse);
......
...@@ -28,16 +28,16 @@ jest.mock('~/lib/utils/url_utility'); ...@@ -28,16 +28,16 @@ jest.mock('~/lib/utils/url_utility');
describe('EditForm', () => { describe('EditForm', () => {
let wrapper; let wrapper;
const sentryError = new Error('Network error');
const sentrySaveError = new Error('Invalid values given');
const propsData = { const propsData = {
graphqlFieldName: 'ComplianceManagement::Framework', graphqlFieldName: 'ComplianceManagement::Framework',
groupPath: 'group-1',
groupEditPath: 'group-1/edit', groupEditPath: 'group-1/edit',
groupPath: 'group-1',
id: '1', id: '1',
scopedLabelsHelpPath: 'help/scoped-labels',
}; };
const sentryError = new Error('Network error');
const sentrySaveError = new Error('Invalid values given');
const fetchOne = jest.fn().mockResolvedValue(validFetchOneResponse); const fetchOne = jest.fn().mockResolvedValue(validFetchOneResponse);
const fetchEmpty = jest.fn().mockResolvedValue(emptyFetchResponse); const fetchEmpty = jest.fn().mockResolvedValue(emptyFetchResponse);
const fetchLoading = jest.fn().mockResolvedValue(new Promise(() => {})); const fetchLoading = jest.fn().mockResolvedValue(new Promise(() => {}));
...@@ -96,7 +96,7 @@ describe('EditForm', () => { ...@@ -96,7 +96,7 @@ describe('EditForm', () => {
await waitForPromises(); await waitForPromises();
expect(fetchOne).toHaveBeenCalledTimes(1); expect(fetchOne).toHaveBeenCalledTimes(1);
expect(findForm().props()).toMatchObject({ expect(findForm().props()).toStrictEqual({
name: frameworkFoundResponse.name, name: frameworkFoundResponse.name,
description: frameworkFoundResponse.description, description: frameworkFoundResponse.description,
color: frameworkFoundResponse.color, color: frameworkFoundResponse.color,
......
...@@ -28,6 +28,15 @@ describe('SharedForm', () => { ...@@ -28,6 +28,15 @@ describe('SharedForm', () => {
}, },
stubs: { stubs: {
GlFormGroup, GlFormGroup,
GlFormInput: {
name: 'gl-form-input-stub',
props: ['state'],
template: `
<div>
<slot></slot>
</div>
`,
},
GlSprintf, GlSprintf,
}, },
}); });
...@@ -67,10 +76,11 @@ describe('SharedForm', () => { ...@@ -67,10 +76,11 @@ describe('SharedForm', () => {
${null} | ${null} ${null} | ${null}
${''} | ${false} ${''} | ${false}
${'foobar'} | ${true} ${'foobar'} | ${true}
`('sets the correct state to the name input group', ({ name, validity }) => { `('sets the correct state to the name input and group', ({ name, validity }) => {
wrapper = createComponent({ name }); wrapper = createComponent({ name });
expect(findNameGroup().props('state')).toBe(validity); expect(findNameGroup().props('state')).toBe(validity);
expect(findNameInput().props('state')).toBe(validity);
}); });
it.each` it.each`
...@@ -78,10 +88,11 @@ describe('SharedForm', () => { ...@@ -78,10 +88,11 @@ describe('SharedForm', () => {
${null} | ${null} ${null} | ${null}
${''} | ${false} ${''} | ${false}
${'foobar'} | ${true} ${'foobar'} | ${true}
`('sets the correct state to the description input group', ({ description, validity }) => { `('sets the correct state to the description input and group', ({ description, validity }) => {
wrapper = createComponent({ description }); wrapper = createComponent({ description });
expect(findDescriptionGroup().props('state')).toBe(validity); expect(findDescriptionGroup().props('state')).toBe(validity);
expect(findDescriptionInput().props('state')).toBe(validity);
}); });
it.each` it.each`
......
...@@ -48,7 +48,7 @@ describe('createComplianceFrameworksFormApp', () => { ...@@ -48,7 +48,7 @@ describe('createComplianceFrameworksFormApp', () => {
}); });
it('parses and passes props', () => { it('parses and passes props', () => {
expect(findFormApp(CreateForm).props()).toMatchObject({ expect(findFormApp(CreateForm).props()).toStrictEqual({
groupEditPath, groupEditPath,
groupPath, groupPath,
}); });
...@@ -61,7 +61,8 @@ describe('createComplianceFrameworksFormApp', () => { ...@@ -61,7 +61,8 @@ describe('createComplianceFrameworksFormApp', () => {
}); });
it('parses and passes props', () => { it('parses and passes props', () => {
expect(findFormApp(EditForm).props()).toMatchObject({ expect(findFormApp(EditForm).props()).toStrictEqual({
graphqlFieldName,
groupEditPath, groupEditPath,
groupPath, groupPath,
id: testId, id: testId,
......
import * as Utils from 'ee/groups/settings/compliance_frameworks/utils';
describe('Utils', () => {
describe('initialiseFormData', () => {
it('returns the initial form data object', () => {
expect(Utils.initialiseFormData()).toStrictEqual({
name: null,
description: null,
pipelineConfigurationFullPath: null,
color: null,
});
});
});
});
...@@ -17,6 +17,7 @@ RSpec.describe Resolvers::DastSiteValidationResolver do ...@@ -17,6 +17,7 @@ RSpec.describe Resolvers::DastSiteValidationResolver do
before do before do
project.add_maintainer(current_user) project.add_maintainer(current_user)
stub_licensed_features(security_on_demand_scans: true)
end end
specify do specify do
......
...@@ -6,18 +6,24 @@ RSpec.describe GitlabSchema.types['DastProfile'] do ...@@ -6,18 +6,24 @@ RSpec.describe GitlabSchema.types['DastProfile'] do
include GraphqlHelpers include GraphqlHelpers
let_it_be(:object) { create(:dast_profile) } let_it_be(:object) { create(:dast_profile) }
let_it_be(:project) { object.project }
let_it_be(:user) { create(:user, developer_projects: [project]) }
let_it_be(:fields) { %i[id name description dastSiteProfile dastScannerProfile editPath] } let_it_be(:fields) { %i[id name description dastSiteProfile dastScannerProfile editPath] }
specify { expect(described_class.graphql_name).to eq('DastProfile') } specify { expect(described_class.graphql_name).to eq('DastProfile') }
specify { expect(described_class).to require_graphql_authorizations(:read_on_demand_scans) } specify { expect(described_class).to require_graphql_authorizations(:read_on_demand_scans) }
before do
stub_licensed_features(security_on_demand_scans: true)
end
it { expect(described_class).to have_graphql_fields(fields) } it { expect(described_class).to have_graphql_fields(fields) }
describe 'editPath field' do describe 'editPath field' do
it 'correctly renders the field' do it 'correctly renders the field' do
expected_result = Gitlab::Routing.url_helpers.edit_project_on_demand_scan_path(object.project, object) expected_result = Gitlab::Routing.url_helpers.edit_project_on_demand_scan_path(project, object)
expect(resolve_field(:edit_path, object)).to eq(expected_result) expect(resolve_field(:edit_path, object, current_user: user)).to eq(expected_result)
end end
end end
end end
...@@ -286,4 +286,67 @@ RSpec.describe SearchHelper do ...@@ -286,4 +286,67 @@ RSpec.describe SearchHelper do
end end
end end
end end
describe '#search_nav_tabs' do
let(:current_user) { nil }
subject { search_nav_tabs }
context 'when @show_snippets is present' do
before do
@show_snippets = 1
end
it { is_expected.to eq([:snippet_titles]) }
end
context 'when @project is present' do
before do
@project = 1
allow(self).to receive(:project_search_tabs?).with(anything).and_return(true)
end
it { is_expected.to eq([:blobs, :issues, :merge_requests, :milestones, :notes, :wiki_blobs, :commits, :users]) }
end
context 'when @show_snippets and @project are not present' do
context 'when user has access to read users' do
before do
allow(self).to receive(:can?).with(current_user, :read_users_list).and_return(true)
end
context 'when elasticsearch is enabled' do
before do
allow(self.search_service).to receive(:use_elasticsearch?).and_return(true)
end
it { is_expected.to eq([:projects, :issues, :merge_requests, :milestones, :notes, :blobs, :commits, :wiki_blobs, :users]) }
context 'when show_epics? is true' do
before do
allow(self.search_service).to receive(:show_epics?).and_return(true)
end
it { is_expected.to eq([:projects, :issues, :merge_requests, :milestones, :epics, :notes, :blobs, :commits, :wiki_blobs, :users]) }
end
end
context 'when elasticsearch is disabled' do
before do
allow(self.search_service).to receive(:use_elasticsearch?).and_return(false)
end
it { is_expected.to eq([:projects, :issues, :merge_requests, :milestones, :users]) }
context 'when show_epics? is true' do
before do
allow(self.search_service).to receive(:show_epics?).and_return(true)
end
it { is_expected.to eq([:projects, :issues, :merge_requests, :milestones, :epics, :users]) }
end
end
end
end
end
end end
...@@ -3,10 +3,292 @@ ...@@ -3,10 +3,292 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe Gitlab::AlertManagement::Payload::Generic do RSpec.describe Gitlab::AlertManagement::Payload::Generic do
let_it_be(:project) { build_stubbed(:project) } let_it_be(:project) { create(:project) }
let(:raw_payload) { {} } let(:raw_payload) { {} }
let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) } let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
shared_examples 'parsing alert payload fields with default paths' do
describe '#title' do
subject { parsed_payload.title }
it { is_expected.to eq('default title') }
end
describe '#description' do
subject { parsed_payload.description }
it { is_expected.to eq('default description') }
end
describe '#starts_at' do
subject { parsed_payload.starts_at }
it { is_expected.to eq(default_start_time) }
end
describe '#ends_at' do
subject { parsed_payload.ends_at }
it { is_expected.to eq(default_end_time) }
end
describe '#service' do
subject { parsed_payload.service }
it { is_expected.to eq('default service') }
end
describe '#monitoring_tool' do
subject { parsed_payload.monitoring_tool }
it { is_expected.to eq('default monitoring tool') }
end
describe '#hosts' do
subject { parsed_payload.hosts }
it { is_expected.to eq(['default-host']) }
end
describe '#severity' do
subject { parsed_payload.severity }
it { is_expected.to eq('low') }
end
describe '#environment_name' do
subject { parsed_payload.environment_name }
it { is_expected.to eq('default gitlab environment') }
end
describe '#gitlab_fingerprint' do
subject { parsed_payload.gitlab_fingerprint }
it { is_expected.to eq(Gitlab::AlertManagement::Fingerprint.generate('default fingerprint')) }
end
end
describe 'attributes' do
let_it_be(:default_start_time) { 10.days.ago.change(usec: 0).utc }
let_it_be(:default_end_time) { 9.days.ago.change(usec: 0).utc }
let_it_be(:mapped_start_time) { 5.days.ago.change(usec: 0).utc }
let_it_be(:mapped_end_time) { 4.days.ago.change(usec: 0).utc }
let_it_be(:raw_payload) do
{
'title' => 'default title',
'description' => 'default description',
'start_time' => default_start_time.to_s,
'end_time' => default_end_time.to_s,
'service' => 'default service',
'monitoring_tool' => 'default monitoring tool',
'hosts' => ['default-host'],
'severity' => 'low',
'gitlab_environment_name' => 'default gitlab environment',
'fingerprint' => 'default fingerprint',
'alert' => {
'name' => 'mapped title',
'desc' => 'mapped description',
'start_time' => mapped_start_time.to_s,
'end_time' => mapped_end_time.to_s,
'service' => 'mapped service',
'monitoring_tool' => 'mapped monitoring tool',
'hosts' => ['mapped-host'],
'severity' => 'high',
'env_name' => 'mapped gitlab environment',
'fingerprint' => 'mapped fingerprint'
}
}
end
context 'with multiple HTTP integrations feature available' do
before do
stub_licensed_features(multiple_alert_http_integrations: project)
end
context 'with multiple_http_integrations_custom_mapping feature flag enabled' do
let_it_be(:attribute_mapping) do
{
title: { path: %w(alert name), type: 'string' },
description: { path: %w(alert desc), type: 'string' },
start_time: { path: %w(alert start_time), type: 'datetime' },
end_time: { path: %w(alert end_time), type: 'datetime' },
service: { path: %w(alert service), type: 'string' },
monitoring_tool: { path: %w(alert monitoring_tool), type: 'string' },
hosts: { path: %w(alert hosts), type: 'string' },
severity: { path: %w(alert severity), type: 'string' },
gitlab_environment_name: { path: %w(alert env_name), type: 'string' },
fingerprint: { path: %w(alert fingerprint), type: 'string' }
}
end
let(:parsed_payload) { described_class.new(project: project, payload: raw_payload, integration: integration) }
before do
stub_feature_flags(multiple_http_integrations_custom_mapping: project)
end
context 'with defined custom mapping' do
let_it_be(:integration) do
create(:alert_management_http_integration, project: project, payload_attribute_mapping: attribute_mapping)
end
describe '#title' do
subject { parsed_payload.title }
it { is_expected.to eq('mapped title') }
end
describe '#description' do
subject { parsed_payload.description }
it { is_expected.to eq('mapped description') }
end
describe '#starts_at' do
subject { parsed_payload.starts_at }
it { is_expected.to eq(mapped_start_time) }
end
describe '#ends_at' do
subject { parsed_payload.ends_at }
it { is_expected.to eq(mapped_end_time) }
end
describe '#service' do
subject { parsed_payload.service }
it { is_expected.to eq('mapped service') }
end
describe '#monitoring_tool' do
subject { parsed_payload.monitoring_tool }
it { is_expected.to eq('mapped monitoring tool') }
end
describe '#hosts' do
subject { parsed_payload.hosts }
it { is_expected.to eq(['mapped-host']) }
end
describe '#severity' do
subject { parsed_payload.severity }
it { is_expected.to eq('high') }
end
describe '#environment_name' do
subject { parsed_payload.environment_name }
it { is_expected.to eq('mapped gitlab environment') }
end
describe '#gitlab_fingerprint' do
subject { parsed_payload.gitlab_fingerprint }
it { is_expected.to eq(Gitlab::AlertManagement::Fingerprint.generate('mapped fingerprint')) }
end
end
context 'with only some attributes defined in custom mapping' do
let_it_be(:attribute_mapping) do
{
title: { path: %w(alert name), type: 'string' }
}
end
let_it_be(:integration) do
create(:alert_management_http_integration, project: project, payload_attribute_mapping: attribute_mapping)
end
describe '#title' do
subject { parsed_payload.title }
it 'uses the value defined by the custom mapping' do
is_expected.to eq('mapped title')
end
end
describe '#description' do
subject { parsed_payload.description }
it 'falls back to the default value' do
is_expected.to eq('default description')
end
end
end
context 'when the payload has no default generic attributes' do
let_it_be(:raw_payload) do
{
'alert' => {
'name' => 'mapped title',
'desc' => 'mapped description'
}
}
end
let_it_be(:attribute_mapping) do
{
title: { path: %w(alert name), type: 'string' },
description: { path: %w(alert desc), type: 'string' }
}
end
let_it_be(:integration) do
create(:alert_management_http_integration, project: project, payload_attribute_mapping: attribute_mapping)
end
describe '#title' do
subject { parsed_payload.title }
it { is_expected.to eq('mapped title') }
end
describe '#description' do
subject { parsed_payload.description }
it { is_expected.to eq('mapped description') }
end
end
context 'with inactive HTTP integration' do
let_it_be(:integration) do
create(:alert_management_http_integration, :inactive, project: project, payload_attribute_mapping: attribute_mapping)
end
it_behaves_like 'parsing alert payload fields with default paths'
end
context 'with blank custom mapping' do
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
it_behaves_like 'parsing alert payload fields with default paths'
end
end
context 'with multiple_http_integrations_custom_mapping feature flag disabled' do
before do
stub_feature_flags(multiple_http_integrations_custom_mapping: false)
end
it_behaves_like 'parsing alert payload fields with default paths'
end
end
context 'with multiple HTTP integrations feature unavailable' do
before do
stub_licensed_features(multiple_alert_http_integrations: false)
end
it_behaves_like 'parsing alert payload fields with default paths'
end
end
describe '#gitlab_fingerprint' do describe '#gitlab_fingerprint' do
subject { parsed_payload.gitlab_fingerprint } subject { parsed_payload.gitlab_fingerprint }
......
...@@ -17,13 +17,14 @@ module Gitlab ...@@ -17,13 +17,14 @@ module Gitlab
# @param project [Project] # @param project [Project]
# @param payload [Hash] # @param payload [Hash]
# @param monitoring_tool [String] # @param monitoring_tool [String]
def parse(project, payload, monitoring_tool: nil) # @param integration [AlertManagement::HttpIntegration]
def parse(project, payload, monitoring_tool: nil, integration: nil)
payload_class = payload_class_for( payload_class = payload_class_for(
monitoring_tool: monitoring_tool || payload&.dig('monitoring_tool'), monitoring_tool: monitoring_tool || payload&.dig('monitoring_tool'),
payload: payload payload: payload
) )
payload_class.new(project: project, payload: payload) payload_class.new(project: project, payload: payload, integration: integration)
end end
private private
......
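For orientation, here is a minimal, hypothetical caller sketch for the new `integration:` keyword added to `Gitlab::AlertManagement::Payload.parse` above. Only the method signature comes from the diff; the variable names and how the integration record is obtained are assumptions.

```ruby
# Hypothetical usage sketch, based only on the signature change above.
# `project` and `raw_payload` would come from an incoming alert HTTP request;
# `integration` is the AlertManagement::HttpIntegration record that received it.
parsed = Gitlab::AlertManagement::Payload.parse(
  project,
  raw_payload,
  integration: integration # forwarded to the payload class for custom attribute mapping
)

parsed.title # resolved through the integration's payload_attribute_mapping when one is defined
```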
...@@ -12,7 +12,7 @@ module Gitlab ...@@ -12,7 +12,7 @@ module Gitlab
include Gitlab::Utils::StrongMemoize include Gitlab::Utils::StrongMemoize
include Gitlab::Routing include Gitlab::Routing
attr_accessor :project, :payload attr_accessor :project, :payload, :integration
# Any attribute expected to be specifically read from # Any attribute expected to be specifically read from
# or derived from an alert payload should be defined. # or derived from an alert payload should be defined.
...@@ -147,6 +147,7 @@ module Gitlab ...@@ -147,6 +147,7 @@ module Gitlab
end end
end end
# Overridden in EE::Gitlab::AlertManagement::Payload::Generic
def value_for_paths(paths) def value_for_paths(paths)
target_path = paths.find { |path| payload&.dig(*path) } target_path = paths.find { |path| payload&.dig(*path) }
......
...@@ -10,7 +10,7 @@ module Gitlab ...@@ -10,7 +10,7 @@ module Gitlab
"**#{value}**" "**#{value}**"
when :data_source when :data_source
value.capitalize value.capitalize
when :group when :product_group
"`#{value}`" "`#{value}`"
when :introduced_by_url when :introduced_by_url
"[Introduced by](#{value})" "[Introduced by](#{value})"
......
...@@ -8,16 +8,26 @@ module Gitlab ...@@ -8,16 +8,26 @@ module Gitlab
INTERSECTION_OF_AGGREGATED_METRICS = 'AND' INTERSECTION_OF_AGGREGATED_METRICS = 'AND'
ALLOWED_METRICS_AGGREGATIONS = [UNION_OF_AGGREGATED_METRICS, INTERSECTION_OF_AGGREGATED_METRICS].freeze ALLOWED_METRICS_AGGREGATIONS = [UNION_OF_AGGREGATED_METRICS, INTERSECTION_OF_AGGREGATED_METRICS].freeze
AGGREGATED_METRICS_PATH = Rails.root.join('lib/gitlab/usage_data_counters/aggregated_metrics/*.yml') AGGREGATED_METRICS_PATH = Rails.root.join('lib/gitlab/usage_data_counters/aggregated_metrics/*.yml')
UnknownAggregationOperator = Class.new(StandardError) AggregatedMetricError = Class.new(StandardError)
UnknownAggregationOperator = Class.new(AggregatedMetricError)
UnknownAggregationSource = Class.new(AggregatedMetricError)
DATABASE_SOURCE = 'database'
REDIS_SOURCE = 'redis'
SOURCES = {
DATABASE_SOURCE => Sources::PostgresHll,
REDIS_SOURCE => Sources::RedisHll
}.freeze
class Aggregate class Aggregate
delegate :calculate_events_union, delegate :weekly_time_range,
:weekly_time_range,
:monthly_time_range, :monthly_time_range,
to: Gitlab::UsageDataCounters::HLLRedisCounter to: Gitlab::UsageDataCounters::HLLRedisCounter
def initialize def initialize(recorded_at)
@aggregated_metrics = load_events(AGGREGATED_METRICS_PATH) @aggregated_metrics = load_metrics(AGGREGATED_METRICS_PATH)
@recorded_at = recorded_at
end end
def monthly_data def monthly_data
...@@ -30,35 +40,49 @@ module Gitlab ...@@ -30,35 +40,49 @@ module Gitlab
private private
attr_accessor :aggregated_metrics attr_accessor :aggregated_metrics, :recorded_at
def aggregated_metrics_data(start_date:, end_date:) def aggregated_metrics_data(start_date:, end_date:)
aggregated_metrics.each_with_object({}) do |aggregation, weekly_data| aggregated_metrics.each_with_object({}) do |aggregation, data|
next if aggregation[:feature_flag] && Feature.disabled?(aggregation[:feature_flag], default_enabled: false, type: :development) next if aggregation[:feature_flag] && Feature.disabled?(aggregation[:feature_flag], default_enabled: false, type: :development)
weekly_data[aggregation[:name]] = calculate_count_for_aggregation(aggregation, start_date: start_date, end_date: end_date) case aggregation[:source]
when REDIS_SOURCE
data[aggregation[:name]] = calculate_count_for_aggregation(aggregation: aggregation, start_date: start_date, end_date: end_date)
when DATABASE_SOURCE
next unless Feature.enabled?('database_sourced_aggregated_metrics', default_enabled: false, type: :development)
data[aggregation[:name]] = calculate_count_for_aggregation(aggregation: aggregation, start_date: start_date, end_date: end_date)
else
Gitlab::ErrorTracking
.track_and_raise_for_dev_exception(UnknownAggregationSource.new("Aggregation source: '#{aggregation[:source]}' must be included in #{SOURCES.keys}"))
data[aggregation[:name]] = Gitlab::Utils::UsageData::FALLBACK
end
end end
end end
def calculate_count_for_aggregation(aggregation, start_date:, end_date:) def calculate_count_for_aggregation(aggregation:, start_date:, end_date:)
source = SOURCES[aggregation[:source]]
case aggregation[:operator] case aggregation[:operator]
when UNION_OF_AGGREGATED_METRICS when UNION_OF_AGGREGATED_METRICS
calculate_events_union(event_names: aggregation[:events], start_date: start_date, end_date: end_date) source.calculate_metrics_union(metric_names: aggregation[:events], start_date: start_date, end_date: end_date, recorded_at: recorded_at)
when INTERSECTION_OF_AGGREGATED_METRICS when INTERSECTION_OF_AGGREGATED_METRICS
calculate_events_intersections(event_names: aggregation[:events], start_date: start_date, end_date: end_date) calculate_metrics_intersections(source: source, metric_names: aggregation[:events], start_date: start_date, end_date: end_date)
else else
Gitlab::ErrorTracking Gitlab::ErrorTracking
.track_and_raise_for_dev_exception(UnknownAggregationOperator.new("Events should be aggregated with one of operators #{ALLOWED_METRICS_AGGREGATIONS}")) .track_and_raise_for_dev_exception(UnknownAggregationOperator.new("Events should be aggregated with one of operators #{ALLOWED_METRICS_AGGREGATIONS}"))
Gitlab::Utils::UsageData::FALLBACK Gitlab::Utils::UsageData::FALLBACK
end end
rescue Gitlab::UsageDataCounters::HLLRedisCounter::EventError => error rescue Gitlab::UsageDataCounters::HLLRedisCounter::EventError, AggregatedMetricError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error) Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error)
Gitlab::Utils::UsageData::FALLBACK Gitlab::Utils::UsageData::FALLBACK
end end
# calculate intersection of 'n' sets based on inclusion exclusion principle https://en.wikipedia.org/wiki/Inclusion%E2%80%93exclusion_principle # calculate intersection of 'n' sets based on inclusion exclusion principle https://en.wikipedia.org/wiki/Inclusion%E2%80%93exclusion_principle
# this method will be extracted to dedicated module with https://gitlab.com/gitlab-org/gitlab/-/issues/273391 # this method will be extracted to dedicated module with https://gitlab.com/gitlab-org/gitlab/-/issues/273391
def calculate_events_intersections(event_names:, start_date:, end_date:, subset_powers_cache: Hash.new({})) def calculate_metrics_intersections(source:, metric_names:, start_date:, end_date:, subset_powers_cache: Hash.new({}))
# calculate power of intersection of all given metrics from inclusion exclusion principle # calculate power of intersection of all given metrics from inclusion exclusion principle
# |A + B + C| = (|A| + |B| + |C|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C|) => # |A + B + C| = (|A| + |B| + |C|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C|) =>
# |A & B & C| = - (|A| + |B| + |C|) + (|A & B| + |A & C| + .. + |C & D|) + |A + B + C| # |A & B & C| = - (|A| + |B| + |C|) + (|A & B| + |A & C| + .. + |C & D|) + |A + B + C|
...@@ -66,12 +90,12 @@ module Gitlab ...@@ -66,12 +90,12 @@ module Gitlab
# |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - |A + B + C + D| # |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - |A + B + C + D|
# calculate each components of equation except for the last one |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - ... # calculate each components of equation except for the last one |A & B & C & D| = (|A| + |B| + |C| + |D|) - (|A & B| + |A & C| + .. + |C & D|) + (|A & B & C| + |B & C & D|) - ...
subset_powers_data = subsets_intersection_powers(event_names, start_date, end_date, subset_powers_cache) subset_powers_data = subsets_intersection_powers(source, metric_names, start_date, end_date, subset_powers_cache)
# calculate last component of the equation |A & B & C & D| = .... - |A + B + C + D| # calculate last component of the equation |A & B & C & D| = .... - |A + B + C + D|
power_of_union_of_all_events = begin power_of_union_of_all_metrics = begin
subset_powers_cache[event_names.size][event_names.join('_+_')] ||= \ subset_powers_cache[metric_names.size][metric_names.join('_+_')] ||= \
calculate_events_union(event_names: event_names, start_date: start_date, end_date: end_date) source.calculate_metrics_union(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
end end
# in order to determine if part of equation (|A & B & C|, |A & B & C & D|), that represents the intersection that we need to calculate, # in order to determine if part of equation (|A & B & C|, |A & B & C & D|), that represents the intersection that we need to calculate,
...@@ -86,7 +110,7 @@ module Gitlab ...@@ -86,7 +110,7 @@ module Gitlab
sum_of_all_subset_powers = sum_subset_powers(subset_powers_data, subset_powers_size_even) sum_of_all_subset_powers = sum_subset_powers(subset_powers_data, subset_powers_size_even)
# add last component of the equation |A & B & C & D| = sum_of_all_subset_powers - |A + B + C + D| # add last component of the equation |A & B & C & D| = sum_of_all_subset_powers - |A + B + C + D|
sum_of_all_subset_powers + (subset_powers_size_even ? power_of_union_of_all_events : -power_of_union_of_all_events) sum_of_all_subset_powers + (subset_powers_size_even ? power_of_union_of_all_metrics : -power_of_union_of_all_metrics)
end end
def sum_subset_powers(subset_powers_data, subset_powers_size_even) def sum_subset_powers(subset_powers_data, subset_powers_size_even)
...@@ -97,29 +121,29 @@ module Gitlab ...@@ -97,29 +121,29 @@ module Gitlab
(subset_powers_size_even ? -1 : 1) * sum_without_sign (subset_powers_size_even ? -1 : 1) * sum_without_sign
end end
def subsets_intersection_powers(event_names, start_date, end_date, subset_powers_cache) def subsets_intersection_powers(source, metric_names, start_date, end_date, subset_powers_cache)
subset_sizes = (1..(event_names.size - 1)) subset_sizes = (1...metric_names.size)
subset_sizes.map do |subset_size| subset_sizes.map do |subset_size|
if subset_size > 1 if subset_size > 1
# calculate sum of powers of intersection between each subset (with given size) of metrics: #|A + B + C + D| = ... - (|A & B| + |A & C| + .. + |C & D|) # calculate sum of powers of intersection between each subset (with given size) of metrics: #|A + B + C + D| = ... - (|A & B| + |A & C| + .. + |C & D|)
event_names.combination(subset_size).sum do |events_subset| metric_names.combination(subset_size).sum do |metrics_subset|
subset_powers_cache[subset_size][events_subset.join('_&_')] ||= \ subset_powers_cache[subset_size][metrics_subset.join('_&_')] ||=
calculate_events_intersections(event_names: events_subset, start_date: start_date, end_date: end_date, subset_powers_cache: subset_powers_cache) calculate_metrics_intersections(source: source, metric_names: metrics_subset, start_date: start_date, end_date: end_date, subset_powers_cache: subset_powers_cache)
end end
else else
# calculate sum of powers of each set (metric) alone #|A + B + C + D| = (|A| + |B| + |C| + |D|) - ... # calculate sum of powers of each set (metric) alone #|A + B + C + D| = (|A| + |B| + |C| + |D|) - ...
event_names.sum do |event| metric_names.sum do |metric|
subset_powers_cache[subset_size][event] ||= \ subset_powers_cache[subset_size][metric] ||= \
calculate_events_union(event_names: event, start_date: start_date, end_date: end_date) source.calculate_metrics_union(metric_names: metric, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
end end
end end
end end
end end
def load_events(wildcard) def load_metrics(wildcard)
Dir[wildcard].each_with_object([]) do |path, events| Dir[wildcard].each_with_object([]) do |path, metrics|
events.push(*load_yaml_from_path(path)) metrics.push(*load_yaml_from_path(path))
end end
end end
......
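The inclusion-exclusion comments in the hunk above are easier to follow with a small numeric sketch. The numbers below are made up; the identity itself is the one `calculate_metrics_intersections` generalises to n metrics.

```ruby
# Distinct-count unions as a source (RedisHll or PostgresHll) would return them.
union_a   = 100 # |A|
union_b   = 80  # |B|
union_a_b = 150 # |A + B| (union of both metrics)

# Two-set inclusion-exclusion: |A & B| = |A| + |B| - |A + B|
intersection_a_b = union_a + union_b - union_a_b # => 30

# For three metrics the same principle gives
#   |A & B & C| = - (|A| + |B| + |C|)
#                 + (|A & B| + |A & C| + |B & C|)
#                 + |A + B + C|
# which is what subsets_intersection_powers accumulates, with signs alternating
# by subset size, before the final union term is added.
```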
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Aggregates
module Sources
class PostgresHll
class << self
def calculate_metrics_union(metric_names:, start_date:, end_date:, recorded_at:)
time_period = start_date && end_date ? (start_date..end_date) : nil
Array(metric_names).each_with_object(Gitlab::Database::PostgresHll::Buckets.new) do |event, buckets|
json = read_aggregated_metric(metric_name: event, time_period: time_period, recorded_at: recorded_at)
raise UnionNotAvailable, "Union data not available for #{metric_names}" unless json
buckets.merge_hash!(Gitlab::Json.parse(json))
end.estimated_distinct_count
end
def save_aggregated_metrics(metric_name:, time_period:, recorded_at_timestamp:, data:)
unless data.is_a? ::Gitlab::Database::PostgresHll::Buckets
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(StandardError.new("Unsupported data type: #{data.class}"))
return
end
# Usage Ping report generation for gitlab.com is a very long-running process.
# To make sure the saved keys are still available at the end of report
# generation, use triple the maximum generation time as the key expiration.
keys_expiration = ::Gitlab::UsageData::MAX_GENERATION_TIME_FOR_SAAS * 3
Gitlab::Redis::SharedState.with do |redis|
redis.set(
redis_key(metric_name: metric_name, time_period: time_period&.values&.first, recorded_at: recorded_at_timestamp),
data.to_json,
ex: keys_expiration
)
end
rescue ::Redis::CommandError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
end
private
def read_aggregated_metric(metric_name:, time_period:, recorded_at:)
Gitlab::Redis::SharedState.with do |redis|
redis.get(redis_key(metric_name: metric_name, time_period: time_period, recorded_at: recorded_at))
end
end
def redis_key(metric_name:, time_period:, recorded_at:)
# add a timestamp at the end of the key to avoid stale keys if the
# usage ping job is retried
"#{metric_name}_#{time_period_to_human_name(time_period)}-#{recorded_at.to_i}"
end
def time_period_to_human_name(time_period)
return Gitlab::Utils::UsageData::ALL_TIME_PERIOD_HUMAN_NAME if time_period.blank?
start_date = time_period.first.to_date
end_date = time_period.last.to_date
if (end_date - start_date).to_i > 7
Gitlab::Utils::UsageData::MONTHLY_PERIOD_HUMAN_NAME
else
Gitlab::Utils::UsageData::WEEKLY_PERIOD_HUMAN_NAME
end
end
end
end
end
end
end
end
end
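As a hedged illustration of the Redis key layout produced by `redis_key` and `time_period_to_human_name` above; the metric name, dates, and the exact value of `MONTHLY_PERIOD_HUMAN_NAME` are assumptions made for the example.

```ruby
metric_name = 'example_metric'                              # hypothetical metric name
recorded_at = Time.utc(2021, 1, 1)                          # usage ping start time (epoch 1609459200)
time_period = Date.new(2020, 12, 1)..Date.new(2021, 1, 1)   # > 7 days, so treated as monthly

# redis_key(metric_name: metric_name, time_period: time_period, recorded_at: recorded_at)
# => "example_metric_monthly-1609459200"  (assuming MONTHLY_PERIOD_HUMAN_NAME == 'monthly')
#
# The trailing timestamp keeps retried usage ping jobs from reusing stale keys,
# and each key expires after 3 * MAX_GENERATION_TIME_FOR_SAAS (120 hours).
```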
# frozen_string_literal: true
module Gitlab
module Usage
module Metrics
module Aggregates
module Sources
UnionNotAvailable = Class.new(AggregatedMetricError)
class RedisHll
def self.calculate_metrics_union(metric_names:, start_date:, end_date:, recorded_at: nil)
union = Gitlab::UsageDataCounters::HLLRedisCounter
.calculate_events_union(event_names: metric_names, start_date: start_date, end_date: end_date)
return union if union >= 0
raise UnionNotAvailable, "Union data not available for #{metric_names}"
end
end
end
end
end
end
end
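A minimal sketch of how `Aggregate#calculate_count_for_aggregation` above dispatches to these source classes. The aggregation hash mirrors an entry from the YAML config further down; the dates are placeholders, and the sketch assumes it runs inside the `Aggregates` namespace where `SOURCES` is defined.

```ruby
aggregation = {
  name: 'product_analytics_test_metrics_union', # example entry from the YAML config below
  source: 'redis',
  operator: 'OR',
  events: %w[i_search_total i_search_advanced i_search_paid]
}

source = SOURCES[aggregation[:source]] # => Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll

source.calculate_metrics_union(
  metric_names: aggregation[:events],
  start_date: 4.weeks.ago.to_date,
  end_date: Date.current,
  recorded_at: Time.current
)
```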
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
module Gitlab module Gitlab
class UsageData class UsageData
DEPRECATED_VALUE = -1000 DEPRECATED_VALUE = -1000
MAX_GENERATION_TIME_FOR_SAAS = 40.hours
CE_MEMOIZED_VALUES = %i( CE_MEMOIZED_VALUES = %i(
issue_minimum_id issue_minimum_id
...@@ -754,7 +755,7 @@ module Gitlab ...@@ -754,7 +755,7 @@ module Gitlab
private private
def aggregated_metrics def aggregated_metrics
@aggregated_metrics ||= ::Gitlab::Usage::Metrics::Aggregates::Aggregate.new @aggregated_metrics ||= ::Gitlab::Usage::Metrics::Aggregates::Aggregate.new(recorded_at)
end end
def event_monthly_active_users(date_range) def event_monthly_active_users(date_range)
......
...@@ -4,21 +4,28 @@ ...@@ -4,21 +4,28 @@
# - "AND": counts unique elements that were observed triggering all of following events # - "AND": counts unique elements that were observed triggering all of following events
# events: list of events names to aggregate into metric. All events in this list must have the same 'redis_slot' and 'aggregation' attributes # events: list of events names to aggregate into metric. All events in this list must have the same 'redis_slot' and 'aggregation' attributes
# see from lib/gitlab/usage_data_counters/known_events/ for the list of valid events. # see from lib/gitlab/usage_data_counters/known_events/ for the list of valid events.
# source: defines which data source will be used to locate the events included in the aggregated metric. Valid values are:
# - database
# - redis
# feature_flag: name of development feature flag that will be checked before metrics aggregation is performed. # feature_flag: name of development feature flag that will be checked before metrics aggregation is performed.
# Corresponding feature flag should have `default_enabled` attribute set to `false`. # Corresponding feature flag should have `default_enabled` attribute set to `false`.
# This attribute is OPTIONAL and can be omitted, when `feature_flag` is missing no feature flag will be checked. # This attribute is OPTIONAL and can be omitted, when `feature_flag` is missing no feature flag will be checked.
--- ---
- name: compliance_features_track_unique_visits_union - name: compliance_features_track_unique_visits_union
operator: OR operator: OR
source: redis
events: ['g_compliance_audit_events', 'g_compliance_dashboard', 'i_compliance_audit_events', 'a_compliance_audit_events_api', 'i_compliance_credential_inventory'] events: ['g_compliance_audit_events', 'g_compliance_dashboard', 'i_compliance_audit_events', 'a_compliance_audit_events_api', 'i_compliance_credential_inventory']
- name: product_analytics_test_metrics_union - name: product_analytics_test_metrics_union
operator: OR operator: OR
source: redis
events: ['i_search_total', 'i_search_advanced', 'i_search_paid'] events: ['i_search_total', 'i_search_advanced', 'i_search_paid']
- name: product_analytics_test_metrics_intersection - name: product_analytics_test_metrics_intersection
operator: AND operator: AND
source: redis
events: ['i_search_total', 'i_search_advanced', 'i_search_paid'] events: ['i_search_total', 'i_search_advanced', 'i_search_paid']
- name: incident_management_alerts_total_unique_counts - name: incident_management_alerts_total_unique_counts
operator: OR operator: OR
source: redis
events: [ events: [
'incident_management_alert_status_changed', 'incident_management_alert_status_changed',
'incident_management_alert_assigned', 'incident_management_alert_assigned',
...@@ -27,6 +34,7 @@ ...@@ -27,6 +34,7 @@
] ]
- name: incident_management_incidents_total_unique_counts - name: incident_management_incidents_total_unique_counts
operator: OR operator: OR
source: redis
events: [ events: [
'incident_management_incident_created', 'incident_management_incident_created',
'incident_management_incident_reopened', 'incident_management_incident_reopened',
......
...@@ -80,27 +80,6 @@ module Gitlab ...@@ -80,27 +80,6 @@ module Gitlab
DISTRIBUTED_HLL_FALLBACK DISTRIBUTED_HLL_FALLBACK
end end
def save_aggregated_metrics(metric_name:, time_period:, recorded_at_timestamp:, data:)
unless data.is_a? ::Gitlab::Database::PostgresHll::Buckets
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(StandardError.new("Unsupported data type: #{data.class}"))
return
end
# the longest recorded usage ping generation time for gitlab.com
# was below 40 hours, there is added error margin of 20 h
usage_ping_generation_period = 80.hours
# add timestamp at the end of the key to avoid stale keys if
# usage ping job is retried
redis_key = "#{metric_name}_#{time_period_to_human_name(time_period)}-#{recorded_at_timestamp}"
Gitlab::Redis::SharedState.with do |redis|
redis.set(redis_key, data.to_json, ex: usage_ping_generation_period)
end
rescue ::Redis::CommandError => e
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(e)
end
def sum(relation, column, batch_size: nil, start: nil, finish: nil) def sum(relation, column, batch_size: nil, start: nil, finish: nil)
Gitlab::Database::BatchCount.batch_sum(relation, column, batch_size: batch_size, start: start, finish: finish) Gitlab::Database::BatchCount.batch_sum(relation, column, batch_size: batch_size, start: start, finish: finish)
rescue ActiveRecord::StatementInvalid rescue ActiveRecord::StatementInvalid
...@@ -152,20 +131,6 @@ module Gitlab ...@@ -152,20 +131,6 @@ module Gitlab
Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event_name.to_s, values: values) Gitlab::UsageDataCounters::HLLRedisCounter.track_event(event_name.to_s, values: values)
end end
def time_period_to_human_name(time_period)
return ALL_TIME_PERIOD_HUMAN_NAME if time_period.blank?
date_range = time_period.values[0]
start_date = date_range.first.to_date
end_date = date_range.last.to_date
if (end_date - start_date).to_i > 7
MONTHLY_PERIOD_HUMAN_NAME
else
WEEKLY_PERIOD_HUMAN_NAME
end
end
private private
def prometheus_client(verify:) def prometheus_client(verify:)
......
...@@ -29389,6 +29389,9 @@ msgstr "" ...@@ -29389,6 +29389,9 @@ msgstr ""
msgid "There was an error fetching the Node's Groups" msgid "There was an error fetching the Node's Groups"
msgstr "" msgstr ""
msgid "There was an error fetching the Search Counts"
msgstr ""
msgid "There was an error fetching the deploy freezes." msgid "There was an error fetching the deploy freezes."
msgstr "" msgstr ""
......
...@@ -4,7 +4,7 @@ module QA ...@@ -4,7 +4,7 @@ module QA
module Page module Page
module Search module Search
class Results < QA::Page::Base class Results < QA::Page::Base
view 'app/views/search/_category.html.haml' do view 'app/assets/javascripts/search/topbar/constants.js' do
element :code_tab element :code_tab
element :projects_tab element :projects_tab
end end
......
...@@ -5,6 +5,7 @@ require('spec_helper') ...@@ -5,6 +5,7 @@ require('spec_helper')
RSpec.describe ProjectsController do RSpec.describe ProjectsController do
include ExternalAuthorizationServiceHelpers include ExternalAuthorizationServiceHelpers
include ProjectForksHelper include ProjectForksHelper
using RSpec::Parameterized::TableSyntax
let_it_be(:project, reload: true) { create(:project, service_desk_enabled: false) } let_it_be(:project, reload: true) { create(:project, service_desk_enabled: false) }
let_it_be(:public_project) { create(:project, :public) } let_it_be(:public_project) { create(:project, :public) }
...@@ -324,14 +325,39 @@ RSpec.describe ProjectsController do ...@@ -324,14 +325,39 @@ RSpec.describe ProjectsController do
end end
end end
context "redirection from http://someproject.git" do context 'redirection from http://someproject.git' do
it 'redirects to project page (format.html)' do where(:user_type, :project_visibility, :expected_redirect) do
project = create(:project, :public) :anonymous | :public | :redirect_to_project
:anonymous | :internal | :redirect_to_signup
:anonymous | :private | :redirect_to_signup
get :show, params: { namespace_id: project.namespace, id: project }, format: :git :signed_in | :public | :redirect_to_project
:signed_in | :internal | :redirect_to_project
:signed_in | :private | nil
expect(response).to have_gitlab_http_status(:found) :member | :public | :redirect_to_project
expect(response).to redirect_to(namespace_project_path) :member | :internal | :redirect_to_project
:member | :private | :redirect_to_project
end
with_them do
let(:redirect_to_signup) { new_user_session_path }
let(:redirect_to_project) { project_path(project) }
let(:expected_status) { expected_redirect ? :found : :not_found }
before do
project.update!(visibility: project_visibility.to_s)
project.team.add_user(user, :guest) if user_type == :member
sign_in(user) unless user_type == :anonymous
end
it 'returns the expected status' do
get :show, params: { namespace_id: project.namespace, id: project }, format: :git
expect(response).to have_gitlab_http_status(expected_status)
expect(response).to redirect_to(send(expected_redirect)) if expected_status == :found
end
end end
end end
......
...@@ -28,7 +28,7 @@ RSpec.describe 'Global search' do ...@@ -28,7 +28,7 @@ RSpec.describe 'Global search' do
create_list(:issue, 2, project: project, title: 'initial') create_list(:issue, 2, project: project, title: 'initial')
end end
it "has a pagination" do it "has a pagination", :js do
submit_search('initial') submit_search('initial')
select_search_scope('Issues') select_search_scope('Issues')
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'User searches their settings', :js do
let(:user) { create(:user) }
let(:search_input_placeholder) { 'Search settings' }
before do
sign_in(user)
end
context 'when search_settings_in_page feature flag is on' do
it 'allows searching in the user profile page' do
search_term = 'Public Avatar'
hidden_section_name = 'Main settings'
visit profile_path
fill_in search_input_placeholder, with: search_term
expect(page).to have_content(search_term)
expect(page).not_to have_content(hidden_section_name)
end
it 'allows searching in the user applications page' do
visit applications_profile_path
expect(page.find_field(placeholder: search_input_placeholder)).not_to be_disabled
end
it 'allows searching in the user preferences page' do
search_term = 'Syntax highlighting theme'
hidden_section_name = 'Behavior'
visit profile_preferences_path
fill_in search_input_placeholder, with: search_term
expect(page).to have_content(search_term)
expect(page).not_to have_content(hidden_section_name)
end
end
context 'when search_settings_in_page feature flag is off' do
before do
stub_feature_flags(search_settings_in_page: false)
visit(profile_path)
end
it 'does not allow searching in the user settings pages' do
expect(page).not_to have_content(search_input_placeholder)
end
end
end
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe 'User searches for code' do RSpec.describe 'User searches for code', :js do
let(:user) { create(:user) } let(:user) { create(:user) }
let(:project) { create(:project, :repository, namespace: user.namespace) } let(:project) { create(:project, :repository, namespace: user.namespace) }
...@@ -16,6 +16,7 @@ RSpec.describe 'User searches for code' do ...@@ -16,6 +16,7 @@ RSpec.describe 'User searches for code' do
visit(project_path(project)) visit(project_path(project))
submit_search('application.js') submit_search('application.js')
select_search_scope('Code') select_search_scope('Code')
expect(page).to have_selector('.results', text: 'application.js') expect(page).to have_selector('.results', text: 'application.js')
...@@ -24,7 +25,7 @@ RSpec.describe 'User searches for code' do ...@@ -24,7 +25,7 @@ RSpec.describe 'User searches for code' do
expect(page).to have_link('application.js', href: /master\/files\/js\/application.js/) expect(page).to have_link('application.js', href: /master\/files\/js\/application.js/)
end end
context 'when on a project page', :js do context 'when on a project page' do
before do before do
visit(search_path) visit(search_path)
find('[data-testid="project-filter"]').click find('[data-testid="project-filter"]').click
...@@ -48,7 +49,7 @@ RSpec.describe 'User searches for code' do ...@@ -48,7 +49,7 @@ RSpec.describe 'User searches for code' do
expect(current_url).to match(/master\/.gitignore#L3/) expect(current_url).to match(/master\/.gitignore#L3/)
end end
it 'search mutiple words with refs switching' do it 'searches multiple words with refs switching' do
expected_result = 'Use `snake_case` for naming files' expected_result = 'Use `snake_case` for naming files'
search = 'for naming files' search = 'for naming files'
...@@ -67,7 +68,7 @@ RSpec.describe 'User searches for code' do ...@@ -67,7 +68,7 @@ RSpec.describe 'User searches for code' do
end end
end end
context 'search code within refs', :js do context 'search code within refs' do
let(:ref_name) { 'v1.0.0' } let(:ref_name) { 'v1.0.0' }
before do before do
...@@ -85,9 +86,9 @@ RSpec.describe 'User searches for code' do ...@@ -85,9 +86,9 @@ RSpec.describe 'User searches for code' do
expect(find('.js-project-refs-dropdown')).to have_text(ref_name) expect(find('.js-project-refs-dropdown')).to have_text(ref_name)
end end
# this example is use to test the desgine that the refs is not # this example is used to test the design where the refs represent
# only repersent the branch as well as the tags. # not only branches but also tags.
it 'ref swither list all the branchs and tags' do it 'ref switcher lists all the branches and tags' do
find('.js-project-refs-dropdown').click find('.js-project-refs-dropdown').click
expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number') expect(find('.dropdown-page-one .dropdown-content')).to have_link('sha-starting-with-large-number')
expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0') expect(find('.dropdown-page-one .dropdown-content')).to have_link('v1.0.0')
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe 'User searches for comments' do RSpec.describe 'User searches for comments', :js do
let(:project) { create(:project, :repository) } let(:project) { create(:project, :repository) }
let(:user) { create(:user) } let(:user) { create(:user) }
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe 'User searches for users' do RSpec.describe 'User searches for users', :js do
let(:user1) { create(:user, username: 'gob_bluth', name: 'Gob Bluth') } let(:user1) { create(:user, username: 'gob_bluth', name: 'Gob Bluth') }
let(:user2) { create(:user, username: 'michael_bluth', name: 'Michael Bluth') } let(:user2) { create(:user, username: 'michael_bluth', name: 'Michael Bluth') }
let(:user3) { create(:user, username: 'gob_2018', name: 'George Oscar Bluth') } let(:user3) { create(:user, username: 'gob_2018', name: 'George Oscar Bluth') }
...@@ -12,7 +12,7 @@ RSpec.describe 'User searches for users' do ...@@ -12,7 +12,7 @@ RSpec.describe 'User searches for users' do
end end
context 'when on the dashboard' do context 'when on the dashboard' do
it 'finds the user', :js do it 'finds the user' do
visit dashboard_projects_path visit dashboard_projects_path
submit_search('gob') submit_search('gob')
......
...@@ -2,7 +2,7 @@ ...@@ -2,7 +2,7 @@
require 'spec_helper' require 'spec_helper'
RSpec.describe 'Search Snippets' do RSpec.describe 'Search Snippets', :js do
it 'user searches for snippets by title' do it 'user searches for snippets by title' do
public_snippet = create(:personal_snippet, :public, title: 'Beginning and Middle') public_snippet = create(:personal_snippet, :public, title: 'Beginning and Middle')
private_snippet = create(:personal_snippet, :private, title: 'Middle and End') private_snippet = create(:personal_snippet, :private, title: 'Middle and End')
......
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`pages/search/show/refresh_counts fetches and displays search counts 1`] = `
"<div class=\\"badge\\">22</div>
<div class=\\"badge js-search-count\\" data-url=\\"http://test.host/search/count?search=lorem+ipsum&amp;project_id=3&amp;scope=issues\\">4</div>
<div class=\\"badge js-search-count\\" data-url=\\"http://test.host/search/count?search=lorem+ipsum&amp;project_id=3&amp;scope=merge_requests\\">5</div>"
`;
import MockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
import refreshCounts from '~/pages/search/show/refresh_counts';
const URL = `${TEST_HOST}/search/count?search=lorem+ipsum&project_id=3`;
const urlWithScope = (scope) => `${URL}&scope=${scope}`;
const counts = [
{ scope: 'issues', count: 4 },
{ scope: 'merge_requests', count: 5 },
];
const fixture = `<div class="badge">22</div>
<div class="badge js-search-count hidden" data-url="${urlWithScope('issues')}"></div>
<div class="badge js-search-count hidden" data-url="${urlWithScope('merge_requests')}"></div>`;
describe('pages/search/show/refresh_counts', () => {
let mock;
beforeEach(() => {
mock = new MockAdapter(axios);
setFixtures(fixture);
});
afterEach(() => {
mock.restore();
});
it('fetches and displays search counts', () => {
counts.forEach(({ scope, count }) => {
mock.onGet(urlWithScope(scope)).reply(200, { count });
});
// act: refresh the counts, then assert against the rendered fixture
return refreshCounts().then(() => {
expect(document.body.innerHTML).toMatchSnapshot();
});
});
});
...@@ -61,3 +61,28 @@ export const MOCK_SORT_OPTIONS = [ ...@@ -61,3 +61,28 @@ export const MOCK_SORT_OPTIONS = [
}, },
}, },
]; ];
export const MOCK_SEARCH_COUNTS_INPUT = {
scopeTabs: ['issues', 'snippet_titles', 'merge_requests'],
activeCount: '15',
};
export const MOCK_SEARCH_COUNT = { scope: 'issues', count: '15' };
export const MOCK_SEARCH_COUNTS_SUCCESS = [
{ scope: 'issues', count: '15' },
{ scope: 'snippet_titles', count: '15' },
{ scope: 'merge_requests', count: '15' },
];
export const MOCK_SEARCH_COUNTS = [
{ scope: 'issues', count: '15' },
{ scope: 'snippet_titles', count: '5' },
{ scope: 'merge_requests', count: '1' },
];
export const MOCK_SCOPE_TABS = [
{ scope: 'issues', title: 'Issues', count: '15' },
{ scope: 'snippet_titles', title: 'Titles and Descriptions', count: '5' },
{ scope: 'merge_requests', title: 'Merge requests', count: '1' },
];
...@@ -7,7 +7,15 @@ import * as urlUtils from '~/lib/utils/url_utility'; ...@@ -7,7 +7,15 @@ import * as urlUtils from '~/lib/utils/url_utility';
import createState from '~/search/store/state'; import createState from '~/search/store/state';
import axios from '~/lib/utils/axios_utils'; import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash'; import createFlash from '~/flash';
import { MOCK_QUERY, MOCK_GROUPS, MOCK_PROJECT, MOCK_PROJECTS } from '../mock_data'; import {
MOCK_QUERY,
MOCK_GROUPS,
MOCK_PROJECT,
MOCK_PROJECTS,
MOCK_SEARCH_COUNT,
MOCK_SEARCH_COUNTS_SUCCESS,
MOCK_SEARCH_COUNTS_INPUT,
} from '../mock_data';
jest.mock('~/flash'); jest.mock('~/flash');
jest.mock('~/lib/utils/url_utility', () => ({ jest.mock('~/lib/utils/url_utility', () => ({
...@@ -37,19 +45,21 @@ describe('Global Search Store Actions', () => { ...@@ -37,19 +45,21 @@ describe('Global Search Store Actions', () => {
}); });
describe.each` describe.each`
action | axiosMock | type | expectedMutations | callback action | axiosMock | payload | type | expectedMutations | callback
${actions.fetchGroups} | ${{ method: 'onGet', code: 200, res: MOCK_GROUPS }} | ${'success'} | ${[{ type: types.REQUEST_GROUPS }, { type: types.RECEIVE_GROUPS_SUCCESS, payload: MOCK_GROUPS }]} | ${noCallback} ${actions.fetchGroups} | ${{ method: 'onGet', code: 200, res: MOCK_GROUPS }} | ${null} | ${'success'} | ${[{ type: types.REQUEST_GROUPS }, { type: types.RECEIVE_GROUPS_SUCCESS, payload: MOCK_GROUPS }]} | ${noCallback}
${actions.fetchGroups} | ${{ method: 'onGet', code: 500, res: null }} | ${'error'} | ${[{ type: types.REQUEST_GROUPS }, { type: types.RECEIVE_GROUPS_ERROR }]} | ${flashCallback} ${actions.fetchGroups} | ${{ method: 'onGet', code: 500, res: null }} | ${null} | ${'error'} | ${[{ type: types.REQUEST_GROUPS }, { type: types.RECEIVE_GROUPS_ERROR }]} | ${flashCallback}
${actions.fetchProjects} | ${{ method: 'onGet', code: 200, res: MOCK_PROJECTS }} | ${'success'} | ${[{ type: types.REQUEST_PROJECTS }, { type: types.RECEIVE_PROJECTS_SUCCESS, payload: MOCK_PROJECTS }]} | ${noCallback} ${actions.fetchProjects} | ${{ method: 'onGet', code: 200, res: MOCK_PROJECTS }} | ${null} | ${'success'} | ${[{ type: types.REQUEST_PROJECTS }, { type: types.RECEIVE_PROJECTS_SUCCESS, payload: MOCK_PROJECTS }]} | ${noCallback}
${actions.fetchProjects} | ${{ method: 'onGet', code: 500, res: null }} | ${'error'} | ${[{ type: types.REQUEST_PROJECTS }, { type: types.RECEIVE_PROJECTS_ERROR }]} | ${flashCallback} ${actions.fetchProjects} | ${{ method: 'onGet', code: 500, res: null }} | ${null} | ${'error'} | ${[{ type: types.REQUEST_PROJECTS }, { type: types.RECEIVE_PROJECTS_ERROR }]} | ${flashCallback}
`(`axios calls`, ({ action, axiosMock, type, expectedMutations, callback }) => { ${actions.fetchSearchCounts} | ${{ method: 'onGet', code: 200, res: MOCK_SEARCH_COUNT }} | ${MOCK_SEARCH_COUNTS_INPUT} | ${'success'} | ${[{ type: types.REQUEST_SEARCH_COUNTS, payload: MOCK_SEARCH_COUNTS_INPUT }, { type: types.RECEIVE_SEARCH_COUNTS_SUCCESS, payload: MOCK_SEARCH_COUNTS_SUCCESS }]} | ${noCallback}
${actions.fetchSearchCounts} | ${{ method: 'onGet', code: 500, res: null }} | ${MOCK_SEARCH_COUNTS_INPUT} | ${'error'} | ${[{ type: types.REQUEST_SEARCH_COUNTS, payload: MOCK_SEARCH_COUNTS_INPUT }]} | ${flashCallback}
`(`axios calls`, ({ action, axiosMock, payload, type, expectedMutations, callback }) => {
describe(action.name, () => { describe(action.name, () => {
describe(`on ${type}`, () => { describe(`on ${type}`, () => {
beforeEach(() => { beforeEach(() => {
mock[axiosMock.method]().replyOnce(axiosMock.code, axiosMock.res); mock[axiosMock.method]().reply(axiosMock.code, axiosMock.res);
}); });
it(`should dispatch the correct mutations`, () => { it(`should dispatch the correct mutations`, () => {
return testAction({ action, state, expectedMutations }).then(() => callback()); return testAction({ action, payload, state, expectedMutations }).then(() => callback());
}); });
}); });
}); });
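The fetchSearchCounts rows added above exercise a store action that is not part of this diff. Below is a minimal sketch of how such an action could be wired up, assuming the fixture shapes implied by the expected mutations (MOCK_SEARCH_COUNTS_INPUT as { scopeTabs, activeCount } and MOCK_SEARCH_COUNTS_SUCCESS as an array of { scope, count } objects) and a GET /search/count endpoint returning { count }; it is not the store code shipped in this merge request.
// Hypothetical sketch only -- not the shipped store action.
import axios from '~/lib/utils/axios_utils';
import createFlash from '~/flash';
import { __ } from '~/locale';
import * as types from './mutation_types';
export const fetchSearchCounts = ({ commit, state }, { scopeTabs, activeCount }) => {
  // Seed the tabs immediately so the UI can render before the counts arrive.
  commit(types.REQUEST_SEARCH_COUNTS, { scopeTabs, activeCount });
  const requests = scopeTabs.map((scope) =>
    axios
      .get('/search/count', { params: { ...state.query, scope } })
      .then(({ data }) => ({ scope, count: data.count })),
  );
  return Promise.all(requests)
    .then((counts) => commit(types.RECEIVE_SEARCH_COUNTS_SUCCESS, counts))
    // Exact flash helper signature is assumed here.
    .catch(() => createFlash({ message: __('There was an error fetching the search counts.') }));
};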
...@@ -115,9 +125,25 @@ describe('Global Search Store Actions', () => { ...@@ -115,9 +125,25 @@ describe('Global Search Store Actions', () => {
page: null, page: null,
state: null, state: null,
confidential: null, confidential: null,
nav_source: null,
}); });
expect(urlUtils.visitUrl).toHaveBeenCalled(); expect(urlUtils.visitUrl).toHaveBeenCalled();
}); });
}); });
}); });
it('calls setUrlParams with snippets: true and nulled group_id and project_id when the snippets param is true', () => {
return testAction(actions.resetQuery, true, state, [], [], () => {
expect(urlUtils.setUrlParams).toHaveBeenCalledWith({
...state.query,
page: null,
state: null,
confidential: null,
nav_source: null,
group_id: null,
project_id: null,
snippets: true,
});
});
});
}); });
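For orientation, the resetQuery expectations above (page, state, confidential, and nav_source nulled on every reset, with group_id and project_id nulled and snippets set to true only when the snippets flag is passed) could be satisfied by an action along the following lines; this is an illustrative sketch, not the code under test.
// Hypothetical sketch of an action matching the resetQuery expectations above.
import { visitUrl, setUrlParams } from '~/lib/utils/url_utility';
export const resetQuery = ({ state }, snippets = false) => {
  visitUrl(
    setUrlParams({
      ...state.query,
      page: null,
      state: null,
      confidential: null,
      nav_source: null,
      // Snippet search is global, so drop any group/project scoping.
      ...(snippets ? { group_id: null, project_id: null, snippets: true } : {}),
    }),
  );
};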
import mutations from '~/search/store/mutations'; import mutations from '~/search/store/mutations';
import createState from '~/search/store/state'; import createState from '~/search/store/state';
import * as types from '~/search/store/mutation_types'; import * as types from '~/search/store/mutation_types';
import { MOCK_QUERY, MOCK_GROUPS, MOCK_PROJECTS } from '../mock_data'; import {
MOCK_QUERY,
MOCK_GROUPS,
MOCK_PROJECTS,
MOCK_SEARCH_COUNTS,
MOCK_SCOPE_TABS,
} from '../mock_data';
describe('Global Search Store Mutations', () => { describe('Global Search Store Mutations', () => {
let state; let state;
...@@ -71,4 +77,32 @@ describe('Global Search Store Mutations', () => { ...@@ -71,4 +77,32 @@ describe('Global Search Store Mutations', () => {
expect(state.query[payload.key]).toBe(payload.value); expect(state.query[payload.key]).toBe(payload.value);
}); });
}); });
describe('REQUEST_SEARCH_COUNTS', () => {
it('sets the count for the active scope (query.scope) to activeCount', () => {
const payload = { scopeTabs: ['issues'], activeCount: '22' };
mutations[types.REQUEST_SEARCH_COUNTS](state, payload);
expect(state.inflatedScopeTabs).toStrictEqual([
{ scope: 'issues', title: 'Issues', count: '22' },
]);
});
it('sets the count for other scopes to an empty string', () => {
const payload = { scopeTabs: ['milestones'], activeCount: '22' };
mutations[types.REQUEST_SEARCH_COUNTS](state, payload);
expect(state.inflatedScopeTabs).toStrictEqual([
{ scope: 'milestones', title: 'Milestones', count: '' },
]);
});
});
describe('RECEIVE_SEARCH_COUNTS_SUCCESS', () => {
it('sets the count from the input for all tabs', () => {
mutations[types.RECEIVE_SEARCH_COUNTS_SUCCESS](state, MOCK_SEARCH_COUNTS);
expect(state.inflatedScopeTabs).toStrictEqual(MOCK_SCOPE_TABS);
});
});
}); });
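The two mutations exercised above are not visible in this diff. A plausible minimal implementation, assuming a module-level registry that maps scope names to display titles (called ALL_SCOPE_TABS here purely for illustration, echoing the constant named in the component spec further down), could look like this:
// Hypothetical sketch of the mutations exercised above, not the shipped code.
import * as types from '~/search/store/mutation_types';
// Assumed registry mapping scope names to display titles.
const ALL_SCOPE_TABS = {
  issues: { scope: 'issues', title: 'Issues' },
  snippet_titles: { scope: 'snippet_titles', title: 'Titles and Descriptions' },
  merge_requests: { scope: 'merge_requests', title: 'Merge requests' },
  milestones: { scope: 'milestones', title: 'Milestones' },
};
export default {
  [types.REQUEST_SEARCH_COUNTS](state, { scopeTabs, activeCount }) {
    // Only the currently active scope (state.query.scope) already knows its count.
    state.inflatedScopeTabs = scopeTabs.map((scope) => ({
      ...ALL_SCOPE_TABS[scope],
      count: scope === state.query.scope ? activeCount : '',
    }));
  },
  [types.RECEIVE_SEARCH_COUNTS_SUCCESS](state, payload) {
    // payload: [{ scope, count }, ...] -- apply the fetched count to every tab.
    state.inflatedScopeTabs = payload.map(({ scope, count }) => ({
      ...ALL_SCOPE_TABS[scope],
      count,
    }));
  },
};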
...@@ -5,6 +5,7 @@ import { MOCK_QUERY } from 'jest/search/mock_data'; ...@@ -5,6 +5,7 @@ import { MOCK_QUERY } from 'jest/search/mock_data';
import GlobalSearchTopbar from '~/search/topbar/components/app.vue'; import GlobalSearchTopbar from '~/search/topbar/components/app.vue';
import GroupFilter from '~/search/topbar/components/group_filter.vue'; import GroupFilter from '~/search/topbar/components/group_filter.vue';
import ProjectFilter from '~/search/topbar/components/project_filter.vue'; import ProjectFilter from '~/search/topbar/components/project_filter.vue';
import ScopeTabs from '~/search/topbar/components/scope_tabs.vue';
const localVue = createLocalVue(); const localVue = createLocalVue();
localVue.use(Vuex); localVue.use(Vuex);
...@@ -42,6 +43,7 @@ describe('GlobalSearchTopbar', () => { ...@@ -42,6 +43,7 @@ describe('GlobalSearchTopbar', () => {
const findGroupFilter = () => wrapper.find(GroupFilter); const findGroupFilter = () => wrapper.find(GroupFilter);
const findProjectFilter = () => wrapper.find(ProjectFilter); const findProjectFilter = () => wrapper.find(ProjectFilter);
const findSearchButton = () => wrapper.find(GlButton); const findSearchButton = () => wrapper.find(GlButton);
const findScopeTabs = () => wrapper.find(ScopeTabs);
describe('template', () => { describe('template', () => {
beforeEach(() => { beforeEach(() => {
...@@ -52,6 +54,18 @@ describe('GlobalSearchTopbar', () => { ...@@ -52,6 +54,18 @@ describe('GlobalSearchTopbar', () => {
expect(findTopbarForm().exists()).toBe(true); expect(findTopbarForm().exists()).toBe(true);
}); });
describe('Scope Tabs', () => {
it('renders when search param is set', () => {
createComponent({ query: { search: 'test' } });
expect(findScopeTabs().exists()).toBe(true);
});
it('does not render when search param is blank', () => {
createComponent({ query: {} });
expect(findScopeTabs().exists()).toBe(false);
});
});
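The two cases above imply the topbar mounts ScopeTabs only after a search term has been submitted. A computed flag along these lines (illustrative, not the component source) would produce that behaviour when bound with v-if on the ScopeTabs element:
// Illustrative only: a computed flag the topbar template could bind with v-if.
import { mapState } from 'vuex';
export default {
  computed: {
    ...mapState(['query']),
    showScopeTabs() {
      // Render the tabs only once a search has actually been performed.
      return Boolean(this.query && this.query.search);
    },
  },
};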
describe('Search box', () => { describe('Search box', () => {
it('renders always', () => { it('renders always', () => {
expect(findGlSearchBox().exists()).toBe(true); expect(findGlSearchBox().exists()).toBe(true);
......
import Vuex from 'vuex';
import { createLocalVue, mount } from '@vue/test-utils';
import { GlTabs, GlTab, GlBadge } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import { MOCK_QUERY, MOCK_SCOPE_TABS } from 'jest/search/mock_data';
import ScopeTabs from '~/search/topbar/components/scope_tabs.vue';
const localVue = createLocalVue();
localVue.use(Vuex);
describe('ScopeTabs', () => {
let wrapper;
const actionSpies = {
fetchSearchCounts: jest.fn(),
setQuery: jest.fn(),
resetQuery: jest.fn(),
};
const defaultProps = {
scopeTabs: ['issues', 'merge_requests', 'milestones'],
count: '20',
};
const createComponent = (props = {}, initialState = {}) => {
const store = new Vuex.Store({
state: {
query: {
...MOCK_QUERY,
search: 'test',
},
...initialState,
},
actions: actionSpies,
});
wrapper = extendedWrapper(
mount(ScopeTabs, {
localVue,
store,
propsData: {
...defaultProps,
...props,
},
}),
);
};
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
const findScopeTabs = () => wrapper.find(GlTabs);
const findTabs = () => wrapper.findAll(GlTab);
const findBadges = () => wrapper.findAll(GlBadge);
const findTabsTitle = () =>
wrapper.findAll('[data-testid="tab-title"]').wrappers.map((w) => w.text());
const findBadgesTitle = () => findBadges().wrappers.map((w) => w.text());
const findBadgeByScope = (scope) => wrapper.findByTestId(`badge-${scope}`);
const findTabByScope = (scope) => wrapper.findByTestId(`tab-${scope}`);
describe('template', () => {
beforeEach(() => {
createComponent({}, { inflatedScopeTabs: MOCK_SCOPE_TABS });
});
it('always renders Scope Tabs', () => {
expect(findScopeTabs().exists()).toBe(true);
});
describe('findTabs', () => {
it('renders a tab for each scope', () => {
expect(findTabs()).toHaveLength(defaultProps.scopeTabs.length);
expect(findTabsTitle()).toStrictEqual([
'Issues',
'Titles and Descriptions',
'Merge requests',
]);
});
});
describe('findBadges', () => {
it('renders a badge for each scope', () => {
expect(findBadges()).toHaveLength(defaultProps.scopeTabs.length);
expect(findBadgesTitle()).toStrictEqual(['15', '5', '1']);
});
it('sets the variant to neutral for the active tab only', () => {
expect(findBadgeByScope('issues').classes()).toContain('badge-neutral');
expect(findBadgeByScope('snippet_titles').classes()).toContain('badge-muted');
expect(findBadgeByScope('merge_requests').classes()).toContain('badge-muted');
});
});
});
describe('methods', () => {
beforeEach(() => {
createComponent({}, { inflatedScopeTabs: MOCK_SCOPE_TABS });
findTabByScope('snippet_titles').vm.$emit('click');
});
describe('handleTabChange', () => {
it('calls setQuery with the scope of the clicked tab', () => {
expect(actionSpies.setQuery).toHaveBeenCalledWith(expect.any(Object), {
key: 'scope',
value: 'snippet_titles',
});
});
it('calls resetQuery and sends true for snippet_titles tab', () => {
expect(actionSpies.resetQuery).toHaveBeenCalledWith(expect.any(Object), true);
});
it('calls resetQuery and does not send true for other tabs', () => {
findTabByScope('issues').vm.$emit('click');
expect(actionSpies.resetQuery).toHaveBeenCalledWith(expect.any(Object), false);
});
});
});
});
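The methods block above pins down the click behaviour: update the scope via setQuery, then call resetQuery with true only for the snippet_titles tab. A hedged sketch of such a handler, where the action names mirror the spies asserted above and everything else is assumed:
// Hypothetical click handler matching the spies asserted in this spec.
import { mapActions } from 'vuex';
export default {
  methods: {
    ...mapActions(['setQuery', 'resetQuery']),
    handleTabChange(scope) {
      this.setQuery({ key: 'scope', value: scope });
      // Snippet search drops group/project context, so tell resetQuery to clear it.
      this.resetQuery(scope === 'snippet_titles');
    },
  },
};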
...@@ -6,7 +6,7 @@ RSpec.describe Resolvers::PackagesResolver do ...@@ -6,7 +6,7 @@ RSpec.describe Resolvers::PackagesResolver do
include GraphqlHelpers include GraphqlHelpers
let_it_be(:user) { create(:user) } let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) } let_it_be(:project) { create(:project, :public) }
let_it_be(:package) { create(:package, project: project) } let_it_be(:package) { create(:package, project: project) }
describe '#resolve' do describe '#resolve' do
......
...@@ -6,9 +6,10 @@ RSpec.describe Resolvers::ReleaseMilestonesResolver do ...@@ -6,9 +6,10 @@ RSpec.describe Resolvers::ReleaseMilestonesResolver do
include GraphqlHelpers include GraphqlHelpers
let_it_be(:release) { create(:release, :with_milestones, milestones_count: 2) } let_it_be(:release) { create(:release, :with_milestones, milestones_count: 2) }
let_it_be(:current_user) { create(:user, developer_projects: [release.project]) }
let(:resolved) do let(:resolved) do
resolve(described_class, obj: release) resolve(described_class, obj: release, ctx: { current_user: current_user })
end end
describe '#resolve' do describe '#resolve' do
......
...@@ -392,63 +392,6 @@ RSpec.describe SearchHelper do ...@@ -392,63 +392,6 @@ RSpec.describe SearchHelper do
end end
end end
describe 'search_filter_link' do
it 'renders a search filter link for the current scope' do
@scope = 'projects'
@search_results = double
expect(@search_results).to receive(:formatted_count).with('projects').and_return('23')
link = search_filter_link('projects', 'Projects')
expect(link).to have_css('li.active')
expect(link).to have_link('Projects', href: search_path(scope: 'projects'))
expect(link).to have_css('span.badge.badge-pill:not(.js-search-count):not(.hidden):not([data-url])', text: '23')
end
it 'renders a search filter link for another scope' do
link = search_filter_link('projects', 'Projects')
count_path = search_count_path(scope: 'projects')
expect(link).to have_css('li:not([class="active"])')
expect(link).to have_link('Projects', href: search_path(scope: 'projects'))
expect(link).to have_css("span.badge.badge-pill.js-search-count.hidden[data-url='#{count_path}']", text: '')
end
it 'merges in the current search params and given params' do
expect(self).to receive(:params).and_return(
ActionController::Parameters.new(
search: 'hello',
scope: 'ignored',
other_param: 'ignored'
)
)
link = search_filter_link('projects', 'Projects', search: { project_id: 23 })
expect(link).to have_link('Projects', href: search_path(scope: 'projects', search: 'hello', project_id: 23))
end
it 'restricts the params' do
expect(self).to receive(:params).and_return(
ActionController::Parameters.new(
search: 'hello',
unknown: 42
)
)
link = search_filter_link('projects', 'Projects')
expect(link).to have_link('Projects', href: search_path(scope: 'projects', search: 'hello'))
end
it 'assigns given data attributes on the list container' do
link = search_filter_link('projects', 'Projects', data: { foo: 'bar' })
expect(link).to have_css('li[data-foo="bar"]')
end
end
describe '#show_user_search_tab?' do describe '#show_user_search_tab?' do
subject { show_user_search_tab? } subject { show_user_search_tab? }
...@@ -631,4 +574,86 @@ RSpec.describe SearchHelper do ...@@ -631,4 +574,86 @@ RSpec.describe SearchHelper do
expect(search_sort_options).to eq([mock_created_sort]) expect(search_sort_options).to eq([mock_created_sort])
end end
end end
describe '#search_nav_tabs' do
subject { search_nav_tabs }
let(:current_user) { nil }
before do
allow(self).to receive(:current_user).and_return(current_user)
end
context 'when @show_snippets is present' do
before do
@show_snippets = 1
end
it { is_expected.to eq([:snippet_titles]) }
context 'and @project is present' do
before do
@project = 1
allow(self).to receive(:project_search_tabs?).with(anything).and_return(true)
end
it { is_expected.to eq([:blobs, :issues, :merge_requests, :milestones, :notes, :wiki_blobs, :commits, :users]) }
end
end
context 'when @project is present' do
before do
@project = 1
end
context 'when user has access to project' do
before do
allow(self).to receive(:project_search_tabs?).with(anything).and_return(true)
end
it { is_expected.to eq([:blobs, :issues, :merge_requests, :milestones, :notes, :wiki_blobs, :commits, :users]) }
end
context 'when user does not have access to project' do
before do
allow(self).to receive(:project_search_tabs?).with(anything).and_return(false)
end
it { is_expected.to eq([]) }
end
context 'when user does not have access to read members for project' do
before do
allow(self).to receive(:project_search_tabs?).with(:members).and_return(false)
allow(self).to receive(:project_search_tabs?).with(:merge_requests).and_return(true)
allow(self).to receive(:project_search_tabs?).with(:milestones).and_return(true)
allow(self).to receive(:project_search_tabs?).with(:wiki_blobs).and_return(true)
allow(self).to receive(:project_search_tabs?).with(:issues).and_return(true)
allow(self).to receive(:project_search_tabs?).with(:blobs).and_return(true)
allow(self).to receive(:project_search_tabs?).with(:notes).and_return(true)
allow(self).to receive(:project_search_tabs?).with(:commits).and_return(true)
end
it { is_expected.to eq([:blobs, :issues, :merge_requests, :milestones, :notes, :wiki_blobs, :commits]) }
end
end
context 'when @show_snippets and @project are not present' do
context 'when user has access to read users' do
before do
allow(self).to receive(:can?).with(current_user, :read_users_list).and_return(true)
end
it { is_expected.to eq([:projects, :issues, :merge_requests, :milestones, :users]) }
end
context 'when user does not have access to read users' do
before do
allow(self).to receive(:can?).with(current_user, :read_users_list).and_return(false)
end
it { is_expected.to eq([:projects, :issues, :merge_requests, :milestones]) }
end
end
end
end end
...@@ -56,5 +56,20 @@ RSpec.describe Gitlab::AlertManagement::Payload do ...@@ -56,5 +56,20 @@ RSpec.describe Gitlab::AlertManagement::Payload do
it { is_expected.to be_a Gitlab::AlertManagement::Payload::Generic } it { is_expected.to be_a Gitlab::AlertManagement::Payload::Generic }
end end
end end
context 'with integration specified by caller' do
let(:integration) { instance_double(AlertManagement::HttpIntegration) }
subject { described_class.parse(project, payload, integration: integration) }
it 'passes an integration to a specific payload' do
expect(::Gitlab::AlertManagement::Payload::Generic)
.to receive(:new)
.with(project: project, payload: payload, integration: integration)
.and_call_original
subject
end
end
end end
end end
...@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Usage::Docs::ValueFormatter do ...@@ -6,7 +6,7 @@ RSpec.describe Gitlab::Usage::Docs::ValueFormatter do
describe '.format' do describe '.format' do
using RSpec::Parameterized::TableSyntax using RSpec::Parameterized::TableSyntax
where(:key, :value, :expected_value) do where(:key, :value, :expected_value) do
:group | 'growth::product intelligence' | '`growth::product intelligence`' :product_group | 'growth::product intelligence' | '`growth::product intelligence`'
:data_source | 'redis' | 'Redis' :data_source | 'redis' | 'Redis'
:data_source | 'ruby' | 'Ruby' :data_source | 'ruby' | 'Ruby'
:introduced_by_url | 'http://test.com' | '[Introduced by](http://test.com)' :introduced_by_url | 'http://test.com' | '[Introduced by](http://test.com)'
......
...@@ -8,11 +8,11 @@ RSpec.describe Gitlab::Usage::MetricDefinition do ...@@ -8,11 +8,11 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
description: 'GitLab instance unique identifier', description: 'GitLab instance unique identifier',
value_type: 'string', value_type: 'string',
product_category: 'collection', product_category: 'collection',
stage: 'growth', product_stage: 'growth',
status: 'data_available', status: 'data_available',
default_generation: 'generation_1', default_generation: 'generation_1',
key_path: 'uuid', key_path: 'uuid',
group: 'group::product analytics', product_group: 'group::product analytics',
time_frame: 'none', time_frame: 'none',
data_source: 'database', data_source: 'database',
distribution: %w(ee ce), distribution: %w(ee ce),
...@@ -45,7 +45,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do ...@@ -45,7 +45,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:value_type | 'test' :value_type | 'test'
:status | nil :status | nil
:key_path | nil :key_path | nil
:group | nil :product_group | nil
:time_frame | nil :time_frame | nil
:time_frame | '29d' :time_frame | '29d'
:data_source | 'other' :data_source | 'other'
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_gitlab_redis_shared_state do
let_it_be(:start_date) { 7.days.ago }
let_it_be(:end_date) { Date.current }
let_it_be(:recorded_at) { Time.current }
let_it_be(:time_period) { { created_at: (start_date..end_date) } }
let(:metric_1) { 'metric_1' }
let(:metric_2) { 'metric_2' }
let(:metric_names) { [metric_1, metric_2] }
describe '.calculate_events_union' do
subject(:calculate_metrics_union) do
described_class.calculate_metrics_union(metric_names: metric_names, start_date: start_date, end_date: end_date, recorded_at: recorded_at)
end
before do
[
{
metric_name: metric_1,
time_period: time_period,
recorded_at_timestamp: recorded_at,
data: ::Gitlab::Database::PostgresHll::Buckets.new(141 => 1, 56 => 1)
},
{
metric_name: metric_2,
time_period: time_period,
recorded_at_timestamp: recorded_at,
data: ::Gitlab::Database::PostgresHll::Buckets.new(10 => 1, 56 => 1)
}
].each do |params|
described_class.save_aggregated_metrics(**params)
end
end
it 'returns the number of unique events in the union of all metrics' do
expect(calculate_metrics_union.round(2)).to eq(3.12)
end
context 'when there is no aggregated data saved' do
let(:metric_names) { [metric_1, 'i do not have any records'] }
it 'raises error when union data is missing' do
expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
end
end
context 'when there is only one metric defined as aggregated' do
let(:metric_names) { [metric_1] }
it 'returns the number of unique events for that metric' do
expect(calculate_metrics_union.round(2)).to eq(2.08)
end
end
end
describe '.save_aggregated_metrics' do
subject(:save_aggregated_metrics) do
described_class.save_aggregated_metrics(metric_name: metric_1,
time_period: time_period,
recorded_at_timestamp: recorded_at,
data: data)
end
context 'with compatible data argument' do
let(:data) { ::Gitlab::Database::PostgresHll::Buckets.new(141 => 1, 56 => 1) }
it 'persists serialized data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
expect(redis).to receive(:set).with("#{metric_1}_weekly-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
end
save_aggregated_metrics
end
context 'with monthly key' do
let_it_be(:start_date) { 4.weeks.ago }
let_it_be(:time_period) { { created_at: (start_date..end_date) } }
it 'persists serialized data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
expect(redis).to receive(:set).with("#{metric_1}_monthly-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
end
save_aggregated_metrics
end
end
context 'with all_time key' do
let_it_be(:time_period) { nil }
it 'persists serialized data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
expect(redis).to receive(:set).with("#{metric_1}_all_time-#{recorded_at.to_i}", '{"141":1,"56":1}', ex: 120.hours)
end
save_aggregated_metrics
end
end
context 'error handling' do
before do
allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(::Redis::CommandError)
end
it 'rescues and re-raises ::Redis::CommandError for development and test environments' do
expect { save_aggregated_metrics }.to raise_error ::Redis::CommandError
end
context 'for environment different than development' do
before do
stub_rails_env('production')
end
it 'rescues ::Redis::CommandError' do
expect { save_aggregated_metrics }.not_to raise_error
end
end
end
end
context 'with incompatible data argument' do
let(:data) { 1 }
context 'for environment different than development' do
before do
stub_rails_env('production')
end
it 'does not persist data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
expect(redis).not_to receive(:set)
end
save_aggregated_metrics
end
end
it 'raises error for development environment' do
expect { save_aggregated_metrics }.to raise_error /Unsupported data type/
end
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::RedisHll do
describe '.calculate_events_union' do
let(:event_names) { %w[event_a event_b] }
let(:start_date) { 7.days.ago }
let(:end_date) { Date.current }
subject(:calculate_metrics_union) do
described_class.calculate_metrics_union(metric_names: event_names, start_date: start_date, end_date: end_date, recorded_at: nil)
end
it 'calls Gitlab::UsageDataCounters::HLLRedisCounter.calculate_events_union' do
expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union)
.with(event_names: event_names, start_date: start_date, end_date: end_date)
.and_return(5)
calculate_metrics_union
end
it 'prevents the fallback value from being used as a valid union result' do
allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:calculate_events_union).and_return(-1)
expect { calculate_metrics_union }.to raise_error Gitlab::Usage::Metrics::Aggregates::Sources::UnionNotAvailable
end
end
end
...@@ -13,18 +13,32 @@ RSpec.describe 'aggregated metrics' do ...@@ -13,18 +13,32 @@ RSpec.describe 'aggregated metrics' do
end end
end end
RSpec::Matchers.define :has_known_source do
match do |aggregate|
Gitlab::Usage::Metrics::Aggregates::SOURCES.include?(aggregate[:source])
end
failure_message do |aggregate|
"Aggregate with name: `#{aggregate[:name]}` uses not allowed source `#{aggregate[:source]}`"
end
end
let_it_be(:known_events) do let_it_be(:known_events) do
Gitlab::UsageDataCounters::HLLRedisCounter.known_events Gitlab::UsageDataCounters::HLLRedisCounter.known_events
end end
Gitlab::Usage::Metrics::Aggregates::Aggregate.new.send(:aggregated_metrics).tap do |aggregated_metrics| Gitlab::Usage::Metrics::Aggregates::Aggregate.new(Time.current).send(:aggregated_metrics).tap do |aggregated_metrics|
it 'all events has unique name' do it 'all events has unique name' do
event_names = aggregated_metrics&.map { |event| event[:name] } event_names = aggregated_metrics&.map { |event| event[:name] }
expect(event_names).to eq(event_names&.uniq) expect(event_names).to eq(event_names&.uniq)
end end
aggregated_metrics&.each do |aggregate| it 'all aggregated metrics has known source' do
expect(aggregated_metrics).to all has_known_source
end
aggregated_metrics&.select { |agg| agg[:source] == Gitlab::Usage::Metrics::Aggregates::REDIS_SOURCE }&.each do |aggregate|
context "for #{aggregate[:name]} aggregate of #{aggregate[:events].join(' ')}" do context "for #{aggregate[:name]} aggregate of #{aggregate[:events].join(' ')}" do
let_it_be(:events_records) { known_events.select { |event| aggregate[:events].include?(event[:name]) } } let_it_be(:events_records) { known_events.select { |event| aggregate[:events].include?(event[:name]) } }
......
...@@ -372,97 +372,4 @@ RSpec.describe Gitlab::Utils::UsageData do ...@@ -372,97 +372,4 @@ RSpec.describe Gitlab::Utils::UsageData do
end end
end end
end end
describe '#save_aggregated_metrics', :clean_gitlab_redis_shared_state do
let(:timestamp) { Time.current.to_i }
let(:time_period) { { created_at: 7.days.ago..Date.current } }
let(:metric_name) { 'test_metric' }
let(:method_params) do
{
metric_name: metric_name,
time_period: time_period,
recorded_at_timestamp: timestamp,
data: data
}
end
context 'with compatible data argument' do
let(:data) { ::Gitlab::Database::PostgresHll::Buckets.new(141 => 1, 56 => 1) }
it 'persists serialized data in Redis' do
time_period_name = 'weekly'
expect(described_class).to receive(:time_period_to_human_name).with(time_period).and_return(time_period_name)
Gitlab::Redis::SharedState.with do |redis|
expect(redis).to receive(:set).with("#{metric_name}_#{time_period_name}-#{timestamp}", '{"141":1,"56":1}', ex: 80.hours)
end
described_class.save_aggregated_metrics(**method_params)
end
context 'error handling' do
before do
allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(::Redis::CommandError)
end
it 'rescues and reraise ::Redis::CommandError for development and test environments' do
expect { described_class.save_aggregated_metrics(**method_params) }.to raise_error ::Redis::CommandError
end
context 'for environment different than development' do
before do
stub_rails_env('production')
end
it 'rescues ::Redis::CommandError' do
expect { described_class.save_aggregated_metrics(**method_params) }.not_to raise_error
end
end
end
end
context 'with incompatible data argument' do
let(:data) { 1 }
context 'for environment different than development' do
before do
stub_rails_env('production')
end
it 'does not persist data in Redis' do
Gitlab::Redis::SharedState.with do |redis|
expect(redis).not_to receive(:set)
end
described_class.save_aggregated_metrics(**method_params)
end
end
it 'raises error for development environment' do
expect { described_class.save_aggregated_metrics(**method_params) }.to raise_error /Unsupported data type/
end
end
end
describe '#time_period_to_human_name' do
it 'translates empty time period as all_time' do
expect(described_class.time_period_to_human_name({})).to eql 'all_time'
end
it 'translates time period not longer than 7 days as weekly', :aggregate_failures do
days_6_time_period = 6.days.ago..Date.current
days_7_time_period = 7.days.ago..Date.current
expect(described_class.time_period_to_human_name(column_name: days_6_time_period)).to eql 'weekly'
expect(described_class.time_period_to_human_name(column_name: days_7_time_period)).to eql 'weekly'
end
it 'translates time period longer than 7 days as monthly', :aggregate_failures do
days_8_time_period = 8.days.ago..Date.current
days_31_time_period = 31.days.ago..Date.current
expect(described_class.time_period_to_human_name(column_name: days_8_time_period)).to eql 'monthly'
expect(described_class.time_period_to_human_name(column_name: days_31_time_period)).to eql 'monthly'
end
end
end end
...@@ -36,7 +36,7 @@ RSpec.describe Projects::Alerting::NotifyService do ...@@ -36,7 +36,7 @@ RSpec.describe Projects::Alerting::NotifyService do
subject { service.execute(token, nil) } subject { service.execute(token, nil) }
shared_examples 'notifcations are handled correctly' do shared_examples 'notifications are handled correctly' do
context 'with valid token' do context 'with valid token' do
let(:token) { integration.token } let(:token) { integration.token }
let(:incident_management_setting) { double(send_email?: email_enabled, create_issue?: issue_enabled, auto_close_incident?: auto_close_enabled) } let(:incident_management_setting) { double(send_email?: email_enabled, create_issue?: issue_enabled, auto_close_incident?: auto_close_enabled) }
...@@ -85,6 +85,15 @@ RSpec.describe Projects::Alerting::NotifyService do ...@@ -85,6 +85,15 @@ RSpec.describe Projects::Alerting::NotifyService do
it_behaves_like 'creates an alert management alert' it_behaves_like 'creates an alert management alert'
it_behaves_like 'assigns the alert properties' it_behaves_like 'assigns the alert properties'
it 'passes the integration to alert processing' do
expect(Gitlab::AlertManagement::Payload)
.to receive(:parse)
.with(project, payload.to_h, integration: integration)
.and_call_original
subject
end
it 'creates a system note corresponding to alert creation' do it 'creates a system note corresponding to alert creation' do
expect { subject }.to change(Note, :count).by(1) expect { subject }.to change(Note, :count).by(1)
expect(Note.last.note).to include(payload_raw.fetch(:monitoring_tool)) expect(Note.last.note).to include(payload_raw.fetch(:monitoring_tool))
...@@ -259,7 +268,7 @@ RSpec.describe Projects::Alerting::NotifyService do ...@@ -259,7 +268,7 @@ RSpec.describe Projects::Alerting::NotifyService do
subject { service.execute(token, integration) } subject { service.execute(token, integration) }
it_behaves_like 'notifcations are handled correctly' do it_behaves_like 'notifications are handled correctly' do
let(:source) { integration.name } let(:source) { integration.name }
end end
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'layouts/profile' do
let(:user) { create(:user) }
before do
allow(view).to receive(:session).and_return({})
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
allow(view).to receive(:experiment_enabled?).and_return(false)
allow(view).to receive(:enable_search_settings).and_call_original
end
it 'calls enable_search_settings helper with a custom container class' do
render
expect(view).to have_received(:enable_search_settings)
.with({ locals: { container_class: 'gl-my-5' } })
end
context 'when search_settings_in_page feature flag is on' do
it 'displays the search settings entry point' do
render
expect(rendered).to include('js-search-settings-app')
end
end
context 'when search_settings_in_page feature flag is off' do
before do
stub_feature_flags(search_settings_in_page: false)
end
it 'does not display the search settings entry point' do
render
expect(rendered).not_to include('js-search-settings-app')
end
end
end
...@@ -6,7 +6,6 @@ RSpec.describe 'search/show' do ...@@ -6,7 +6,6 @@ RSpec.describe 'search/show' do
let(:search_term) { nil } let(:search_term) { nil }
before do before do
stub_template "search/_category.html.haml" => 'Category Partial'
stub_template "search/_results.html.haml" => 'Results Partial' stub_template "search/_results.html.haml" => 'Results Partial'
@search_term = search_term @search_term = search_term
...@@ -21,7 +20,6 @@ RSpec.describe 'search/show' do ...@@ -21,7 +20,6 @@ RSpec.describe 'search/show' do
end end
it 'does not render partials' do it 'does not render partials' do
expect(rendered).not_to render_template('search/_category')
expect(rendered).not_to render_template('search/_results') expect(rendered).not_to render_template('search/_results')
end end
end end
...@@ -30,7 +28,6 @@ RSpec.describe 'search/show' do ...@@ -30,7 +28,6 @@ RSpec.describe 'search/show' do
let(:search_term) { 'Search Foo' } let(:search_term) { 'Search Foo' }
it 'renders partials' do it 'renders partials' do
expect(rendered).to render_template('search/_category')
expect(rendered).to render_template('search/_results') expect(rendered).to render_template('search/_results')
end end
......