Commit 84636ff7 authored by Fabio Pitino

Merge branch '26793-implementing-include-multiple-file' into 'master'

Implement including multiple files from a project

See merge request gitlab-org/gitlab!45991
parents 18a42f9f 0b23c213
......@@ -168,9 +168,6 @@ export default class CreateMergeRequestDropdown {
disable() {
this.disableCreateAction();
this.dropdownToggle.classList.add('disabled');
this.dropdownToggle.setAttribute('disabled', 'disabled');
}
disableCreateAction() {
......@@ -189,9 +186,6 @@ export default class CreateMergeRequestDropdown {
this.createTargetButton.classList.remove('disabled');
this.createTargetButton.removeAttribute('disabled');
this.dropdownToggle.classList.remove('disabled');
this.dropdownToggle.removeAttribute('disabled');
}
static findByValue(objects, ref, returnFirstMatch = false) {
......
......@@ -8,9 +8,9 @@ import {
GlBadge,
GlAlert,
GlSprintf,
GlDeprecatedDropdown,
GlDeprecatedDropdownItem,
GlDeprecatedDropdownDivider,
GlDropdown,
GlDropdownItem,
GlDropdownDivider,
GlIcon,
} from '@gitlab/ui';
import { deprecatedCreateFlash as createFlash } from '~/flash';
......@@ -43,9 +43,9 @@ export default {
GlBadge,
GlAlert,
GlSprintf,
GlDeprecatedDropdown,
GlDeprecatedDropdownItem,
GlDeprecatedDropdownDivider,
GlDropdown,
GlDropdownItem,
GlDropdownDivider,
TimeAgoTooltip,
},
directives: {
......@@ -331,38 +331,38 @@ export default {
</gl-button>
</form>
</div>
<gl-deprecated-dropdown
<gl-dropdown
text="Options"
class="error-details-options d-md-none"
right
:disabled="issueUpdateInProgress"
>
<gl-deprecated-dropdown-item
<gl-dropdown-item
data-qa-selector="update_ignore_status_button"
@click="onIgnoreStatusUpdate"
>{{ ignoreBtnLabel }}</gl-deprecated-dropdown-item
>{{ ignoreBtnLabel }}</gl-dropdown-item
>
<gl-deprecated-dropdown-item
<gl-dropdown-item
data-qa-selector="update_resolve_status_button"
@click="onResolveStatusUpdate"
>{{ resolveBtnLabel }}</gl-deprecated-dropdown-item
>{{ resolveBtnLabel }}</gl-dropdown-item
>
<gl-deprecated-dropdown-divider />
<gl-deprecated-dropdown-item
<gl-dropdown-divider />
<gl-dropdown-item
v-if="error.gitlabIssuePath"
data-qa-selector="view_issue_button"
:href="error.gitlabIssuePath"
variant="success"
>{{ __('View issue') }}</gl-deprecated-dropdown-item
>{{ __('View issue') }}</gl-dropdown-item
>
<gl-deprecated-dropdown-item
<gl-dropdown-item
v-if="!error.gitlabIssuePath"
:loading="issueCreationInProgress"
data-qa-selector="create_issue_button"
@click="createIssue"
>{{ __('Create issue') }}</gl-deprecated-dropdown-item
>{{ __('Create issue') }}</gl-dropdown-item
>
</gl-deprecated-dropdown>
</gl-dropdown>
</div>
</div>
<div>
......
......@@ -8,9 +8,9 @@ import {
GlLoadingIcon,
GlTable,
GlFormInput,
GlDeprecatedDropdown,
GlDeprecatedDropdownItem,
GlDeprecatedDropdownDivider,
GlDropdown,
GlDropdownItem,
GlDropdownDivider,
GlTooltipDirective,
GlPagination,
} from '@gitlab/ui';
......@@ -72,9 +72,9 @@ export default {
components: {
GlEmptyState,
GlButton,
GlDeprecatedDropdown,
GlDeprecatedDropdownItem,
GlDeprecatedDropdownDivider,
GlDropdown,
GlDropdownItem,
GlDropdownDivider,
GlIcon,
GlLink,
GlLoadingIcon,
......@@ -233,30 +233,30 @@ export default {
>
<div class="search-box flex-fill mb-1 mb-md-0">
<div class="filtered-search-box mb-0">
<gl-deprecated-dropdown
<gl-dropdown
:text="__('Recent searches')"
class="filtered-search-history-dropdown-wrapper"
toggle-class="filtered-search-history-dropdown-toggle-button"
toggle-class="filtered-search-history-dropdown-toggle-button gl-shadow-none! gl-border-r-gray-200! gl-border-1! gl-rounded-0!"
:disabled="loading"
>
<div v-if="!$options.hasLocalStorage" class="px-3">
{{ __('This feature requires local storage to be enabled') }}
</div>
<template v-else-if="recentSearches.length > 0">
<gl-deprecated-dropdown-item
<gl-dropdown-item
v-for="searchQuery in recentSearches"
:key="searchQuery"
@click="setSearchText(searchQuery)"
>{{ searchQuery }}
</gl-deprecated-dropdown-item>
<gl-deprecated-dropdown-divider />
<gl-deprecated-dropdown-item ref="clearRecentSearches" @click="clearRecentSearches"
</gl-dropdown-item>
<gl-dropdown-divider />
<gl-dropdown-item ref="clearRecentSearches" @click="clearRecentSearches"
>{{ __('Clear recent searches') }}
</gl-deprecated-dropdown-item>
</gl-dropdown-item>
</template>
<div v-else class="px-3">{{ __("You don't have any recent searches") }}</div>
</gl-deprecated-dropdown>
<div class="filtered-search-input-container flex-fill">
</gl-dropdown>
<div class="filtered-search-input-container gl-flex-fill-1">
<gl-form-input
v-model="errorSearchQuery"
class="pl-2 filtered-search"
......@@ -280,49 +280,44 @@ export default {
</div>
</div>
<gl-deprecated-dropdown
<gl-dropdown
:text="$options.statusFilters[statusFilter]"
class="status-dropdown mx-md-1 mb-1 mb-md-0"
menu-class="dropdown"
:disabled="loading"
right
>
<gl-deprecated-dropdown-item
<gl-dropdown-item
v-for="(label, status) in $options.statusFilters"
:key="status"
@click="filterErrors(status, label)"
>
<span class="d-flex">
<gl-icon
class="flex-shrink-0 append-right-4"
class="gl-new-dropdown-item-check-icon"
:class="{ invisible: !isCurrentStatusFilter(status) }"
name="mobile-issue-close"
/>
{{ label }}
</span>
</gl-deprecated-dropdown-item>
</gl-deprecated-dropdown>
</gl-dropdown-item>
</gl-dropdown>
<gl-deprecated-dropdown
:text="$options.sortFields[sortField]"
left
:disabled="loading"
menu-class="dropdown"
>
<gl-deprecated-dropdown-item
<gl-dropdown :text="$options.sortFields[sortField]" right :disabled="loading">
<gl-dropdown-item
v-for="(label, field) in $options.sortFields"
:key="field"
@click="sortByField(field)"
>
<span class="d-flex">
<gl-icon
class="flex-shrink-0 append-right-4"
class="gl-new-dropdown-item-check-icon"
:class="{ invisible: !isCurrentSortField(field) }"
name="mobile-issue-close"
/>
{{ label }}
</span>
</gl-deprecated-dropdown-item>
</gl-deprecated-dropdown>
</gl-dropdown-item>
</gl-dropdown>
</div>
<div v-if="loading" class="py-3">
......
<script>
import { GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { getDisplayName } from '../utils';
export default {
components: {
GlDeprecatedDropdown,
GlDeprecatedDropdownItem,
GlDropdown,
GlDropdownItem,
},
props: {
dropdownLabel: {
......@@ -52,22 +52,22 @@ export default {
<div :class="{ 'gl-show-field-errors': isProjectInvalid }">
<label class="label-bold" for="project-dropdown">{{ __('Project') }}</label>
<div class="row">
<gl-deprecated-dropdown
<gl-dropdown
id="project-dropdown"
class="col-8 col-md-9 gl-pr-0"
:disabled="!hasProjects"
menu-class="w-100 mw-100"
toggle-class="dropdown-menu-toggle w-100 gl-field-error-outline"
toggle-class="dropdown-menu-toggle gl-field-error-outline"
:text="dropdownLabel"
>
<gl-deprecated-dropdown-item
<gl-dropdown-item
v-for="project in projects"
:key="`${project.organizationSlug}.${project.slug}`"
class="w-100"
@click="$emit('select-project', project)"
>{{ getDisplayName(project) }}</gl-deprecated-dropdown-item
>{{ getDisplayName(project) }}</gl-dropdown-item
>
</gl-deprecated-dropdown>
</gl-dropdown>
</div>
<p v-if="isProjectInvalid" class="js-project-dropdown-error gl-field-error">
{{ invalidProjectLabel }}
......
......@@ -37,8 +37,6 @@ const restartJobsPolling = () => {
if (eTagPoll) eTagPoll.restart();
};
const setFilter = ({ commit }, filter) => commit(types.SET_FILTER, filter);
const setImportTarget = ({ commit }, { repoId, importTarget }) =>
commit(types.SET_IMPORT_TARGET, { repoId, importTarget });
......@@ -172,12 +170,9 @@ const fetchNamespacesFactory = (namespacesPath = isRequired()) => ({ commit }) =
});
};
const setPage = ({ state, commit, dispatch }, page) => {
if (page === state.pageInfo.page) {
return null;
}
const setFilter = ({ commit, dispatch }, filter) => {
commit(types.SET_FILTER, filter);
commit(types.SET_PAGE, page);
return dispatch('fetchRepos');
};
......@@ -188,7 +183,6 @@ export default ({ endpoints = isRequired() }) => ({
setFilter,
setImportTarget,
importAll,
setPage,
fetchRepos: fetchReposFactory({ reposPath: endpoints.reposPath }),
fetchImport: fetchImportFactory(endpoints.importPath),
fetchJobs: fetchJobsFactory(endpoints.jobsPath),
......
......@@ -21,35 +21,32 @@ function mountRemoveMemberModal() {
});
}
document.addEventListener('DOMContentLoaded', () => {
groupsSelect();
memberExpirationDate();
memberExpirationDate('.js-access-expiration-date-groups');
mountRemoveMemberModal();
const SHARED_FIELDS = ['account', 'expires', 'maxRole', 'expiration', 'actions'];
initGroupMembersApp(
document.querySelector('.js-group-members-list'),
SHARED_FIELDS.concat(['source', 'granted']),
memberRequestFormatter,
);
initGroupMembersApp(
document.querySelector('.js-group-linked-list'),
SHARED_FIELDS.concat('granted'),
groupLinkRequestFormatter,
);
initGroupMembersApp(
document.querySelector('.js-group-invited-members-list'),
SHARED_FIELDS.concat('invited'),
memberRequestFormatter,
);
initGroupMembersApp(
document.querySelector('.js-group-access-requests-list'),
SHARED_FIELDS.concat('requested'),
memberRequestFormatter,
);
const SHARED_FIELDS = ['account', 'expires', 'maxRole', 'expiration', 'actions'];
groupsSelect();
memberExpirationDate();
memberExpirationDate('.js-access-expiration-date-groups');
mountRemoveMemberModal();
initGroupMembersApp(
document.querySelector('.js-group-members-list'),
SHARED_FIELDS.concat(['source', 'granted']),
memberRequestFormatter,
);
initGroupMembersApp(
document.querySelector('.js-group-linked-list'),
SHARED_FIELDS.concat('granted'),
groupLinkRequestFormatter,
);
initGroupMembersApp(
document.querySelector('.js-group-invited-members-list'),
SHARED_FIELDS.concat('invited'),
memberRequestFormatter,
);
initGroupMembersApp(
document.querySelector('.js-group-access-requests-list'),
SHARED_FIELDS.concat('requested'),
memberRequestFormatter,
);
new Members(); // eslint-disable-line no-new
new UsersSelect(); // eslint-disable-line no-new
});
new Members(); // eslint-disable-line no-new
new UsersSelect(); // eslint-disable-line no-new
......@@ -57,7 +57,7 @@ export default {
<tooltip-on-truncate :title="jobName" truncate-target="child" placement="top">
<div
:id="jobId"
class="pipeline-job-pill gl-bg-white gl-text-center gl-text-truncate gl-rounded-pill gl-mb-3 gl-px-5 gl-py-2 gl-relative gl-z-index-1 gl-transition-duration-slow gl-transition-timing-function-ease"
class="gl-w-15 gl-bg-white gl-text-center gl-text-truncate gl-rounded-pill gl-mb-3 gl-px-5 gl-py-2 gl-relative gl-z-index-1 gl-transition-duration-slow gl-transition-timing-function-ease"
:class="jobPillClasses"
@mouseover="onMouseEnter"
@mouseleave="onMouseLeave"
......
......@@ -97,15 +97,20 @@ export default {
this.reportFailure(DRAW_FAILURE);
}
},
getStageBackgroundClass(index) {
getStageBackgroundClasses(index) {
const { length } = this.pipelineData.stages;
// It's possible for a graph to have only one stage, in which
// case we concatenate both the left and right rounding classes
if (length === 1) {
return 'stage-rounded';
} else if (index === 0) {
return 'stage-left-rounded';
} else if (index === length - 1) {
return 'stage-right-rounded';
return 'gl-rounded-bottom-left-6 gl-rounded-top-left-6 gl-rounded-bottom-right-6 gl-rounded-top-right-6';
}
if (index === 0) {
return 'gl-rounded-bottom-left-6 gl-rounded-top-left-6';
}
if (index === length - 1) {
return 'gl-rounded-bottom-right-6 gl-rounded-top-right-6';
}
return '';
......@@ -190,7 +195,8 @@ export default {
>
<div
class="gl-display-flex gl-align-items-center gl-bg-white gl-w-full gl-px-8 gl-py-4 gl-mb-5"
:class="getStageBackgroundClass(index)"
:class="getStageBackgroundClasses(index)"
data-testid="stage-background"
>
<stage-pill :stage-name="stage.name" :is-empty="stage.groups.length === 0" />
</div>
......
......@@ -26,7 +26,7 @@ export default {
<template>
<tooltip-on-truncate :title="stageName" truncate-target="child" placement="top">
<div
class="gl-px-5 gl-py-2 gl-text-white gl-text-center gl-text-truncate gl-rounded-pill pipeline-stage-pill"
class="gl-px-5 gl-py-2 gl-text-white gl-text-center gl-text-truncate gl-rounded-pill gl-w-20"
:class="emptyClass"
>
{{ stageName }}
......
......@@ -137,8 +137,8 @@ export default {
:href="commit.author.webPath"
class="commit-author-link js-user-link"
>
{{ commit.author.name }}
</gl-link>
{{ commit.author.name }}</gl-link
>
<template v-else>
{{ commit.authorName }}
</template>
......
......@@ -486,23 +486,3 @@
.progress-bar.bg-primary {
background-color: $blue-500 !important;
}
.pipeline-stage-pill {
width: 10rem;
}
.pipeline-job-pill {
width: 8rem;
}
.stage-rounded {
border-radius: 2rem;
}
.stage-left-rounded {
border-radius: 2rem 0 0 2rem;
}
.stage-right-rounded {
border-radius: 0 2rem 2rem 0;
}
......@@ -188,6 +188,12 @@ ul.related-merge-requests > li {
border-width: 1px;
line-height: $line-height-base;
width: auto;
&.disabled {
background-color: $gray-light;
border-color: $gray-100;
color: $gl-text-color-disabled;
}
}
}
......
......@@ -132,13 +132,23 @@ class GroupsController < Groups::ApplicationController
def update
if Groups::UpdateService.new(@group, current_user, group_params).execute
redirect_to edit_group_path(@group, anchor: params[:update_section]), notice: "Group '#{@group.name}' was successfully updated."
notice = "Group '#{@group.name}' was successfully updated."
redirect_to edit_group_origin_location, notice: notice
else
@group.reset
render action: "edit"
end
end
def edit_group_origin_location
if params.dig(:group, :redirect_target) == 'repository_settings'
group_settings_repository_path(@group, anchor: 'js-default-branch-name')
else
edit_group_path(@group, anchor: params[:update_section])
end
end
def destroy
Groups::DestroyService.new(@group, current_user).async_execute
......
......@@ -48,18 +48,14 @@ class Import::BaseController < ApplicationController
private
def filter_attribute
:name
end
def sanitized_filter_param
@filter ||= sanitize(params[:filter])
@filter ||= sanitize(params[:filter])&.downcase
end
def filtered(collection)
return collection unless sanitized_filter_param
collection.select { |item| item[filter_attribute].include?(sanitized_filter_param) }
collection.select { |item| item[:name].to_s.downcase.include?(sanitized_filter_param) }
end
def serialized_provider_repos
......
......@@ -132,8 +132,4 @@ class Import::BitbucketController < Import::BaseController
refresh_token: session[:bitbucket_refresh_token]
}
end
def sanitized_filter_param
@filter ||= sanitize(params[:filter])
end
end
......@@ -170,10 +170,6 @@ class Import::BitbucketServerController < Import::BaseController
BitbucketServer::Paginator::PAGE_LENGTH
end
def sanitized_filter_param
sanitize(params[:filter])
end
def bitbucket_connection_error(error)
flash[:alert] = _("Unable to connect to server: %{error}") % { error: error }
clear_session_data
......
......@@ -245,14 +245,6 @@ class Import::GithubController < Import::BaseController
def extra_import_params
{}
end
def sanitized_filter_param
@filter ||= sanitize(params[:filter])
end
def filter_attribute
:name
end
end
Import::GithubController.prepend_if_ee('EE::Import::GithubController')
......@@ -48,6 +48,8 @@ class ApplicationRecord < ActiveRecord::Base
def self.safe_find_or_create_by!(*args, &block)
safe_find_or_create_by(*args, &block).tap do |record|
raise ActiveRecord::RecordNotFound unless record.present?
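# If the record was not persisted, re-run validations so an invalid record raises ActiveRecord::RecordInvalid instead of being returned silently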
record.validate! unless record.persisted?
end
end
......
......@@ -109,6 +109,8 @@ class Group < Namespace
.where("project_authorizations.user_id IN (?)", user_ids)
end
delegate :default_branch_name, to: :namespace_settings
class << self
def sort_by_attribute(method)
if method == 'storage_size_desc'
......@@ -587,7 +589,7 @@ class Group < Namespace
def update_two_factor_requirement
return unless saved_change_to_require_two_factor_authentication? || saved_change_to_two_factor_grace_period?
members_with_descendants.find_each(&:update_two_factor_requirement)
direct_and_indirect_members.find_each(&:update_two_factor_requirement)
end
def path_changed_hook
......
......@@ -6,10 +6,18 @@ class NamespaceSetting < ApplicationRecord
validate :default_branch_name_content
validate :allow_mfa_for_group
before_validation :normalize_default_branch_name
NAMESPACE_SETTINGS_PARAMS = [:default_branch_name].freeze
self.primary_key = :namespace_id
private
def normalize_default_branch_name
self.default_branch_name = nil if default_branch_name.blank?
end
def default_branch_name_content
return if default_branch_name.nil?
......
......@@ -163,16 +163,18 @@ module Ci
end
def ensure_pending_state
Ci::BuildPendingState.create_or_find_by!(
build_state = Ci::BuildPendingState.safe_find_or_create_by(
build_id: build.id,
state: params.fetch(:state),
trace_checksum: params.fetch(:checksum),
failure_reason: params.dig(:failure_reason)
)
rescue ActiveRecord::RecordNotFound
metrics.increment_trace_operation(operation: :conflict)
build.pending_state
unless build_state.present?
metrics.increment_trace_operation(operation: :conflict)
end
build_state || build.pending_state
end
##
......
......@@ -7,7 +7,7 @@ module Clusters
GITLAB_ADMIN_TOKEN_NAME = 'gitlab-token'
GITLAB_CLUSTER_ROLE_BINDING_NAME = 'gitlab-admin'
GITLAB_CLUSTER_ROLE_NAME = 'cluster-admin'
PROJECT_CLUSTER_ROLE_NAME = 'edit'
PROJECT_CLUSTER_ROLE_NAME = 'admin'
GITLAB_KNATIVE_SERVING_ROLE_NAME = 'gitlab-knative-serving-role'
GITLAB_KNATIVE_SERVING_ROLE_BINDING_NAME = 'gitlab-knative-serving-rolebinding'
GITLAB_CROSSPLANE_DATABASE_ROLE_NAME = 'gitlab-crossplane-database-role'
......
......@@ -123,11 +123,9 @@ module Clusters
end
def role_binding_resource
role_name = Feature.enabled?(:kubernetes_cluster_namespace_role_admin) ? 'admin' : Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME
Gitlab::Kubernetes::RoleBinding.new(
name: role_binding_name,
role_name: role_name,
role_name: Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME,
role_kind: :ClusterRole,
namespace: service_account_namespace,
service_account_name: service_account_name
......
%section.settings.as-default-branch-name.no-animate#js-default-branch-name{ class: ('expanded' if expanded_by_default?) }
.settings-header
%h4
= _('Default initial branch name')
%button.gl-button.js-settings-toggle{ type: 'button' }
= expanded_by_default? ? _('Collapse') : _('Expand')
%p
= _('Set the default name of the initial branch when creating new repositories through the user interface.')
.settings-content
= form_for @group, url: group_path(@group, anchor: 'js-default-branch-name'), html: { class: 'fieldset-form' } do |f|
= form_errors(@group)
- fallback_branch_name = '<code>master</code>'
%fieldset
.form-group
= f.label :default_branch_name, _('Default initial branch name'), class: 'label-light'
= f.text_field :default_branch_name, value: group.namespace_settings&.default_branch_name, placeholder: 'master', class: 'form-control'
%span.form-text.text-muted
= (_("Changes affect new repositories only. If not specified, either the configured application-wide default or Git's default name %{branch_name_default} will be used.") % { branch_name_default: fallback_branch_name }).html_safe
= f.hidden_field :redirect_target, value: "repository_settings"
= f.submit _('Save changes'), class: 'gl-button btn-success'
......@@ -4,3 +4,4 @@
- deploy_token_description = s_('DeployTokens|Group deploy tokens allow access to the packages, repositories, and registry images within the group.')
= render "shared/deploy_tokens/index", group_or_project: @group, description: deploy_token_description
= render "initial_branch_name", group: @group
......@@ -23,7 +23,7 @@
%a.btn.gl-button.btn-default.float-right.gl-display-block.d-sm-none.gutter-toggle.issuable-gutter-toggle.js-sidebar-toggle{ href: "#" }
= sprite_icon('chevron-double-lg-left')
- if Feature.enabled?(:vue_issue_header, @project)
- if Feature.enabled?(:vue_issue_header, @project) && display_issuable_type == 'issue'
.js-issue-header-actions{ data: issue_header_actions_data(@project, @issue, current_user) }
- else
.detail-page-header-actions.js-issuable-actions.js-issuable-buttons{ data: { "action": "close-reopen" } }
......
---
title: Resolve user stuck in 2FA setup page even if group disables 2FA enforcement
merge_request: 46432
author:
type: fixed
---
title: Populate missing `dismissed_at` and `dismissed_by_id` attributes of vulnerabilities
merge_request: 46370
author:
type: fixed
---
title: Replace GlDeprecatedDropdown with GlDropdown in app/assets/javascripts/error_tracking
merge_request: 41420
author: nuwe1
type: other
---
title: Fix linebreak issue in last commit anchor
merge_request: 46643
author:
type: fixed
---
title: Add Default Initial Branch Name for Repositories Group Setting
merge_request: 43290
author:
type: added
---
title: Fix project import search box and make it case insensitive
merge_request: 45783
author:
type: fixed
---
title: Switch to admin clusterRole for GitLab created environment Kubernetes service
account
merge_request: 46417
author:
type: changed
---
title: Fixed create merge request dropdown not re-opening after typing invalid source
branch
merge_request: 46802
author:
type: fixed
---
name: kubernetes_cluster_namespace_role_admin
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/45479
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/270030
type: development
group: group::configure
default_enabled: false
# frozen_string_literal: true
class AddTemporaryIndexToVulnerabilitiesTable < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INDEX_NAME = 'temporary_index_vulnerabilities_on_id'
disable_ddl_transaction!
def up
add_concurrent_index :vulnerabilities, :id, where: "state = 2 AND (dismissed_at IS NULL OR dismissed_by_id IS NULL)", name: INDEX_NAME
end
def down
remove_concurrent_index_by_name :vulnerabilities, INDEX_NAME
end
end
# frozen_string_literal: true
class SchedulePopulateMissingDismissalInformationForVulnerabilities < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
BATCH_SIZE = 1_000
DELAY_INTERVAL = 3.minutes.to_i
MIGRATION_CLASS = 'PopulateMissingVulnerabilityDismissalInformation'
disable_ddl_transaction!
def up
::Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation::Vulnerability.broken.each_batch(of: BATCH_SIZE) do |batch, index|
vulnerability_ids = batch.pluck(:id)
migrate_in(index * DELAY_INTERVAL, MIGRATION_CLASS, vulnerability_ids)
end
end
def down
# no-op
end
end
4b0c70d8cd2648149011adab4f302922483436406f361c3037f26efb12b19042
\ No newline at end of file
9ea8e8f1234d6291ea00e725d380bfe33d804853b90da1221be8781b3dd9bb77
\ No newline at end of file
......@@ -22200,6 +22200,8 @@ CREATE UNIQUE INDEX snippet_user_mentions_on_snippet_id_index ON snippet_user_me
CREATE UNIQUE INDEX taggings_idx ON taggings USING btree (tag_id, taggable_id, taggable_type, context, tagger_id, tagger_type);
CREATE INDEX temporary_index_vulnerabilities_on_id ON vulnerabilities USING btree (id) WHERE ((state = 2) AND ((dismissed_at IS NULL) OR (dismissed_by_id IS NULL)));
CREATE UNIQUE INDEX term_agreements_unique_index ON term_agreements USING btree (user_id, term_id);
CREATE INDEX terraform_state_versions_verification_checksum_partial ON terraform_state_versions USING btree (verification_checksum) WHERE (verification_checksum IS NOT NULL);
......
......@@ -118,7 +118,7 @@ To disable NFS server delegation, do the following:
1. Restart the NFS server process. For example, on CentOS run `service nfs restart`.
NOTE: **Important note:**
NOTE: **Note:**
The kernel bug may be fixed in
[more recent kernels with this commit](https://github.com/torvalds/linux/commit/95da1b3a5aded124dd1bda1e3cdb876184813140).
Red Hat Enterprise 7 [shipped a kernel update](https://access.redhat.com/errata/RHSA-2019:2029)
......
......@@ -34,7 +34,7 @@ rcli() {
# This example works for Omnibus installations of GitLab 7.3 or newer. For an
# installation from source you will have to change the socket path and the
# path to redis-cli.
sudo /opt/gitlab/embedded/bin/redis-cli -s /var/opt/gitlab/redis/redis.shared_state.socket "$@"
sudo /opt/gitlab/embedded/bin/redis-cli -s /var/opt/gitlab/redis/redis.socket "$@"
}
# test the new shell function; the response should be PONG
......
......@@ -95,7 +95,6 @@ you want using steps 1 and 2 from the GitLab downloads page.
1. Run `gitlab-ctl reconfigure`.
NOTE: **Note:**
You will need to restart the Sidekiq nodes after an update has occurred and database
migrations performed.
......
......@@ -39,16 +39,15 @@ Feature.disable(:upload_middleware_jwt_params_handler)
## Using local storage
NOTE: **Note:**
This is the default configuration
To change the location where the uploads are stored locally, follow the steps
below.
This is the default configuration. To change the location where the uploads are
stored locally, use the steps in this section based on your installation method:
**In Omnibus installations:**
**In Omnibus GitLab installations:**
NOTE: **Note:**
For historical reasons, uploads are stored into a base directory, which by default is `uploads/-/system`. It is strongly discouraged to change this configuration option on an existing GitLab installation.
For historical reasons, uploads are stored into a base directory, which by
default is `uploads/-/system`. It's strongly discouraged to change this
configuration option for an existing GitLab installation.
_The uploads are stored by default in `/var/opt/gitlab/gitlab-rails/uploads`._
......@@ -92,7 +91,6 @@ This configuration relies on valid AWS credentials to be configured already.
[Read more about using object storage with GitLab](object_storage.md).
NOTE: **Note:**
We recommend using the [consolidated object storage settings](object_storage.md#consolidated-object-storage-configuration). The following instructions apply to the original configuration format.
## Object Storage Settings
......@@ -131,7 +129,6 @@ _The uploads are stored by default in
}
```
NOTE: **Note:**
If you are using AWS IAM profiles, be sure to omit the AWS access key and secret access key/value pairs.
```ruby
......
......@@ -322,6 +322,9 @@ will now trigger a pipeline on the current project's default branch. The maximum
number of upstream pipeline subscriptions is 2 by default, for both the upstream and
downstream projects. This [application limit](../administration/instance_limits.md#number-of-cicd-subscriptions-to-a-project) can be changed on self-managed instances by a GitLab administrator.
The upstream project needs to be [public](../public_access/public_access.md) for
pipeline subscription to work.
## Downstream private projects confidentiality concern
If you trigger a pipeline in a downstream private project, the name of the project
......
---
redirect_to: alert_notifications.md
redirect_to: alert_integrations.md
---
This document was moved to [another location](alert_notifications.md).
This document was moved to [another location](alert_integrations.md).
......@@ -207,6 +207,10 @@ sudo gitlab-rake gitlab:cleanup:sessions:active_sessions_lookup_keys
bundle exec rake gitlab:cleanup:sessions:active_sessions_lookup_keys RAILS_ENV=production
```
## Cleaning up stale Redis sessions
[Clean up stale sessions](../administration/operations/cleaning_up_redis_sessions.md) to compact the Redis database after you upgrade to GitLab 7.3.
## Container Registry garbage collection
Container Registry can use considerable amounts of disk space. To clear up
......
......@@ -19,7 +19,6 @@ cd /home/git/gitlab
sudo -u git -H bundle exec rake gitlab:list_repos RAILS_ENV=production
```
NOTE: **Note:**
The results use the default ordering of the GitLab Rails application.
## Limit search results
......
......@@ -32,7 +32,6 @@ sudo gitlab-rake gitlab:import:all_users_to_all_projects
bundle exec rake gitlab:import:all_users_to_all_projects RAILS_ENV=production
```
NOTE: **Note:**
Admin users are added as maintainers.
## Add user as a developer to all groups
......@@ -59,7 +58,6 @@ sudo gitlab-rake gitlab:import:all_users_to_all_groups
bundle exec rake gitlab:import:all_users_to_all_groups RAILS_ENV=production
```
NOTE: **Note:**
Admin users are added as owners so they can add additional users to the group.
## Control the number of active users
......
......@@ -74,7 +74,6 @@ Docker image based on the `ruby:alpine` instead of the default `ruby:la
# ... put your stuff here
```
NOTE: **Note:**
Use Base64 encoding if you need to pass complex values, such as newlines and
spaces. Left unencoded, complex values like these can cause escaping issues
due to how Auto DevOps uses the arguments.
......@@ -123,7 +122,6 @@ to `CI_COMMIT_SHA,CI_ENVIRONMENT_NAME`.
RUN --mount=type=secret,id=auto-devops-build-secrets . /run/secrets/auto-devops-build-secrets && $COMMAND
```
NOTE: **Note:**
When `AUTO_DEVOPS_BUILD_IMAGE_FORWARDED_CI_VARIABLES` is set, Auto DevOps
enables the experimental [Docker BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/)
feature to use the `--secret` flag.
......@@ -453,7 +451,6 @@ the updated secrets. To update the secrets, either:
- Manually delete running pods to cause Kubernetes to create new pods with updated
secrets.
NOTE: **Note:**
Variables with multi-line values are not currently supported due to
limitations with the current Auto DevOps scripting environment.
......
......@@ -114,7 +114,6 @@ In this guide, we will install Ingress and Prometheus:
- Prometheus - An open-source monitoring and alerting system used to supervise the
deployed application.
NOTE: **Note:**
We won't install GitLab Runner in this quick start guide, as this guide uses the
shared runners provided by GitLab.com.
......@@ -161,7 +160,8 @@ The jobs are separated into stages:
- **Build** - The application builds a Docker image and uploads it to your project's
[Container Registry](../../user/packages/container_registry/index.md) ([Auto Build](stages.md#auto-build)).
- **Test** - GitLab runs various checks on the application:
- **Test** - GitLab runs various checks on the application, but all jobs except `test`
are allowed to fail in the test stage:
- The `test` job runs unit and integration tests by detecting the language and
framework ([Auto Test](stages.md#auto-test))
......@@ -179,9 +179,6 @@ The jobs are separated into stages:
licenses and is allowed to fail
([Auto License Compliance](stages.md#auto-license-compliance)) **(ULTIMATE)**
NOTE: **Note:**
All jobs except `test` are allowed to fail in the test stage.
- **Review** - Pipelines on `master` include this stage with a `dast_environment_deploy` job.
To learn more, see [Dynamic Application Security Testing (DAST)](../../user/application_security/dast/index.md).
......
......@@ -126,11 +126,9 @@ When you trigger a pipeline, if you have Auto DevOps enabled and if you have cor
[entered AWS credentials as environment variables](../../ci/cloud_deployment/index.md#deploy-your-application-to-the-aws-elastic-container-service-ecs),
your application will be deployed to AWS ECS.
NOTE: **Note:**
[GitLab Managed Apps](../../user/clusters/applications.md) are not available when deploying to AWS ECS.
You must manually configure your application (such as Ingress or Helm) on AWS ECS.
NOTE: **Note:**
If you have both a valid `AUTO_DEVOPS_PLATFORM_TARGET` variable and a Kubernetes cluster tied to your project,
only the deployment to Kubernetes will run.
......
......@@ -7,7 +7,7 @@ type: reference, howto
# Migration guide from Git Annex to Git LFS
NOTE: **Note:**
DANGER: **Deprecated:**
Git Annex support [has been removed](https://gitlab.com/gitlab-org/gitlab/-/issues/1648) in GitLab Enterprise
Edition 9.0 (2017/03/22).
......@@ -37,7 +37,6 @@ ones that GitLab developed.
## Migration steps
NOTE: **Note:**
Since Git Annex files are stored in a sub-directory of the normal repositories
(`.git/annex/objects`) and LFS files are stored outside of the repositories,
they are not compatible as they are using a different scheme. Therefore, the
......
......@@ -13,9 +13,7 @@ instance entirely offline.
NOTE: **Note:**
This guide assumes the server is Ubuntu 18.04. Instructions for other servers may vary.
NOTE: **Note:**
This guide assumes the server host resolves as `my-host`, which you should replace with your
This guide also assumes the server host resolves as `my-host`, which you should replace with your
server's name.
Follow the installation instructions [as outlined in the omnibus install
......
......@@ -62,12 +62,12 @@ For more details, please refer to our [full architecture documentation](https://
The setup process involves a few steps to enable GitOps deployments:
1. Installing the Agent server. This must be done one time for every GitLab installation.
1. Defining a configuration directory.
1. Creating an Agent record in GitLab.
1. Generating and copying a Secret token used to connect to the Agent.
1. Installing the Agent into the cluster.
1. Creating a `manifest.yaml`.
1. [Install the Agent server](#install-the-kubernetes-agent-server).
1. [Define a configuration directory](#define-a-configuration-repository).
1. [Create an Agent record in GitLab](#create-an-agent-record-in-gitlab).
1. [Generate and copy a Secret token used to connect to the Agent](#create-the-kubernetes-secret).
1. [Install the Agent into the cluster](#install-the-agent-into-the-cluster).
1. [Create a `manifest.yaml`](#create-a-manifestyaml).
### Upgrades and version compatibility
......@@ -100,9 +100,9 @@ When using the [Omnibus GitLab](https://docs.gitlab.com/omnibus/) package:
1. Edit `/etc/gitlab/gitlab.rb`:
```plaintext
gitlab_kas['enable'] = true
```
```plaintext
gitlab_kas['enable'] = true
```
1. [Reconfigure GitLab](../../../administration/restart_gitlab.md#omnibus-gitlab-reconfigure).
......@@ -124,6 +124,17 @@ helm upgrade --install gitlab gitlab/gitlab \
--set global.kas.enabled=true
```
To specify other options related to the KAS sub-chart, create a `gitlab.kas` sub-section
of your `values.yaml` file:
```yaml
gitlab:
kas:
# put your KAS custom options here
```
For details, read [Using the GitLab-KAS chart](https://docs.gitlab.com/charts/charts/gitlab/kas/).
### Define a configuration repository
Next, you need a GitLab repository to contain your Agent configuration. The minimal
......@@ -133,12 +144,14 @@ repository layout looks like this:
.gitlab/agents/<agent-name>/config.yaml
```
The `config.yaml` file contents should look like this:
Your `config.yaml` file can specify multiple manifest projects in the
section `manifest_projects`:
```yaml
gitops:
manifest_projects:
- id: "path-to/your-awesome-project"
- id: "path-to/your-manifest-project-number1"
...
```
### Create an Agent record in GitLab
......@@ -147,20 +160,24 @@ Next, create a GitLab Rails Agent record so the Agent can associate itself with
the configuration repository project. Creating this record also creates a Secret needed to configure
the Agent in subsequent steps. You can create an Agent record either:
- Through the Rails console, by running `rails c`:
- Through the Rails console:
```ruby
project = ::Project.find_by_full_path("path-to/your-awesome-project")
project = ::Project.find_by_full_path("path-to/your-configuration-project")
# agent-name should be the same as specified above in the config.yaml
agent = ::Clusters::Agent.create(name: "<agent-name>", project: project)
token = ::Clusters::AgentToken.create(agent: agent)
token.token # this will print out the token you need to use on the next step
```
For full details, read [Starting a Rails console session](../../../administration/operations/rails_console.md#starting-a-rails-console-session).
- Through GraphQL: **(PREMIUM ONLY)**
```graphql
mutation createAgent {
createClusterAgent(input: { projectPath: "path-to/your-awesome-project", name: "<agent-name>" }) {
# agent-name should be the same as specified above in the config.yaml
createClusterAgent(input: { projectPath: "path-to/your-configuration-project", name: "<agent-name>" }) {
clusterAgent {
id
name
......@@ -182,7 +199,7 @@ the Agent in subsequent steps. You can create an Agent record either:
```
NOTE: **Note:**
GraphQL only displays the token once, after creating it.
GraphQL only displays the token one time after creating it.
If you are new to using the GitLab GraphQL API, refer to the
[Getting started with the GraphQL API page](../../../api/graphql/getting_started.md),
......@@ -192,7 +209,7 @@ the Agent in subsequent steps. You can create an Agent record either:
After generating the token, you must apply it to the Kubernetes cluster.
1. If you haven't previous defined or created a namespace, run the following command:
1. If you haven't previously defined or created a namespace, run the following command:
```shell
kubectl create namespace <YOUR-DESIRED-NAMESPACE>
......@@ -210,43 +227,40 @@ Next, install the in-cluster component of the Agent. This example file contains
Kubernetes resources required for the Agent to be installed. You can modify this
example [`resources.yml` file](#example-resourcesyml-file) in the following ways:
- You can replace `gitlab-agent` with `<YOUR-DESIRED-NAMESPACE>`.
- For the `kas-address` (Kubernetes Agent Server), the agent can use the WebSockets
or gRPC protocols to connect to the Agent Server. Depending on your cluster
configuration and GitLab architecture, you may need to use one or the other.
For the `gitlab-kas` Helm chart, an Ingress is created for the Agent Server using
the `/-/kubernetes-agent` endpoint. This can be used for the WebSockets protocol connection.
- Specify the `grpc` scheme (such as `grpc://gitlab-kas:5005`) to use gRPC directly.
Encrypted gRPC is not supported yet. Follow the
- Replace `namespace: gitlab-agent` with `namespace: <YOUR-DESIRED-NAMESPACE>`.
- You can configure `kas-address` (Kubernetes Agent Server) in several ways.
The agent can use the WebSockets or gRPC protocols to connect to the Agent Server.
Select the option appropriate for your cluster configuration and GitLab architecture:
- The `wss` scheme (an encrypted WebSockets connection) is specified by default
after you install `gitlab-kas` sub-chart or enable `kas` for Omnibus GitLab.
In this case, you must set `wss://GitLab.host.tld:443/-/kubernetes-agent` as
`kas-address`, where `GitLab.host.tld` is your GitLab hostname.
- Specify the `ws` scheme (such as `ws://GitLab.host.tld:80/-/kubernetes-agent`)
to use an unencrypted WebSockets connection.
- Specify the `grpc` scheme if both Agent and Server are installed in one cluster.
In this case, you may specify the `kas-address` value as
`grpc://gitlab-kas.<your-namespace>:5005` to use gRPC directly, where `gitlab-kas`
is the name of the service created by the `gitlab-kas` chart, and `your-namespace`
is the namespace where the chart was installed. Encrypted gRPC is not supported yet.
Follow the
[Support TLS for gRPC communication issue](https://gitlab.com/gitlab-org/cluster-integration/gitlab-agent/-/issues/7)
for progress updates.
- Specify the `ws` scheme (such as `ws://gitlab-kas-ingress:80/-/kubernetes-agent`)
to use an unencrypted WebSockets connection.
- Specify the `wss` scheme (such as `wss://gitlab-kas-ingress:443/-/kubernetes-agent`)
to use an encrypted WebSockets connection. This is the recommended option if
installing the Agent into a separate cluster from your Agent Server.
- If you defined your own secret name, replace `gitlab-agent-token` with your secret name.
- If you defined your own secret name, replace `gitlab-agent-token` with your
secret name in the `secretName:` section.
To apply this file, run the following command:
```shell
kubectl apply -n gitlab-agent -f ./resources.yml
kubectl apply -n <YOUR-DESIRED-NAMESPACE> -f ./resources.yml
```
To review your configuration, run the following command:
```shell
$ kubectl get pods --all-namespaces
$ kubectl get pods -n <YOUR-DESIRED-NAMESPACE>
NAMESPACE NAME READY STATUS RESTARTS AGE
gitlab-agent gitlab-agent-77689f7dcb-5skqk 1/1 Running 0 51s
kube-system coredns-f9fd979d6-n6wcw 1/1 Running 0 14m
kube-system etcd-minikube 1/1 Running 0 14m
kube-system kube-apiserver-minikube 1/1 Running 0 14m
kube-system kube-controller-manager-minikube 1/1 Running 0 14m
kube-system kube-proxy-j6zdh 1/1 Running 0 14m
kube-system kube-scheduler-minikube 1/1 Running 0 14m
kube-system storage-provisioner 1/1 Running 0 14m
```
#### Example `resources.yml` file
......@@ -278,7 +292,7 @@ spec:
args:
- --token-file=/config/token
- --kas-address
- grpc://host.docker.internal:5005 # {"$openapi":"kas-address"}
- wss://gitlab.host.tld:443/-/kubernetes-agent
volumeMounts:
- name: token-volume
mountPath: /config
......@@ -353,7 +367,9 @@ subjects:
In a previous step, you configured a `config.yaml` to point to the GitLab projects
the Agent should synchronize. In each of those projects, you must create a `manifest.yaml`
file for the Agent to monitor. You can auto-generate this `manifest.yaml` with a
templating engine or other means.
templating engine or other means. Only public projects are supported as
manifest projects. Support for private projects is planned in the issue
[Agent authorization for private manifest projects](https://gitlab.com/gitlab-org/gitlab/-/issues/220912).
Each time you commit and push a change to this file, the Agent logs the change:
......@@ -363,7 +379,7 @@ Each time you commit and push a change to this file, the Agent logs the change:
#### Example `manifest.yaml` file
This file creates a simple NGINX deployment.
This file creates an NGINX deployment.
```yaml
apiVersion: apps/v1
......
......@@ -518,6 +518,23 @@ If you want to retain ownership over the original namespace and
protect the URL redirects, then instead of changing a group's path or renaming a
username, you can create a new group and transfer projects to it.
### Group repository settings
You can change settings that are specific to repositories in your group.
#### Custom initial branch name **(CORE ONLY)**
> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/43290) in GitLab 13.6.
By default, when you create a new project in GitLab, the initial branch is called `master`.
For groups, a group administrator can customize the initial branch name to something
else. This way, every new project created under that group from then on will start from the custom branch name rather than `master`. To do so:
1. Go to the **Group page > Settings > Repository** and expand **Default initial
branch name**.
1. Change the default initial branch to a custom name of your choice.
1. **Save Changes**.
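For reference, here is a minimal Rails console sketch for checking the stored value, assuming a placeholder group path (`my-group`) and the `NamespaceSetting` record this merge request wires up:

```ruby
# Illustrative only; 'my-group' is a placeholder path.
group = Group.find_by_full_path('my-group')

# The custom name is stored on the group's namespace settings record.
group.namespace_settings&.default_branch_name
# => e.g. "main" when a custom name was saved, nil otherwise

# Group#default_branch_name is delegated to the same record in this change.
group.default_branch_name
```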
### Remove a group
To remove a group and its contents:
......
......@@ -94,7 +94,11 @@ GitLab creates the following resources for RBAC clusters.
| Environment namespace | `Namespace` | Contains all environment-specific resources | Deploying to a cluster |
| Environment namespace | `ServiceAccount` | Uses namespace of environment | Deploying to a cluster |
| Environment namespace | `Secret` | Token for environment ServiceAccount | Deploying to a cluster |
| Environment namespace | `RoleBinding` | [`edit`](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles) roleRef | Deploying to a cluster |
| Environment namespace | `RoleBinding` | [`admin`](https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles) roleRef | Deploying to a cluster |
The environment namespace `RoleBinding` was
[updated](https://gitlab.com/gitlab-org/gitlab/-/issues/31113) in GitLab 13.6
to `admin` roleRef. Previously, the `edit` roleRef was used.
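As a rough internal sketch, the role binding built by the service code changed in this merge request now references the `admin` cluster role (resource names below are placeholders):

```ruby
# Sketch only; names are illustrative placeholders.
Gitlab::Kubernetes::RoleBinding.new(
  name: 'gitlab-my-project-rolebinding',
  role_name: Clusters::Kubernetes::PROJECT_CLUSTER_ROLE_NAME, # 'admin' as of this change, previously 'edit'
  role_kind: :ClusterRole,
  namespace: 'my-project-environment',
  service_account_name: 'my-project-service-account'
)
```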
### ABAC cluster resources
......
......@@ -12,6 +12,7 @@ import {
GlFormCheckbox,
GlFormRadioGroup,
} from '@gitlab/ui';
import { initFormField } from 'ee/security_configuration/utils';
import * as Sentry from '~/sentry/wrapper';
import { __, s__ } from '~/locale';
import { redirectTo } from '~/lib/utils/url_utility';
......@@ -21,13 +22,6 @@ import dastScannerProfileUpdateMutation from '../graphql/dast_scanner_profile_up
import tooltipIcon from './tooltip_icon.vue';
import { SCAN_TYPE, SCAN_TYPE_OPTIONS } from '../constants';
const initField = (value, isRequired = true) => ({
value,
required: isRequired,
state: null,
feedback: null,
});
const SPIDER_TIMEOUT_MIN = 0;
const SPIDER_TIMEOUT_MAX = 2880;
const TARGET_TIMEOUT_MIN = 1;
......@@ -74,12 +68,12 @@ export default {
} = this.profile;
const form = {
profileName: initField(name),
spiderTimeout: initField(spiderTimeout),
targetTimeout: initField(targetTimeout),
scanType: initField(scanType),
useAjaxSpider: initField(useAjaxSpider),
showDebugMessages: initField(showDebugMessages),
profileName: initFormField({ value: name }),
spiderTimeout: initFormField({ value: spiderTimeout }),
targetTimeout: initFormField({ value: targetTimeout }),
scanType: initFormField({ value: scanType }),
useAjaxSpider: initFormField({ value: useAjaxSpider }),
showDebugMessages: initFormField({ value: showDebugMessages }),
};
return {
......
<script>
import { GlFormGroup, GlFormInput, GlFormCheckbox } from '@gitlab/ui';
import { initFormField } from 'ee/security_configuration/utils';
import validation from '~/vue_shared/directives/validation';
export default {
components: {
GlFormGroup,
GlFormInput,
GlFormCheckbox,
},
directives: {
validation: validation(),
},
props: {
fields: {
type: Object,
required: false,
default: () => ({}),
},
showValidation: {
type: Boolean,
required: false,
default: false,
},
},
data() {
const {
authEnabled,
authenticationUrl,
userName,
password,
// default to commonly used names for `userName` and `password` fields in authentication forms
userNameFormField = 'username',
passwordFormField = 'password',
} = this.fields;
return {
form: {
state: false,
fields: {
authEnabled: initFormField({ value: authEnabled, skipValidation: true }),
authenticationUrl: initFormField({ value: authenticationUrl }),
userName: initFormField({ value: userName }),
password: initFormField({ value: password }),
userNameFormField: initFormField({ value: userNameFormField }),
passwordFormField: initFormField({ value: passwordFormField }),
},
},
};
},
computed: {
showValidationOrInEditMode() {
return this.showValidation || Object.keys(this.fields).length > 0;
},
},
watch: {
form: { handler: 'emitUpdate', immediate: true, deep: true },
},
methods: {
emitUpdate() {
this.$emit('input', this.form);
},
},
};
</script>
<template>
<section>
<gl-form-group :label="s__('DastProfiles|Authentication')">
<gl-form-checkbox v-model="form.fields.authEnabled.value">{{
s__('DastProfiles|Enable Authentication')
}}</gl-form-checkbox>
</gl-form-group>
<div v-if="form.fields.authEnabled.value" data-testid="auth-form">
<div class="row">
<gl-form-group
:label="s__('DastProfiles|Authentication URL')"
:invalid-feedback="form.fields.authenticationUrl.feedback"
class="col-md-6"
>
<gl-form-input
v-model="form.fields.authenticationUrl.value"
v-validation:[showValidationOrInEditMode]
name="authenticationUrl"
type="url"
required
:state="form.fields.authenticationUrl.state"
/>
</gl-form-group>
</div>
<div class="row">
<gl-form-group
:label="s__('DastProfiles|Username')"
:invalid-feedback="form.fields.userName.feedback"
class="col-md-6"
>
<gl-form-input
v-model="form.fields.userName.value"
v-validation:[showValidationOrInEditMode]
autocomplete="off"
name="userName"
type="text"
required
:state="form.fields.userName.state"
/>
</gl-form-group>
<gl-form-group
:label="s__('DastProfiles|Password')"
:invalid-feedback="form.fields.password.feedback"
class="col-md-6"
>
<gl-form-input
v-model="form.fields.password.value"
v-validation:[showValidationOrInEditMode]
autocomplete="off"
name="password"
type="password"
required
:state="form.fields.password.state"
/>
</gl-form-group>
</div>
<div class="row">
<gl-form-group
:label="s__('DastProfiles|Username form field')"
:invalid-feedback="form.fields.userNameFormField.feedback"
class="col-md-6"
>
<gl-form-input
v-model="form.fields.userNameFormField.value"
v-validation:[showValidationOrInEditMode]
name="userNameFormField"
type="text"
required
:state="form.fields.userNameFormField.state"
/>
</gl-form-group>
<gl-form-group
:label="s__('DastProfiles|Password form field')"
:invalid-feedback="form.fields.passwordFormField.feedback"
class="col-md-6"
>
<gl-form-input
v-model="form.fields.passwordFormField.value"
v-validation:[showValidationOrInEditMode]
name="passwordFormField"
type="text"
required
:state="form.fields.passwordFormField.state"
/>
</gl-form-group>
</div>
</div>
</section>
</template>
......@@ -10,6 +10,7 @@ import {
GlModal,
GlToggle,
} from '@gitlab/ui';
import { initFormField } from 'ee/security_configuration/utils';
import * as Sentry from '~/sentry/wrapper';
import { __, s__ } from '~/locale';
import { redirectTo } from '~/lib/utils/url_utility';
......@@ -26,12 +27,6 @@ import { DAST_SITE_VALIDATION_STATUS, DAST_SITE_VALIDATION_POLL_INTERVAL } from
const { PENDING, INPROGRESS, PASSED, FAILED } = DAST_SITE_VALIDATION_STATUS;
const initField = value => ({
value,
state: null,
feedback: null,
});
export default {
name: 'DastSiteProfileForm',
components: {
......@@ -71,8 +66,8 @@ export default {
state: false,
showValidation: false,
fields: {
profileName: initField(name),
targetUrl: initField(targetUrl),
profileName: initFormField({ value: name }),
targetUrl: initFormField({ value: targetUrl }),
},
};
......
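// Builds the initial state object for a single form field (value, required flag, validation state, feedback message).
// Passing `skipValidation: true` marks the field as valid up front.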
export const initFormField = ({ value, required = true, skipValidation = false }) => ({
value,
required,
state: skipValidation ? true : null,
feedback: null,
});
<script>
import { GlLink, GlSprintf, GlModalDirective, GlButton, GlIcon } from '@gitlab/ui';
import {
GlLink,
GlSprintf,
GlModalDirective,
GlButton,
GlIcon,
GlKeysetPagination,
} from '@gitlab/ui';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import ProjectsTable from './projects_table.vue';
import UsageGraph from './usage_graph.vue';
......@@ -9,18 +16,20 @@ import query from '../queries/storage.query.graphql';
import TemporaryStorageIncreaseModal from './temporary_storage_increase_modal.vue';
import { parseBoolean } from '~/lib/utils/common_utils';
import { formatUsageSize, parseGetStorageResults } from '../utils';
import { PROJECTS_PER_PAGE } from '../constants';
export default {
name: 'StorageCounterApp',
components: {
ProjectsTable,
GlLink,
GlIcon,
GlButton,
GlSprintf,
GlIcon,
StorageInlineAlert,
UsageGraph,
ProjectsTable,
UsageStatistics,
StorageInlineAlert,
GlKeysetPagination,
TemporaryStorageIncreaseModal,
},
directives: {
......@@ -55,20 +64,25 @@ export default {
fullPath: this.namespacePath,
searchTerm: this.searchTerm,
withExcessStorageData: this.isAdditionalStorageFlagEnabled,
first: PROJECTS_PER_PAGE,
};
},
update: parseGetStorageResults,
result() {
this.firstFetch = false;
},
},
},
data() {
return {
namespace: {},
searchTerm: '',
firstFetch: true,
};
},
computed: {
namespaceProjects() {
return this.namespace?.projects ?? [];
return this.namespace?.projects?.data ?? [];
},
isStorageIncreaseModalVisible() {
return parseBoolean(this.isTemporaryStorageIncreaseVisible);
......@@ -92,8 +106,24 @@ export default {
additionalPurchasedStorageSize: this.namespace.additionalPurchasedStorageSize,
};
},
isQueryLoading() {
return this.$apollo.queries.namespace.loading;
},
pageInfo() {
return this.namespace.projects?.pageInfo ?? {};
},
shouldShowStorageInlineAlert() {
return this.isAdditionalStorageFlagEnabled && !this.$apollo.queries.namespace.loading;
if (this.firstFetch) {
// for initial load check if the data fetch is done (isQueryLoading)
return this.isAdditionalStorageFlagEnabled && !this.isQueryLoading;
}
// for all subsequent queries the storage inline alert doesn't
// have to be re-rendered as the data from graphql will remain
// the same.
return this.isAdditionalStorageFlagEnabled;
},
showPagination() {
return Boolean(this.pageInfo?.hasPreviousPage || this.pageInfo?.hasNextPage);
},
},
methods: {
......@@ -103,8 +133,30 @@ export default {
this.searchTerm = input;
}
},
fetchMoreProjects(vars) {
this.$apollo.queries.namespace.fetchMore({
variables: {
fullPath: this.namespacePath,
withExcessStorageData: this.isAdditionalStorageFlagEnabled,
first: PROJECTS_PER_PAGE,
...vars,
},
updateQuery(previousResult, { fetchMoreResult }) {
return fetchMoreResult;
},
});
},
onPrev(before) {
if (this.pageInfo?.hasPreviousPage) {
this.fetchMoreProjects({ before });
}
},
onNext(after) {
if (this.pageInfo?.hasNextPage) {
this.fetchMoreProjects({ after });
}
},
},
modalId: 'temporary-increase-storage-modal',
};
</script>
......@@ -181,9 +233,13 @@ export default {
</div>
<projects-table
:projects="namespaceProjects"
:is-loading="isQueryLoading"
:additional-purchased-storage-size="namespace.additionalPurchasedStorageSize || 0"
@search="handleSearch"
/>
<div class="gl-display-flex gl-justify-content-center gl-mt-5">
<gl-keyset-pagination v-if="showPagination" v-bind="pageInfo" @prev="onPrev" @next="onNext" />
</div>
<temporary-storage-increase-modal
v-if="isStorageIncreaseModalVisible"
:limit="formattedNamespaceLimit"
......
<script>
import { GlSkeletonLoader } from '@gitlab/ui';
import { SKELETON_LOADER_ROWS } from '../constants';
export default {
name: 'ProjectsSkeletonLoader',
components: { GlSkeletonLoader },
SKELETON_LOADER_ROWS,
};
</script>
<template>
<div class="gl-border-b-solid gl-border-b-1 gl-border-gray-100">
<div class="gl-flex-direction-column gl-display-md-none" data-testid="mobile-loader">
<div
v-for="index in $options.SKELETON_LOADER_ROWS.mobile"
:key="index"
class="gl-responsive-table-row gl-border-solid gl-border-b-1 gl-pt-3 gl-pb-3 gl-border-b-gray-100"
>
<gl-skeleton-loader :width="500" :height="172">
<rect width="480" height="20" x="10" y="15" rx="4" />
<rect width="480" height="20" x="10" y="80" rx="4" />
<rect width="480" height="20" x="10" y="145" rx="4" />
</gl-skeleton-loader>
</div>
</div>
<div
class="gl-display-none gl-display-md-flex gl-flex-direction-column"
data-testid="desktop-loader"
>
<gl-skeleton-loader
v-for="index in $options.SKELETON_LOADER_ROWS.desktop"
:key="index"
:width="1000"
:height="39"
>
<rect rx="4" width="320" height="8" x="0" y="18" />
<rect rx="4" width="60" height="8" x="500" y="18" />
<rect rx="4" width="60" height="8" x="750" y="18" />
</gl-skeleton-loader>
</div>
</div>
</template>
......@@ -3,11 +3,13 @@ import { GlSearchBoxByType } from '@gitlab/ui';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import Project from './project.vue';
import ProjectWithExcessStorage from './project_with_excess_storage.vue';
import ProjectsSkeletonLoader from './projects_skeleton_loader.vue';
import { SEARCH_DEBOUNCE_MS } from '~/ref/constants';
export default {
components: {
Project,
ProjectsSkeletonLoader,
ProjectWithExcessStorage,
GlSearchBoxByType,
},
......@@ -21,6 +23,11 @@ export default {
type: Number,
required: true,
},
isLoading: {
type: Boolean,
required: false,
default: false,
},
},
computed: {
isAdditionalStorageFlagEnabled() {
......@@ -44,7 +51,7 @@ export default {
role="row"
>
<template v-if="isAdditionalStorageFlagEnabled">
<div class="table-section section-50 gl-font-weight-bold gl-pl-5" role="columnheader">
<div class="table-section section-50 gl-font-weight-bold gl-pl-5" role="columnheader">
{{ __('Project') }}
</div>
<div class="table-section section-15 gl-font-weight-bold" role="columnheader">
......@@ -70,13 +77,15 @@ export default {
</div>
</template>
</div>
<component
:is="projectRowComponent"
v-for="project in projects"
:key="project.id"
:project="project"
:additional-purchased-storage-size="additionalPurchasedStorageSize"
/>
<projects-skeleton-loader v-if="isAdditionalStorageFlagEnabled && isLoading" />
<template v-else>
<component
:is="projectRowComponent"
v-for="project in projects"
:key="project.id"
:project="project"
:additional-purchased-storage-size="additionalPurchasedStorageSize"
/>
</template>
</div>
</template>
......@@ -11,3 +11,10 @@ export const STORAGE_USAGE_THRESHOLDS = {
[ALERT_THRESHOLD]: 0.95,
[ERROR_THRESHOLD]: 1.0,
};
export const PROJECTS_PER_PAGE = 20;
export const SKELETON_LOADER_ROWS = {
desktop: PROJECTS_PER_PAGE,
mobile: 5,
};
#import "~/graphql_shared/fragments/pageInfo.fragment.graphql"
query getStorageCounter(
$fullPath: ID!
$searchTerm: String = ""
$withExcessStorageData: Boolean = false
$searchTerm: String = ""
$first: Int!
$after: String
$before: String
) {
namespace(fullPath: $fullPath) {
id
......@@ -23,29 +28,37 @@ query getStorageCounter(
wikiSize
snippetsSize
}
projects(includeSubgroups: true, sort: STORAGE, search: $searchTerm) {
edges {
node {
id
fullPath
nameWithNamespace
avatarUrl
webUrl
name
repositorySizeExcess @include(if: $withExcessStorageData)
actualRepositorySizeLimit @include(if: $withExcessStorageData)
statistics {
commitCount
storageSize
repositorySize
lfsObjectsSize
buildArtifactsSize
packagesSize
wikiSize
snippetsSize
}
projects(
includeSubgroups: true
search: $searchTerm
first: $first
after: $after
before: $before
sort: STORAGE
) {
nodes {
id
fullPath
nameWithNamespace
avatarUrl
webUrl
name
repositorySizeExcess @include(if: $withExcessStorageData)
actualRepositorySizeLimit @include(if: $withExcessStorageData)
statistics {
commitCount
storageSize
repositorySize
lfsObjectsSize
buildArtifactsSize
packagesSize
wikiSize
snippetsSize
}
}
pageInfo {
...PageInfo
}
}
}
}
......@@ -86,7 +86,7 @@ export const parseProjects = ({
additionalPurchasedStorageSize - totalRepositorySizeExcess,
);
return projects.edges.map(({ node: project }) =>
return projects.nodes.map(project =>
calculateUsedAndRemStorage(project, purchasedStorageRemaining),
);
};
......@@ -118,21 +118,26 @@ export const parseGetStorageResults = data => {
},
} = data || {};
const totalUsage = rootStorageStatistics?.storageSize
? numberToHumanSize(rootStorageStatistics.storageSize)
: 'N/A';
return {
projects: parseProjects({
projects,
additionalPurchasedStorageSize,
totalRepositorySizeExcess,
}),
projects: {
data: parseProjects({
projects,
additionalPurchasedStorageSize,
totalRepositorySizeExcess,
}),
pageInfo: projects.pageInfo,
},
additionalPurchasedStorageSize,
actualRepositorySizeLimit,
containsLockedProjects,
repositorySizeExcessProjectCount,
totalRepositorySize,
totalRepositorySizeExcess,
totalUsage: rootStorageStatistics?.storageSize
? numberToHumanSize(rootStorageStatistics.storageSize)
: 'N/A',
totalUsage,
rootStorageStatistics,
limit: storageSizeLimit,
};
......
......@@ -7,29 +7,30 @@ export const PREDEFINED_NETWORK_POLICIES = [
name: 'drop-outbound',
isEnabled: false,
manifest: `---
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
apiVersion: cilium.io/v2
kind: CiliumNetworkPolicy
metadata:
name: drop-outbound
spec:
podSelector: {}
policyTypes:
- Egress`,
endpointSelector: {}
egress:
- {}`,
},
{
name: 'allow-inbound-http',
isEnabled: false,
manifest: `---
apiVersion: networking.k8s.io/v1
kind: NetworkPolicy
apiVersion: cilium.io/v2
kind: CiliumNetworkPolicy
metadata:
name: allow-inbound-http
spec:
podSelector: {}
endpointSelector: {}
ingress:
- ports:
- port: 80
- port: 443`,
- toPorts:
- ports:
- port: '80'
- port: '443'`,
},
];
......
---
title: Change OOTB from `NetworkPolicy` to `CiliumNetworkPolicy`
merge_request: 45579
author:
type: fixed
import { mount } from '@vue/test-utils';
import { GlFormCheckbox } from '@gitlab/ui';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import DastSiteAuthSection from 'ee/security_configuration/dast_site_profiles_form/components/dast_site_auth_section.vue';
describe('DastSiteAuthSection', () => {
let wrapper;
const createComponent = ({ fields } = {}) => {
wrapper = extendedWrapper(
mount(DastSiteAuthSection, {
propsData: {
fields,
},
}),
);
};
beforeEach(() => {
createComponent();
});
afterEach(() => {
wrapper.destroy();
});
const findByNameAttribute = name => wrapper.find(`[name="${name}"]`);
const findAuthForm = () => wrapper.findByTestId('auth-form');
const findAuthCheckbox = () => wrapper.find(GlFormCheckbox);
const setAuthentication = ({ enabled }) => {
findAuthCheckbox().vm.$emit('input', enabled);
return wrapper.vm.$nextTick();
};
const getLatestInputEventPayload = () => {
const latestInputEvent = [...wrapper.emitted('input')].pop();
const [payload] = latestInputEvent;
return payload;
};
describe('authentication toggle', () => {
it.each([true, false])(
'is set correctly when the "authEnabled" field is set to "%s"',
authEnabled => {
createComponent({ fields: { authEnabled } });
expect(findAuthCheckbox().vm.$attrs.checked).toBe(authEnabled);
},
);
it('controls the visibility of the authentication-fields form', async () => {
expect(findAuthForm().exists()).toBe(false);
await setAuthentication({ enabled: true });
expect(findAuthForm().exists()).toBe(true);
});
it.each([true, false])(
'makes the component emit an "input" event when changed',
async enabled => {
await setAuthentication({ enabled });
expect(getLatestInputEventPayload().fields.authEnabled.value).toBe(enabled);
},
);
});
describe('authentication form', () => {
beforeEach(async () => {
await setAuthentication({ enabled: true });
});
const inputFieldsWithValues = {
authenticationUrl: 'http://www.gitlab.com',
userName: 'foo',
password: 'foo',
userNameFormField: 'foo',
passwordFormField: 'foo',
};
const inputFieldNames = Object.keys(inputFieldsWithValues);
describe.each(inputFieldNames)('input field "%s"', inputFieldName => {
it('is rendered', () => {
expect(findByNameAttribute(inputFieldName).exists()).toBe(true);
});
it('makes the component emit an "input" event when its value changes', () => {
const input = findByNameAttribute(inputFieldName);
const newValue = 'foo';
input.setValue(newValue);
expect(getLatestInputEventPayload().fields[inputFieldName].value).toBe(newValue);
});
});
describe('validity', () => {
    it('is not valid by default', () => {
expect(getLatestInputEventPayload().state).toBe(false);
});
it('is valid when correct values are passed in via the "fields" prop', async () => {
createComponent({ fields: inputFieldsWithValues });
await setAuthentication({ enabled: true });
expect(getLatestInputEventPayload().state).toBe(true);
});
it('is valid once all fields have been entered correctly', () => {
Object.entries(inputFieldsWithValues).forEach(([inputFieldName, inputFieldValue]) => {
const input = findByNameAttribute(inputFieldName);
input.setValue(inputFieldValue);
input.trigger('blur');
});
expect(getLatestInputEventPayload().state).toBe(true);
});
});
});
});
......@@ -23,6 +23,8 @@ describe('Storage counter app', () => {
const findUsageStatistics = () => wrapper.find(UsageStatistics);
const findStorageInlineAlert = () => wrapper.find(StorageInlineAlert);
const findProjectsTable = () => wrapper.find(ProjectsTable);
const findPrevButton = () => wrapper.find('[data-testid="prevButton"]');
const findNextButton = () => wrapper.find('[data-testid="nextButton"]');
const createComponent = ({
props = {},
......@@ -257,4 +259,30 @@ describe('Storage counter app', () => {
expect(wrapper.vm.searchTerm).toBe('');
});
});
describe('renders projects table pagination component', () => {
const namespaceWithPageInfo = {
namespace: {
...withRootStorageStatistics,
projects: {
...withRootStorageStatistics.projects,
pageInfo: {
hasPreviousPage: false,
hasNextPage: true,
},
},
},
};
beforeEach(() => {
createComponent(namespaceWithPageInfo);
});
it('with disabled "Prev" button', () => {
expect(findPrevButton().attributes().disabled).toBe('disabled');
});
it('with enabled "Next" button', () => {
expect(findNextButton().attributes().disabled).toBeUndefined();
});
});
});
import { mount } from '@vue/test-utils';
import ProjectsSkeletonLoader from 'ee/storage_counter/components/projects_skeleton_loader.vue';
describe('ProjectsSkeletonLoader', () => {
let wrapper;
const createComponent = (props = {}) => {
wrapper = mount(ProjectsSkeletonLoader, {
propsData: {
...props,
},
});
};
const findDesktopLoader = () => wrapper.find('[data-testid="desktop-loader"]');
const findMobileLoader = () => wrapper.find('[data-testid="mobile-loader"]');
beforeEach(createComponent);
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
describe('desktop loader', () => {
it('produces 20 rows', () => {
expect(findDesktopLoader().findAll('rect[width="1000"]')).toHaveLength(20);
});
it('has the correct classes', () => {
expect(findDesktopLoader().classes()).toEqual([
'gl-display-none',
'gl-display-md-flex',
'gl-flex-direction-column',
]);
});
});
describe('mobile loader', () => {
it('produces 5 rows', () => {
expect(findMobileLoader().findAll('rect[height="172"]')).toHaveLength(5);
});
it('has the correct classes', () => {
expect(findMobileLoader().classes()).toEqual([
'gl-flex-direction-column',
'gl-display-md-none',
]);
});
});
});
......@@ -61,7 +61,7 @@ export const projects = [
export const namespaceData = {
totalUsage: 'N/A',
limit: 10000000,
projects,
projects: { data: projects },
};
export const withRootStorageStatistics = {
......@@ -86,5 +86,5 @@ export const withRootStorageStatistics = {
};
export const mockGetStorageCounterGraphQLResponse = {
edges: projects.map(node => ({ node })),
nodes: projects.map(node => node),
};
# frozen_string_literal: true
module Gitlab
module BackgroundMigration
# This class populates missing dismissal information for
# vulnerability entries.
class PopulateMissingVulnerabilityDismissalInformation
class Vulnerability < ActiveRecord::Base # rubocop:disable Style/Documentation
include EachBatch
self.table_name = 'vulnerabilities'
has_one :finding, class_name: '::Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation::Finding'
scope :broken, -> { where('state = 2 AND (dismissed_at IS NULL OR dismissed_by_id IS NULL)') }
def copy_dismissal_information
return unless finding&.dismissal_feedback
update_columns(
dismissed_at: finding.dismissal_feedback.created_at,
dismissed_by_id: finding.dismissal_feedback.author_id
)
end
end
class Finding < ActiveRecord::Base # rubocop:disable Style/Documentation
include ShaAttribute
self.table_name = 'vulnerability_occurrences'
sha_attribute :project_fingerprint
def dismissal_feedback
Feedback.dismissal.where(category: report_type, project_fingerprint: project_fingerprint, project_id: project_id).first
end
end
class Feedback < ActiveRecord::Base # rubocop:disable Style/Documentation
DISMISSAL_TYPE = 0
self.table_name = 'vulnerability_feedback'
scope :dismissal, -> { where(feedback_type: DISMISSAL_TYPE) }
end
def perform(*vulnerability_ids)
Vulnerability.includes(:finding).where(id: vulnerability_ids).each { |vulnerability| populate_for(vulnerability) }
log_info(vulnerability_ids)
end
private
def populate_for(vulnerability)
log_warning(vulnerability) unless vulnerability.copy_dismissal_information
rescue StandardError => error
log_error(error, vulnerability)
end
def log_info(vulnerability_ids)
::Gitlab::BackgroundMigration::Logger.info(
migrator: self.class.name,
message: 'Dismissal information has been copied',
count: vulnerability_ids.length
)
end
def log_warning(vulnerability)
::Gitlab::BackgroundMigration::Logger.warn(
migrator: self.class.name,
message: 'Could not update vulnerability!',
vulnerability_id: vulnerability.id
)
end
def log_error(error, vulnerability)
::Gitlab::BackgroundMigration::Logger.error(
migrator: self.class.name,
message: error.message,
vulnerability_id: vulnerability.id
)
end
end
end
end
# frozen_string_literal: true
require 'snowplow-tracker'
module Gitlab
module Tracking
SNOWPLOW_NAMESPACE = 'gl'
......@@ -27,16 +25,11 @@ module Gitlab
end
def event(category, action, label: nil, property: nil, value: nil, context: nil)
return unless enabled?
snowplow.track_struct_event(category, action, label, property, value, context, (Time.now.to_f * 1000).to_i)
snowplow.event(category, action, label: label, property: property, value: value, context: context)
end
def self_describing_event(schema_url, event_data_json, context: nil)
return unless enabled?
event_json = SnowplowTracker::SelfDescribingJson.new(schema_url, event_data_json)
snowplow.track_self_describing_event(event_json, context, (Time.now.to_f * 1000).to_i)
snowplow.self_describing_event(schema_url, event_data_json, context: context)
end
def snowplow_options(group)
......@@ -54,19 +47,7 @@ module Gitlab
private
def snowplow
@snowplow ||= SnowplowTracker::Tracker.new(
emitter,
SnowplowTracker::Subject.new,
SNOWPLOW_NAMESPACE,
Gitlab::CurrentSettings.snowplow_app_id
)
end
def emitter
SnowplowTracker::AsyncEmitter.new(
Gitlab::CurrentSettings.snowplow_collector_hostname,
protocol: 'https'
)
@snowplow ||= Gitlab::Tracking::Destinations::Snowplow.new
end
end
end
......
# frozen_string_literal: true
module Gitlab
module Tracking
module Destinations
class Base
def event(category, action, label: nil, property: nil, value: nil, context: nil)
raise NotImplementedError, "#{self} does not implement #{__method__}"
end
end
end
end
end
# frozen_string_literal: true
require 'snowplow-tracker'
module Gitlab
module Tracking
module Destinations
class Snowplow < Base
extend ::Gitlab::Utils::Override
override :event
def event(category, action, label: nil, property: nil, value: nil, context: nil)
return unless enabled?
tracker.track_struct_event(category, action, label, property, value, context, (Time.now.to_f * 1000).to_i)
end
def self_describing_event(schema_url, event_data_json, context: nil)
return unless enabled?
event_json = SnowplowTracker::SelfDescribingJson.new(schema_url, event_data_json)
tracker.track_self_describing_event(event_json, context, (Time.now.to_f * 1000).to_i)
end
private
def enabled?
Gitlab::CurrentSettings.snowplow_enabled?
end
def tracker
@tracker ||= SnowplowTracker::Tracker.new(
emitter,
SnowplowTracker::Subject.new,
Gitlab::Tracking::SNOWPLOW_NAMESPACE,
Gitlab::CurrentSettings.snowplow_app_id
)
end
def emitter
SnowplowTracker::AsyncEmitter.new(
Gitlab::CurrentSettings.snowplow_collector_hostname,
protocol: 'https'
)
end
end
end
end
end
......@@ -217,7 +217,8 @@ module Gitlab
personal_snippets: count(PersonalSnippet.where(last_28_days_time_period)),
project_snippets: count(ProjectSnippet.where(last_28_days_time_period))
}.merge(
snowplow_event_counts(last_28_days_time_period(column: :collector_tstamp))
snowplow_event_counts(last_28_days_time_period(column: :collector_tstamp)),
aggregated_metrics_monthly
).tap do |data|
data[:snippets] = data[:personal_snippets] + data[:project_snippets]
end
......@@ -239,7 +240,10 @@ module Gitlab
def system_usage_data_weekly
{
counts_weekly: {}
counts_weekly: {
}.merge(
aggregated_metrics_weekly
)
}
end
......@@ -691,11 +695,19 @@ module Gitlab
{ redis_hll_counters: ::Gitlab::UsageDataCounters::HLLRedisCounter.unique_events_data }
end
def aggregated_metrics
def aggregated_metrics_monthly
return {} unless Feature.enabled?(:product_analytics_aggregated_metrics)
{
aggregated_metrics: ::Gitlab::UsageDataCounters::HLLRedisCounter.aggregated_metrics_monthly_data
}
end
def aggregated_metrics_weekly
return {} unless Feature.enabled?(:product_analytics_aggregated_metrics)
{
aggregated_metrics: ::Gitlab::UsageDataCounters::HLLRedisCounter.aggregated_metrics_data
aggregated_metrics: ::Gitlab::UsageDataCounters::HLLRedisCounter.aggregated_metrics_weekly_data
}
end
......
......@@ -90,18 +90,24 @@ module Gitlab
event_for(event_name).present?
end
def aggregated_metrics_data
def aggregated_metrics_monthly_data
aggregated_metrics.to_h do |aggregation|
[aggregation[:name], calculate_count_for_aggregation(aggregation)]
[aggregation[:name], calculate_count_for_aggregation(aggregation, start_date: 4.weeks.ago.to_date, end_date: Date.current)]
end
end
def aggregated_metrics_weekly_data
aggregated_metrics.to_h do |aggregation|
[aggregation[:name], calculate_count_for_aggregation(aggregation, start_date: 7.days.ago.to_date, end_date: Date.current)]
end
end
private
def calculate_count_for_aggregation(aggregation)
def calculate_count_for_aggregation(aggregation, start_date:, end_date:)
validate_aggregation_operator!(aggregation[:operator])
count_unique_events(event_names: aggregation[:events], start_date: 4.weeks.ago.to_date, end_date: Date.current) do |events|
count_unique_events(event_names: aggregation[:events], start_date: start_date, end_date: end_date) do |events|
raise SlotMismatch, events unless events_in_same_slot?(events)
raise AggregationMismatch, events unless events_same_aggregation?(events)
end
......
......@@ -4959,6 +4959,9 @@ msgstr ""
msgid "Changes affect new repositories only. If not specified, Git's default name %{branch_name_default} will be used."
msgstr ""
msgid "Changes affect new repositories only. If not specified, either the configured application-wide default or Git's default name %{branch_name_default} will be used."
msgstr ""
msgid "Changes are shown as if the %{b_open}source%{b_close} revision was being merged into the %{b_open}target%{b_close} revision."
msgstr ""
......@@ -8310,6 +8313,12 @@ msgstr ""
msgid "DastProfiles|Are you sure you want to delete this profile?"
msgstr ""
msgid "DastProfiles|Authentication"
msgstr ""
msgid "DastProfiles|Authentication URL"
msgstr ""
msgid "DastProfiles|Could not create site validation token. Please refresh the page, or try again later."
msgstr ""
......@@ -8370,6 +8379,9 @@ msgstr ""
msgid "DastProfiles|Edit site profile"
msgstr ""
msgid "DastProfiles|Enable Authentication"
msgstr ""
msgid "DastProfiles|Error Details"
msgstr ""
......@@ -8406,6 +8418,12 @@ msgstr ""
msgid "DastProfiles|Passive"
msgstr ""
msgid "DastProfiles|Password"
msgstr ""
msgid "DastProfiles|Password form field"
msgstr ""
msgid "DastProfiles|Please enter a valid timeout value"
msgstr ""
......@@ -8475,6 +8493,12 @@ msgstr ""
msgid "DastProfiles|Turn on AJAX spider"
msgstr ""
msgid "DastProfiles|Username"
msgstr ""
msgid "DastProfiles|Username form field"
msgstr ""
msgid "DastProfiles|Validate"
msgstr ""
......
......@@ -25,4 +25,21 @@ RSpec.describe 'Group Repository settings' do
let(:entity_type) { 'group' }
end
end
context 'Default initial branch name' do
before do
visit group_settings_repository_path(group)
end
it 'has the setting section' do
expect(page).to have_css("#js-default-branch-name")
end
it 'renders the correct setting section content' do
within("#js-default-branch-name") do
expect(page).to have_content("Default initial branch name")
expect(page).to have_content("Set the default name of the initial branch when creating new repositories through the user interface.")
end
end
end
end
import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import {
GlEmptyState,
GlLoadingIcon,
GlFormInput,
GlPagination,
GlDeprecatedDropdown,
} from '@gitlab/ui';
import { GlEmptyState, GlLoadingIcon, GlFormInput, GlPagination, GlDropdown } from '@gitlab/ui';
import stubChildren from 'helpers/stub_children';
import ErrorTrackingList from '~/error_tracking/components/error_tracking_list.vue';
import ErrorTrackingActions from '~/error_tracking/components/error_tracking_actions.vue';
......@@ -24,19 +18,19 @@ describe('ErrorTrackingList', () => {
const findErrorListTable = () => wrapper.find('table');
const findErrorListRows = () => wrapper.findAll('tbody tr');
const dropdownsArray = () => wrapper.findAll(GlDeprecatedDropdown);
const dropdownsArray = () => wrapper.findAll(GlDropdown);
const findRecentSearchesDropdown = () =>
dropdownsArray()
.at(0)
.find(GlDeprecatedDropdown);
.find(GlDropdown);
const findStatusFilterDropdown = () =>
dropdownsArray()
.at(1)
.find(GlDeprecatedDropdown);
.find(GlDropdown);
const findSortDropdown = () =>
dropdownsArray()
.at(2)
.find(GlDeprecatedDropdown);
.find(GlDropdown);
const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findPagination = () => wrapper.find(GlPagination);
const findErrorActions = () => wrapper.find(ErrorTrackingActions);
......@@ -134,8 +128,8 @@ describe('ErrorTrackingList', () => {
mountComponent({
stubs: {
GlTable: false,
GlDeprecatedDropdown: false,
GlDeprecatedDropdownItem: false,
GlDropdown: false,
GlDropdownItem: false,
GlLink: false,
},
});
......@@ -205,8 +199,8 @@ describe('ErrorTrackingList', () => {
mountComponent({
stubs: {
GlTable: false,
GlDeprecatedDropdown: false,
GlDeprecatedDropdownItem: false,
GlDropdown: false,
GlDropdownItem: false,
},
});
});
......@@ -325,8 +319,8 @@ describe('ErrorTrackingList', () => {
beforeEach(() => {
mountComponent({
stubs: {
GlDeprecatedDropdown: false,
GlDeprecatedDropdownItem: false,
GlDropdown: false,
GlDropdownItem: false,
},
});
});
......
import { pick, clone } from 'lodash';
import Vuex from 'vuex';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import { GlDeprecatedDropdown, GlDeprecatedDropdownItem } from '@gitlab/ui';
import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import ProjectDropdown from '~/error_tracking_settings/components/project_dropdown.vue';
import { defaultProps, projectList, staleProject } from '../mock';
......@@ -43,7 +43,7 @@ describe('error tracking settings project dropdown', () => {
describe('empty project list', () => {
it('renders the dropdown', () => {
expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
expect(wrapper.find(GlDeprecatedDropdown).exists()).toBeTruthy();
expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
});
it('shows helper text', () => {
......@@ -58,8 +58,8 @@ describe('error tracking settings project dropdown', () => {
});
it('does not contain any dropdown items', () => {
expect(wrapper.find(GlDeprecatedDropdownItem).exists()).toBeFalsy();
expect(wrapper.find(GlDeprecatedDropdown).props('text')).toBe('No projects available');
expect(wrapper.find(GlDropdownItem).exists()).toBeFalsy();
expect(wrapper.find(GlDropdown).props('text')).toBe('No projects available');
});
});
......@@ -72,12 +72,12 @@ describe('error tracking settings project dropdown', () => {
it('renders the dropdown', () => {
expect(wrapper.find('#project-dropdown').exists()).toBeTruthy();
expect(wrapper.find(GlDeprecatedDropdown).exists()).toBeTruthy();
expect(wrapper.find(GlDropdown).exists()).toBeTruthy();
});
it('contains a number of dropdown items', () => {
expect(wrapper.find(GlDeprecatedDropdownItem).exists()).toBeTruthy();
expect(wrapper.findAll(GlDeprecatedDropdownItem).length).toBe(2);
expect(wrapper.find(GlDropdownItem).exists()).toBeTruthy();
expect(wrapper.findAll(GlDropdownItem).length).toBe(2);
});
});
......
......@@ -33,7 +33,6 @@ describe('ImportProjectsTable', () => {
const importAllFn = jest.fn();
const importAllModalShowFn = jest.fn();
const setPageFn = jest.fn();
const fetchReposFn = jest.fn();
function createComponent({
......@@ -60,7 +59,6 @@ describe('ImportProjectsTable', () => {
stopJobsPolling: jest.fn(),
clearJobsEtagPoll: jest.fn(),
setFilter: jest.fn(),
setPage: setPageFn,
},
});
......
......@@ -16,6 +16,7 @@ import {
RECEIVE_NAMESPACES_SUCCESS,
RECEIVE_NAMESPACES_ERROR,
SET_PAGE,
SET_FILTER,
} from '~/import_projects/store/mutation_types';
import actionsFactory from '~/import_projects/store/actions';
import { getImportTarget } from '~/import_projects/store/getters';
......@@ -40,7 +41,7 @@ const {
fetchImport,
fetchJobs,
fetchNamespaces,
setPage,
setFilter,
} = actionsFactory({
endpoints,
});
......@@ -359,21 +360,17 @@ describe('import_projects store actions', () => {
],
);
});
});
describe('setPage', () => {
it('dispatches fetchRepos and commits setPage when page number differs from current one', async () => {
await testAction(
setPage,
2,
{ ...localState, pageInfo: { page: 1 } },
[{ type: SET_PAGE, payload: 2 }],
[{ type: 'fetchRepos' }],
);
});
it('does not perform any action if page equals to current one', async () => {
await testAction(setPage, 2, { ...localState, pageInfo: { page: 2 } }, [], []);
});
describe('setFilter', () => {
    it('commits the filter value and dispatches fetchRepos', async () => {
await testAction(
setFilter,
'filteredRepo',
localState,
[{ type: SET_FILTER, payload: 'filteredRepo' }],
[{ type: 'fetchRepos' }],
);
});
});
});
......@@ -91,3 +91,18 @@ export const pipelineData = {
[jobId4]: {},
},
};
export const singleStageData = {
stages: [
{
name: 'build',
groups: [
{
name: 'build_1',
jobs: [{ script: 'echo hello', stage: 'build' }],
id: jobId1,
},
],
},
],
};
import { shallowMount } from '@vue/test-utils';
import { pipelineData } from './mock_data';
import { pipelineData, singleStageData } from './mock_data';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import StagePill from '~/pipelines/components/pipeline_graph/stage_pill.vue';
import JobPill from '~/pipelines/components/pipeline_graph/job_pill.vue';
......@@ -18,6 +18,8 @@ describe('pipeline graph component', () => {
};
const findAllStagePills = () => wrapper.findAll(StagePill);
const findAllStageBackgroundElements = () => wrapper.findAll('[data-testid="stage-background"]');
const findStageBackgroundElementAt = index => findAllStageBackgroundElements().at(index);
const findAllJobPills = () => wrapper.findAll(JobPill);
afterEach(() => {
......@@ -41,12 +43,43 @@ describe('pipeline graph component', () => {
beforeEach(() => {
wrapper = createComponent();
});
it('renders the right number of stage pills', () => {
const expectedStagesLength = pipelineData.stages.length;
expect(findAllStagePills()).toHaveLength(expectedStagesLength);
});
it.each`
cssClass | expectedState
${'gl-rounded-bottom-left-6'} | ${true}
${'gl-rounded-top-left-6'} | ${true}
${'gl-rounded-top-right-6'} | ${false}
${'gl-rounded-bottom-right-6'} | ${false}
`(
      'rounds corner: $cssClass should be $expectedState on the first element',
({ cssClass, expectedState }) => {
const classes = findStageBackgroundElementAt(0).classes();
expect(classes.includes(cssClass)).toBe(expectedState);
},
);
it.each`
cssClass | expectedState
${'gl-rounded-bottom-left-6'} | ${false}
${'gl-rounded-top-left-6'} | ${false}
${'gl-rounded-top-right-6'} | ${true}
${'gl-rounded-bottom-right-6'} | ${true}
`(
      'rounds corner: $cssClass should be $expectedState on the last element',
({ cssClass, expectedState }) => {
const classes = findStageBackgroundElementAt(pipelineData.stages.length - 1).classes();
expect(classes.includes(cssClass)).toBe(expectedState);
},
);
it('renders the right number of job pills', () => {
// We count the number of jobs in the mock data
const expectedJobsLength = pipelineData.stages.reduce((acc, val) => {
......@@ -56,4 +89,25 @@ describe('pipeline graph component', () => {
expect(findAllJobPills()).toHaveLength(expectedJobsLength);
});
});
describe('with only one stage', () => {
beforeEach(() => {
wrapper = createComponent({ pipelineData: singleStageData });
});
it.each`
cssClass | expectedState
${'gl-rounded-bottom-left-6'} | ${true}
${'gl-rounded-top-left-6'} | ${true}
${'gl-rounded-top-right-6'} | ${true}
${'gl-rounded-bottom-right-6'} | ${true}
`(
      'rounds corner: $cssClass should be $expectedState on the only element',
({ cssClass, expectedState }) => {
const classes = findStageBackgroundElementAt(0).classes();
expect(classes.includes(cssClass)).toBe(expectedState);
},
);
});
});
......@@ -40,7 +40,6 @@ exports[`Repository last commit component renders commit widget 1`] = `
>
Test
</gl-link-stub>
authored
......@@ -147,7 +146,6 @@ exports[`Repository last commit component renders the signature HTML as returned
>
Test
</gl-link-stub>
authored
......
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20201028160832 do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:findings) { table(:vulnerability_occurrences) }
let(:scanners) { table(:vulnerability_scanners) }
let(:identifiers) { table(:vulnerability_identifiers) }
let(:feedback) { table(:vulnerability_feedback) }
let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let(:vulnerability_1) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
let(:vulnerability_2) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
let(:scanner) { scanners.create!(project_id: project.id, external_id: 'foo', name: 'bar') }
let(:identifier) { identifiers.create!(project_id: project.id, fingerprint: 'foo', external_type: 'bar', external_id: 'zoo', name: 'identifier') }
before do
feedback.create!(feedback_type: 0,
category: 'sast',
project_fingerprint: '418291a26024a1445b23fe64de9380cdcdfd1fa8',
project_id: project.id,
author_id: user.id,
created_at: Time.current)
findings.create!(name: 'Finding',
report_type: 'sast',
project_fingerprint: Gitlab::Database::ShaAttribute.new.serialize('418291a26024a1445b23fe64de9380cdcdfd1fa8'),
location_fingerprint: 'bar',
severity: 1,
confidence: 1,
metadata_version: 1,
raw_metadata: '',
uuid: SecureRandom.uuid,
project_id: project.id,
vulnerability_id: vulnerability_1.id,
scanner_id: scanner.id,
primary_identifier_id: identifier.id)
allow(::Gitlab::BackgroundMigration::Logger).to receive_messages(info: true, warn: true, error: true)
end
describe '#perform' do
it 'updates the missing dismissal information of the vulnerability' do
expect { subject.perform(vulnerability_1.id, vulnerability_2.id) }.to change { vulnerability_1.reload.dismissed_at }.from(nil)
.and change { vulnerability_1.reload.dismissed_by_id }.from(nil).to(user.id)
end
it 'writes log messages' do
subject.perform(vulnerability_1.id, vulnerability_2.id)
expect(::Gitlab::BackgroundMigration::Logger).to have_received(:info).with(migrator: described_class.name,
message: 'Dismissal information has been copied',
count: 2)
expect(::Gitlab::BackgroundMigration::Logger).to have_received(:warn).with(migrator: described_class.name,
message: 'Could not update vulnerability!',
vulnerability_id: vulnerability_2.id)
end
end
end
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
let(:emitter) { SnowplowTracker::Emitter.new('localhost', buffer_size: 1) }
let(:tracker) { SnowplowTracker::Tracker.new(emitter, SnowplowTracker::Subject.new, 'namespace', 'app_id') }
before do
stub_application_setting(snowplow_collector_hostname: 'gitfoo.com')
stub_application_setting(snowplow_app_id: '_abc123_')
end
around do |example|
freeze_time { example.run }
end
context 'when snowplow is enabled' do
before do
stub_application_setting(snowplow_enabled: true)
expect(SnowplowTracker::AsyncEmitter)
.to receive(:new)
.with('gitfoo.com', { protocol: 'https' })
.and_return(emitter)
expect(SnowplowTracker::Tracker)
.to receive(:new)
.with(emitter, an_instance_of(SnowplowTracker::Subject), Gitlab::Tracking::SNOWPLOW_NAMESPACE, '_abc123_')
.and_return(tracker)
end
describe '#event' do
it 'sends event to tracker' do
allow(tracker).to receive(:track_struct_event).and_call_original
subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
expect(tracker)
.to have_received(:track_struct_event)
.with('category', 'action', 'label', 'property', 1.5, nil, (Time.now.to_f * 1000).to_i)
end
end
describe '#self_describing_event' do
it 'sends event to tracker' do
allow(tracker).to receive(:track_self_describing_event).and_call_original
subject.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', foo: 'bar')
expect(tracker).to have_received(:track_self_describing_event) do |event, context, timestamp|
expect(event.to_json[:schema]).to eq('iglu:com.gitlab/foo/jsonschema/1-0-0')
expect(event.to_json[:data]).to eq(foo: 'bar')
expect(context).to eq(nil)
expect(timestamp).to eq((Time.now.to_f * 1000).to_i)
end
end
end
end
context 'when snowplow is not enabled' do
describe '#event' do
it 'does not send event to tracker' do
expect_any_instance_of(SnowplowTracker::Tracker).not_to receive(:track_struct_event)
subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
end
end
describe '#self_describing_event' do
it 'does not send event to tracker' do
expect_any_instance_of(SnowplowTracker::Tracker).not_to receive(:track_self_describing_event)
subject.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', foo: 'bar')
end
end
end
end
......@@ -2,13 +2,13 @@
require 'spec_helper'
RSpec.describe Gitlab::Tracking do
let(:timestamp) { Time.utc(2017, 3, 22) }
before do
stub_application_setting(snowplow_enabled: true)
stub_application_setting(snowplow_collector_hostname: 'gitfoo.com')
stub_application_setting(snowplow_cookie_domain: '.gitfoo.com')
stub_application_setting(snowplow_app_id: '_abc123_')
described_class.instance_variable_set("@snowplow", nil)
end
describe '.snowplow_options' do
......@@ -35,99 +35,23 @@ RSpec.describe Gitlab::Tracking do
end
end
describe 'tracking events' do
shared_examples 'events not tracked' do
it 'does not track events' do
stub_application_setting(snowplow_enabled: false)
expect(SnowplowTracker::AsyncEmitter).not_to receive(:new)
expect(SnowplowTracker::Tracker).not_to receive(:new)
track_event
end
end
around do |example|
travel_to(timestamp) { example.run }
end
before do
described_class.instance_variable_set("@snowplow", nil)
end
let(:tracker) { double }
def receive_events
expect(SnowplowTracker::AsyncEmitter).to receive(:new).with(
'gitfoo.com', { protocol: 'https' }
).and_return('_emitter_')
describe '.event' do
it 'delegates to snowplow destination' do
expect_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
.to receive(:event)
.with('category', 'action', label: 'label', property: 'property', value: 1.5, context: nil)
expect(SnowplowTracker::Tracker).to receive(:new).with(
'_emitter_',
an_instance_of(SnowplowTracker::Subject),
'gl',
'_abc123_'
).and_return(tracker)
described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5)
end
end
describe '.event' do
let(:track_event) do
described_class.event('category', 'action',
label: '_label_',
property: '_property_',
value: '_value_',
context: nil
)
end
it_behaves_like 'events not tracked'
it 'can track events' do
receive_events
expect(tracker).to receive(:track_struct_event).with(
'category',
'action',
'_label_',
'_property_',
'_value_',
nil,
(timestamp.to_f * 1000).to_i
)
track_event
end
end
describe '.self_describing_event' do
let(:track_event) do
described_class.self_describing_event('iglu:com.gitlab/example/jsonschema/1-0-2',
{
foo: 'bar',
foo_count: 42
},
context: nil
)
end
it_behaves_like 'events not tracked'
it 'can track self describing events' do
receive_events
expect(SnowplowTracker::SelfDescribingJson).to receive(:new).with(
'iglu:com.gitlab/example/jsonschema/1-0-2',
{
foo: 'bar',
foo_count: 42
}
).and_return('_event_json_')
expect(tracker).to receive(:track_self_describing_event).with(
'_event_json_',
nil,
(timestamp.to_f * 1000).to_i
)
describe '.self_describing_event' do
it 'delegates to snowplow destination' do
expect_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
.to receive(:self_describing_event)
.with('iglu:com.gitlab/foo/jsonschema/1-0-0', { foo: 'bar' }, context: nil)
track_event
end
described_class.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', foo: 'bar')
end
end
end
......@@ -277,53 +277,86 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
describe '.aggregated_metrics_data' do
context 'no combination is tracked' do
it 'returns empty hash' do
allow(described_class).to receive(:aggregated_metrics).and_return([])
context 'aggregated metrics' do
let(:known_events) do
[
{ name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
{ name: 'event2_slot', redis_slot: "slot", category: 'category2', aggregation: "weekly" },
{ name: 'event3', category: 'category2', aggregation: "weekly" }
].map(&:with_indifferent_access)
end
expect(subject.aggregated_metrics_data).to eq({})
end
let(:aggregated_metrics) do
[
{ name: 'gmau_1', events: %w[event1_slot event2_slot], operator: "ANY" },
{ name: 'gmau_2', events: %w[event3], operator: "ANY" }
].map(&:with_indifferent_access)
end
context 'there are some combinations defined' do
let(:known_events) do
[
{ name: 'event1_slot', redis_slot: "slot", category: 'category1', aggregation: "weekly" },
{ name: 'event2_slot', redis_slot: "slot", category: 'category2', aggregation: "weekly" },
{ name: 'event3', category: 'category2', aggregation: "weekly" }
].map(&:with_indifferent_access)
before do
allow(described_class).to receive(:known_events).and_return(known_events)
allow(described_class).to receive(:aggregated_metrics).and_return(aggregated_metrics)
end
shared_examples 'aggregated_metrics_data' do
context 'no combination is tracked' do
it 'returns empty hash' do
allow(described_class).to receive(:aggregated_metrics).and_return([])
expect(aggregated_metrics_data).to eq({})
end
end
let(:aggregated_metrics) do
[
{ name: 'gmau_1', events: %w[event1_slot event2_slot], operator: "ANY" },
{ name: 'gmau_2', events: %w[event3], operator: "ANY" }
].map(&:with_indifferent_access)
context 'there are some combinations defined' do
it 'returns the number of unique events for all known events' do
results = {
'gmau_1' => 2,
'gmau_2' => 3
}
expect(aggregated_metrics_data).to eq(results)
end
end
end
before do
allow(described_class).to receive(:known_events).and_return(known_events)
allow(described_class).to receive(:aggregated_metrics).and_return(aggregated_metrics)
describe '.aggregated_metrics_weekly_data' do
subject(:aggregated_metrics_data) { described_class.aggregated_metrics_weekly_data }
before do
described_class.track_event(entity1, 'event1_slot', 2.days.ago)
described_class.track_event(entity1, 'event2_slot', 2.days.ago)
described_class.track_event(entity3, 'event2_slot', 3.days.ago)
# events out of time scope
described_class.track_event(entity3, 'event2_slot', 8.days.ago)
# events in different slots
described_class.track_event(entity1, 'event3', 2.days.ago)
described_class.track_event(entity2, 'event3', 2.days.ago)
described_class.track_event(entity4, 'event3', 2.days.ago)
end
it 'returns the number of unique events for all known events' do
results = {
'gmau_1' => 2,
'gmau_2' => 3
}
it_behaves_like 'aggregated_metrics_data'
end
describe '.aggregated_metrics_monthly_data' do
subject(:aggregated_metrics_data) { described_class.aggregated_metrics_monthly_data }
before do
described_class.track_event(entity1, 'event1_slot', 2.days.ago)
described_class.track_event(entity1, 'event2_slot', 10.days.ago)
described_class.track_event(entity3, 'event2_slot', 4.weeks.ago.advance(days: 1))
expect(subject.aggregated_metrics_data).to eq(results)
# events out of time scope
described_class.track_event(entity3, 'event2_slot', 4.weeks.ago.advance(days: -1))
# events in different slots
described_class.track_event(entity1, 'event3', 2.days.ago)
described_class.track_event(entity2, 'event3', 2.days.ago)
described_class.track_event(entity4, 'event3', 2.days.ago)
end
it_behaves_like 'aggregated_metrics_data'
end
end
end
......@@ -32,6 +32,8 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
.not_to include(:merge_requests_users)
expect(subject[:usage_activity_by_stage_monthly][:create])
.to include(:merge_requests_users)
expect(subject[:counts_weekly]).to include(:aggregated_metrics)
expect(subject[:counts_monthly]).to include(:aggregated_metrics)
end
it 'clears memoized values' do
......@@ -1240,28 +1242,44 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
describe 'aggregated_metrics' do
subject(:aggregated_metrics) { described_class.aggregated_metrics }
shared_examples 'aggregated_metrics_for_time_range' do
context 'with product_analytics_aggregated_metrics feature flag on' do
before do
stub_feature_flags(product_analytics_aggregated_metrics: true)
end
context 'with product_analytics_aggregated_metrics feature flag on' do
before do
stub_feature_flags(product_analytics_aggregated_metrics: true)
it 'uses ::Gitlab::UsageDataCounters::HLLRedisCounter#aggregated_metrics_data', :aggregate_failures do
expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(aggregated_metrics_data_method).and_return(global_search_gmau: 123)
expect(aggregated_metrics_payload).to eq(aggregated_metrics: { global_search_gmau: 123 })
end
end
it 'uses ::Gitlab::UsageDataCounters::HLLRedisCounter#aggregated_metrics_data', :aggregate_failures do
expect(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:aggregated_metrics_data).and_return(global_search_gmau: 123)
expect(aggregated_metrics).to eq(aggregated_metrics: { global_search_gmau: 123 })
context 'with product_analytics_aggregated_metrics feature flag off' do
before do
stub_feature_flags(product_analytics_aggregated_metrics: false)
end
it 'returns empty hash', :aggregate_failures do
expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(aggregated_metrics_data_method)
expect(aggregated_metrics_payload).to be {}
end
end
end
context 'with product_analytics_aggregated_metrics feature flag off' do
before do
stub_feature_flags(product_analytics_aggregated_metrics: false)
end
describe '.aggregated_metrics_weekly' do
subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_weekly }
it 'returns empty hash', :aggregate_failures do
expect(::Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:aggregated_metrics_data)
expect(aggregated_metrics).to be {}
end
let(:aggregated_metrics_data_method) { :aggregated_metrics_weekly_data }
it_behaves_like 'aggregated_metrics_for_time_range'
end
describe '.aggregated_metrics_monthly' do
subject(:aggregated_metrics_payload) { described_class.aggregated_metrics_monthly }
let(:aggregated_metrics_data_method) { :aggregated_metrics_monthly_data }
it_behaves_like 'aggregated_metrics_for_time_range'
end
end
......
# frozen_string_literal: true
require 'spec_helper'
require_migration!
RSpec.describe SchedulePopulateMissingDismissalInformationForVulnerabilities do
let(:users) { table(:users) }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:vulnerabilities) { table(:vulnerabilities) }
let(:user) { users.create!(name: 'test', email: 'test@example.com', projects_limit: 5) }
let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
let!(:vulnerability_1) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
let!(:vulnerability_2) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id, dismissed_at: Time.now) }
let!(:vulnerability_3) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id, dismissed_by_id: user.id) }
let!(:vulnerability_4) { vulnerabilities.create!(title: 'title', state: 2, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id, dismissed_at: Time.now, dismissed_by_id: user.id) }
let!(:vulnerability_5) { vulnerabilities.create!(title: 'title', state: 1, severity: 0, confidence: 5, report_type: 2, project_id: project.id, author_id: user.id) }
around do |example|
freeze_time { Sidekiq::Testing.fake! { example.run } }
end
before do
stub_const("#{described_class.name}::BATCH_SIZE", 1)
end
it 'schedules the background jobs', :aggregate_failures do
migrate!
expect(BackgroundMigrationWorker.jobs.size).to be(3)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(3.minutes, vulnerability_1.id)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(6.minutes, vulnerability_2.id)
expect(described_class::MIGRATION_CLASS).to be_scheduled_delayed_migration(9.minutes, vulnerability_3.id)
end
end
......@@ -67,7 +67,8 @@ RSpec.describe ApplicationRecord do
end
it 'raises a validation error if the record was not persisted' do
expect { Suggestion.find_or_create_by!(note: nil) }.to raise_error(ActiveRecord::RecordInvalid)
expect { Suggestion.safe_find_or_create_by!(note: nil) }
.to raise_error(ActiveRecord::RecordInvalid)
end
it 'passes a block to find_or_create_by' do
......@@ -75,6 +76,14 @@ RSpec.describe ApplicationRecord do
Suggestion.safe_find_or_create_by!(suggestion_attributes, &block)
end.to yield_with_args(an_object_having_attributes(suggestion_attributes))
end
it 'raises a record not found error in case of attributes mismatch' do
suggestion = Suggestion.safe_find_or_create_by!(suggestion_attributes)
attributes = suggestion_attributes.merge(outdated: !suggestion.outdated)
expect { Suggestion.safe_find_or_create_by!(attributes) }
.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
......
......@@ -967,23 +967,72 @@ RSpec.describe Group do
context 'expanded group members' do
let(:indirect_user) { create(:user) }
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
subgroup.add_user(indirect_user, GroupMember::OWNER)
context 'two_factor_requirement is enabled' do
context 'two_factor_requirement is also enabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
group.update!(require_two_factor_authentication: true)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
end
context 'two_factor_requirement is disabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group, require_two_factor_authentication: true)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
        it 'enables two_factor_requirement for ancestor group member' do
ancestor_group = create(:group)
ancestor_group.add_user(indirect_user, GroupMember::OWNER)
group.update!(parent: ancestor_group)
group.update!(require_two_factor_authentication: true)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
end
end
it 'does not enable two_factor_requirement for ancestor group member' do
ancestor_group = create(:group)
ancestor_group.add_user(indirect_user, GroupMember::OWNER)
group.update!(parent: ancestor_group)
context 'two_factor_requirement is disabled' do
context 'two_factor_requirement is enabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
group.update!(require_two_factor_authentication: true)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
end
context 'two_factor_requirement is also disabled for ancestor group' do
it 'disables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_falsey
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_falsey
end
it 'disables two_factor_requirement for ancestor group member' do
ancestor_group = create(:group, require_two_factor_authentication: false)
indirect_user.update!(require_two_factor_authentication_from_group: true)
ancestor_group.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_falsey
end
end
end
end
......
......@@ -36,13 +36,10 @@ RSpec.describe NamespaceSetting, type: :model do
context "when an empty string" do
before do
namespace_settings.default_branch_name = ''
namespace_settings.default_branch_name = ""
end
it "returns an error" do
expect(namespace_settings.valid?).to be_falsey
expect(namespace_settings.errors.full_messages).not_to be_empty
end
it_behaves_like "doesn't return an error"
end
end
......
......@@ -161,60 +161,26 @@ RSpec.describe Clusters::Kubernetes::CreateOrUpdateServiceAccountService do
it_behaves_like 'creates service account and token'
context 'kubernetes_cluster_namespace_role_admin FF is enabled' do
before do
stub_feature_flags(kubernetes_cluster_namespace_role_admin: true)
end
it 'creates a namespaced role binding with admin access' do
subject
expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
body: hash_including(
metadata: { name: "gitlab-#{namespace}", namespace: "#{namespace}" },
roleRef: {
apiGroup: 'rbac.authorization.k8s.io',
kind: 'ClusterRole',
name: 'admin'
},
subjects: [
{
kind: 'ServiceAccount',
name: service_account_name,
namespace: namespace
}
]
)
)
end
end
it 'creates a namespaced role binding with admin access' do
subject
context 'kubernetes_cluster_namespace_role_admin FF is disabled' do
before do
stub_feature_flags(kubernetes_cluster_namespace_role_admin: false)
end
it 'creates a namespaced role binding with edit access' do
subject
expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
body: hash_including(
metadata: { name: "gitlab-#{namespace}", namespace: "#{namespace}" },
roleRef: {
apiGroup: 'rbac.authorization.k8s.io',
kind: 'ClusterRole',
name: 'edit'
},
subjects: [
{
kind: 'ServiceAccount',
name: service_account_name,
namespace: namespace
}
]
)
expect(WebMock).to have_requested(:put, api_url + "/apis/rbac.authorization.k8s.io/v1/namespaces/#{namespace}/rolebindings/#{role_binding_name}").with(
body: hash_including(
metadata: { name: "gitlab-#{namespace}", namespace: "#{namespace}" },
roleRef: {
apiGroup: 'rbac.authorization.k8s.io',
kind: 'ClusterRole',
name: 'admin'
},
subjects: [
{
kind: 'ServiceAccount',
name: service_account_name,
namespace: namespace
}
]
)
end
)
end
it 'creates a role binding granting crossplane database permissions to the service account' do
......
......@@ -157,6 +157,16 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
it 'filters the list, ignoring the case of the name' do
get :status, params: { filter: 'EMACS' }, format: :json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.dig("imported_projects").count).to eq(0)
expect(json_response.dig("provider_repos").count).to eq(1)
expect(json_response.dig("provider_repos", 0, "id")).to eq(repo_2.id)
expect(json_response.dig("namespaces", 0, "id")).to eq(group.id)
end
context 'when user input contains html' do
let(:expected_filter) { 'test' }
let(:filter) { "<html>#{expected_filter}</html>" }
......@@ -167,6 +177,23 @@ RSpec.shared_examples 'a GitHub-ish import controller: GET status' do
expect(assigns(:filter)).to eq(expected_filter)
end
end
context 'when the client returns a non-string name' do
before do
repos = [build(:project, name: 2, path: 'test')]
client = stub_client(repos: repos)
allow(client).to receive(:each_page).and_return([OpenStruct.new(objects: repos)].to_enum)
end
it 'does not raise an error' do
get :status, params: { filter: '2' }, format: :json
expect(response).to have_gitlab_http_status :ok
expect(json_response.dig("provider_repos").count).to eq(1)
end
end
end
end
......