Commit b4ded0ba authored by GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 2aaef94c
......@@ -75,16 +75,18 @@
changes: *code-backstage-qa-patterns
when: on_success
.rails:rules:master-refs:
.rails:rules:master-refs-code-backstage-qa:
rules:
- <<: *if-master-refs
changes: *code-backstage-qa-patterns
when: on_success
.rails:rules:master-refs-ee-only:
.rails:rules:master-refs-code-backstage-qa-ee-only:
rules:
- <<: *if-not-ee
when: never
- <<: *if-master-refs
changes: *code-backstage-qa-patterns
when: on_success
.rails:rules:ee-only:
......@@ -330,12 +332,12 @@ coverage:
rspec quarantine pg9:
extends:
- .rspec-base-quarantine
- .rails:rules:master-refs
- .rails:rules:master-refs-code-backstage-qa
.rspec-base-pg10:
extends:
- .rspec-base
- .rails:rules:master-refs
- .rails:rules:master-refs-code-backstage-qa
- .use-pg10
rspec unit pg10:
......@@ -357,7 +359,7 @@ rspec system pg10:
rspec-ee quarantine pg9:
extends:
- .rspec-base-quarantine
- .rails:rules:master-refs-ee-only
- .rails:rules:master-refs-code-backstage-qa-ee-only
variables:
RSPEC_OPTS: "--tag quarantine -- ee/spec/"
......@@ -365,25 +367,25 @@ rspec-ee migration pg10:
extends:
- .rspec-ee-base-pg10
- .rspec-base-migration
- .rails:rules:master-refs
- .rails:rules:master-refs-code-backstage-qa
parallel: 2
rspec-ee unit pg10:
extends:
- .rspec-ee-base-pg10
- .rails:rules:master-refs
- .rails:rules:master-refs-code-backstage-qa
parallel: 10
rspec-ee integration pg10:
extends:
- .rspec-ee-base-pg10
- .rails:rules:master-refs
- .rails:rules:master-refs-code-backstage-qa
parallel: 3
rspec-ee system pg10:
extends:
- .rspec-ee-base-pg10
- .rails:rules:master-refs
- .rails:rules:master-refs-code-backstage-qa
parallel: 5
# ee + master-only jobs #
#########################
......
<script>
import { GlButton, GlButtonGroup, GlIcon, GlTooltipDirective } from '@gitlab/ui';
import {
RICH_BLOB_VIEWER,
RICH_BLOB_VIEWER_TITLE,
SIMPLE_BLOB_VIEWER,
SIMPLE_BLOB_VIEWER_TITLE,
} from './constants';
export default {
components: {
GlIcon,
GlButtonGroup,
GlButton,
},
directives: {
GlTooltip: GlTooltipDirective,
},
props: {
blob: {
type: Object,
required: true,
},
},
data() {
return {
viewer: this.blob.richViewer ? RICH_BLOB_VIEWER : SIMPLE_BLOB_VIEWER,
};
},
computed: {
isSimpleViewer() {
return this.viewer === SIMPLE_BLOB_VIEWER;
},
isRichViewer() {
return this.viewer === RICH_BLOB_VIEWER;
},
},
methods: {
switchToViewer(viewer) {
if (viewer !== this.viewer) {
this.viewer = viewer;
this.$emit('switch-viewer', viewer);
}
},
},
SIMPLE_BLOB_VIEWER,
RICH_BLOB_VIEWER,
SIMPLE_BLOB_VIEWER_TITLE,
RICH_BLOB_VIEWER_TITLE,
};
</script>
<template>
<gl-button-group class="js-blob-viewer-switcher ml-2">
<gl-button
v-gl-tooltip.hover
:aria-label="$options.SIMPLE_BLOB_VIEWER_TITLE"
:title="$options.SIMPLE_BLOB_VIEWER_TITLE"
:selected="isSimpleViewer"
:class="{ active: isSimpleViewer }"
@click="switchToViewer($options.SIMPLE_BLOB_VIEWER)"
>
<gl-icon name="code" :size="14" />
</gl-button>
<gl-button
v-gl-tooltip.hover
:aria-label="$options.RICH_BLOB_VIEWER_TITLE"
:title="$options.RICH_BLOB_VIEWER_TITLE"
:selected="isRichViewer"
:class="{ active: isRichViewer }"
@click="switchToViewer($options.RICH_BLOB_VIEWER)"
>
<gl-icon name="document" :size="14" />
</gl-button>
</gl-button-group>
</template>
......@@ -3,3 +3,9 @@ import { __ } from '~/locale';
export const BTN_COPY_CONTENTS_TITLE = __('Copy file contents');
export const BTN_RAW_TITLE = __('Open raw');
export const BTN_DOWNLOAD_TITLE = __('Download');
export const SIMPLE_BLOB_VIEWER = 'simple';
export const SIMPLE_BLOB_VIEWER_TITLE = __('Display source');
export const RICH_BLOB_VIEWER = 'rich';
export const RICH_BLOB_VIEWER_TITLE = __('Display rendered file');
......@@ -161,50 +161,7 @@ class List {
}
addMultipleIssues(issues, listFrom, newIndex) {
let moveBeforeId = null;
let moveAfterId = null;
const listHasIssues = issues.every(issue => this.findIssue(issue.id));
if (!listHasIssues) {
if (newIndex !== undefined) {
if (this.issues[newIndex - 1]) {
moveBeforeId = this.issues[newIndex - 1].id;
}
if (this.issues[newIndex]) {
moveAfterId = this.issues[newIndex].id;
}
this.issues.splice(newIndex, 0, ...issues);
} else {
this.issues.push(...issues);
}
if (this.label) {
issues.forEach(issue => issue.addLabel(this.label));
}
if (this.assignee) {
if (listFrom && listFrom.type === 'assignee') {
issues.forEach(issue => issue.removeAssignee(listFrom.assignee));
}
issues.forEach(issue => issue.addAssignee(this.assignee));
}
if (IS_EE && this.milestone) {
if (listFrom && listFrom.type === 'milestone') {
issues.forEach(issue => issue.removeMilestone(listFrom.milestone));
}
issues.forEach(issue => issue.addMilestone(this.milestone));
}
if (listFrom) {
this.issuesSize += issues.length;
this.updateMultipleIssues(issues, listFrom, moveBeforeId, moveAfterId);
}
}
boardsStore.addMultipleListIssues(this, issues, listFrom, newIndex);
}
addIssue(issue, listFrom, newIndex) {
......
......@@ -131,6 +131,53 @@ const boardsStore = {
listFrom.update();
},
addMultipleListIssues(list, issues, listFrom, newIndex) {
let moveBeforeId = null;
let moveAfterId = null;
const listHasIssues = issues.every(issue => list.findIssue(issue.id));
if (!listHasIssues) {
if (newIndex !== undefined) {
if (list.issues[newIndex - 1]) {
moveBeforeId = list.issues[newIndex - 1].id;
}
if (list.issues[newIndex]) {
moveAfterId = list.issues[newIndex].id;
}
list.issues.splice(newIndex, 0, ...issues);
} else {
list.issues.push(...issues);
}
if (list.label) {
issues.forEach(issue => issue.addLabel(list.label));
}
if (list.assignee) {
if (listFrom && listFrom.type === 'assignee') {
issues.forEach(issue => issue.removeAssignee(listFrom.assignee));
}
issues.forEach(issue => issue.addAssignee(list.assignee));
}
if (IS_EE && list.milestone) {
if (listFrom && listFrom.type === 'milestone') {
issues.forEach(issue => issue.removeMilestone(listFrom.milestone));
}
issues.forEach(issue => issue.addMilestone(list.milestone));
}
if (listFrom) {
list.issuesSize += issues.length;
list.updateMultipleIssues(issues, listFrom, moveBeforeId, moveAfterId);
}
}
},
startMoving(list, issue) {
Object.assign(this.moving, { list, issue });
},
......
import 'core-js/stable';
// Browser polyfills
import 'formdata-polyfill';
import './polyfills/custom_event';
......
......@@ -168,6 +168,7 @@ export default {
'setIndexPath',
'fetchPaginatedResults',
'updateStatus',
'removeIgnoredResolvedErrors',
]),
setSearchText(text) {
this.errorSearchQuery = text;
......@@ -196,9 +197,9 @@ export default {
updateIssueStatus(errorId, status) {
this.updateStatus({
endpoint: this.getIssueUpdatePath(errorId),
redirectUrl: this.listPath,
status,
});
this.removeIgnoredResolvedErrors(errorId);
},
},
};
......@@ -235,7 +236,6 @@ export default {
</gl-dropdown>
<div class="filtered-search-input-container flex-fill">
<gl-form-input
v-model="errorSearchQuery"
class="pl-2 filtered-search"
:disabled="loading"
:placeholder="__('Search or filter results…')"
......
......@@ -100,4 +100,8 @@ export const fetchPaginatedResults = ({ commit, dispatch }, cursor) => {
dispatch('startPolling');
};
export const removeIgnoredResolvedErrors = ({ commit }, error) => {
commit(types.REMOVE_IGNORED_RESOLVED_ERRORS, error);
};
export default () => {};
......@@ -9,3 +9,4 @@ export const SET_ENDPOINT = 'SET_ENDPOINT';
export const SET_SORT_FIELD = 'SET_SORT_FIELD';
export const SET_SEARCH_QUERY = 'SET_SEARCH_QUERY';
export const SET_CURSOR = 'SET_CURSOR';
export const REMOVE_IGNORED_RESOLVED_ERRORS = 'REMOVE_IGNORED_RESOLVED_ERRORS';
......@@ -59,4 +59,7 @@ export default {
[types.SET_ENDPOINT](state, endpoint) {
state.endpoint = endpoint;
},
[types.REMOVE_IGNORED_RESOLVED_ERRORS](state, error) {
state.errors = state.errors.filter(err => err.id !== error);
},
};
import _ from 'underscore';
import { isMatch } from 'lodash';
import { __, s__, sprintf } from '~/locale';
import { getDisplayName } from '../utils';
......@@ -7,7 +7,7 @@ export const hasProjects = state => Boolean(state.projects) && state.projects.le
export const isProjectInvalid = (state, getters) =>
Boolean(state.selectedProject) &&
getters.hasProjects &&
!state.projects.some(project => _.isMatch(state.selectedProject, project));
!state.projects.some(project => isMatch(state.selectedProject, project));
export const dropdownLabel = (state, getters) => {
if (state.selectedProject !== null) {
......
import _ from 'underscore';
import { pick } from 'lodash';
import { convertObjectPropsToCamelCase, parseBoolean } from '~/lib/utils/common_utils';
import * as types from './mutation_types';
import { projectKeys } from '../utils';
......@@ -12,7 +12,7 @@ export default {
.map(convertObjectPropsToCamelCase)
// The `pick` strips out extra properties returned from Sentry.
// Such properties could be problematic later, e.g. when checking whether `projects` contains `selectedProject`
.map(project => _.pick(project, projectKeys));
.map(project => pick(project, projectKeys));
},
[types.RESET_CONNECT](state) {
state.connectSuccessful = false;
......@@ -29,10 +29,7 @@ export default {
state.operationsSettingsEndpoint = operationsSettingsEndpoint;
if (project) {
state.selectedProject = _.pick(
convertObjectPropsToCamelCase(JSON.parse(project)),
projectKeys,
);
state.selectedProject = pick(convertObjectPropsToCamelCase(JSON.parse(project)), projectKeys);
}
},
[types.UPDATE_API_HOST](state, apiHost) {
......
fragment Author on User {
avatarUrl
name
username
webUrl
}
......@@ -10,6 +10,7 @@ import pipelineHeader from './components/header_component.vue';
import eventHub from './event_hub';
import TestReports from './components/test_reports/test_reports.vue';
import testReportsStore from './stores/test_reports';
import axios from '~/lib/utils/axios_utils';
Vue.use(Translate);
......@@ -111,5 +112,12 @@ export default () => {
return createElement('test-reports');
},
});
axios
.get(dataset.testReportEndpoint)
.then(({ data }) => {
document.querySelector('.js-test-report-badge-counter').innerHTML = data.total_count;
})
.catch(() => {});
}
};
fragment Author on Snippet {
author {
name,
avatarUrl,
username,
webUrl
}
}
\ No newline at end of file
#import '../fragments/snippetBase.fragment.graphql'
#import '../fragments/project.fragment.graphql'
#import '../fragments/author.fragment.graphql'
#import "~/graphql_shared/fragments/author.fragment.graphql"
query GetSnippetQuery($ids: [ID!]) {
snippets(ids: $ids) {
......@@ -8,7 +8,9 @@ query GetSnippetQuery($ids: [ID!]) {
node {
...SnippetBase
...Project
...Author
author {
...Author
}
}
}
}
......
......@@ -8,16 +8,13 @@ class KeysFinder
'md5' => 'fingerprint'
}.freeze
def initialize(current_user, params)
@current_user = current_user
def initialize(params)
@params = params
end
def execute
raise GitLabAccessDeniedError unless current_user.admin?
keys = by_key_type
keys = by_user(keys)
keys = by_users(keys)
keys = sort(keys)
by_fingerprint(keys)
......@@ -25,7 +22,7 @@ class KeysFinder
private
attr_reader :current_user, :params
attr_reader :params
def by_key_type
if params[:key_type] == 'ssh'
......@@ -39,10 +36,10 @@ class KeysFinder
keys.order_last_used_at_desc
end
def by_user(keys)
return keys unless params[:user]
def by_users(keys)
return keys unless params[:users]
keys.for_user(params[:user])
keys.for_user(params[:users])
end
def by_fingerprint(keys)
......
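For orientation, a minimal usage sketch of the reworked finder (not part of this commit; parameter values are borrowed from the spec changes further down, and authorization now happens in the API layer via authenticated_with_can_read_all_resources! rather than inside the finder):
# Sketch only: KeysFinder no longer receives current_user, so the caller
# is expected to have authorized the request already.
keys = KeysFinder.new(key_type: 'ssh', users: user).execute
# => the user's SSH keys, ordered by last_used_at desc
key = KeysFinder.new(fingerprint: 'ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1').execute
# => the matching key, or nil when no key has that fingerprint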
......@@ -18,7 +18,7 @@
%li.js-tests-tab-link
= link_to test_report_project_pipeline_path(@project, @pipeline), data: { target: '#js-tab-tests', action: 'test_report', toggle: 'tab' }, class: 'test-tab' do
= s_('TestReports|Tests')
%span.badge.badge-pill= pipeline.test_reports.total_count
%span.badge.badge-pill.js-test-report-badge-counter
= render_if_exists "projects/pipelines/tabs_holder", pipeline: @pipeline, project: @project
.tab-content
......
---
title: Support for table of contents tag in GitLab Flavored Markdown
merge_request: 24196
author:
type: added
---
title: Removes store logic from issue board models
merge_request: 21404
author: nuwe1
type: other
---
title: Upgrade to Gitaly v1.86.0
merge_request: 24610
author:
type: changed
......@@ -106,6 +106,7 @@ recorded:
- Grant OAuth access
- Started/stopped user impersonation
- Changed username
- User was deleted
It is possible to filter particular actions by choosing an audit data type from
the filter dropdown box. You can further filter by specific group, project or user
......
......@@ -81,6 +81,20 @@ Since use of the group-managed account requires the use of SSO, users of group-m
- The user will be unable to access the group (their credentials will no longer work on the identity provider when prompted to SSO).
- Contributions in the group (e.g. issues, merge requests) will remain intact.
##### Credentials inventory for Group-managed accounts **(ULTIMATE)**
> [Introduced in GitLab 12.8](https://gitlab.com/gitlab-org/gitlab/issues/38133)
Owners who manage user accounts in a group can view the following details of personal access tokens and SSH keys:
- Owners
- Scopes
- Usage patterns
To access the Credentials inventory of a group, navigate to **{shield}** **Security & Compliance > Credentials** in your group's sidebar.
This feature is similar to the [Credentials inventory for self-managed instances](../../admin_area/credentials_inventory.md).
#### Assertions
When using group-managed accounts, the following user details need to be passed to GitLab as SAML
......
......@@ -26,7 +26,7 @@ module API
get do
authenticated_with_can_read_all_resources!
key = KeysFinder.new(current_user, params).execute
key = KeysFinder.new(params).execute
not_found!('Key') unless key
......
......@@ -25,12 +25,10 @@ module Banzai
# * [[http://example.com/images/logo.png]]
# * [[http://example.com/images/logo.png|alt=Logo]]
#
# - Insert a Table of Contents list:
#
# * [[_TOC_]]
#
# Based on Gollum::Filter::Tags
#
# Note: the table of contents tag is now handled by TableOfContentsTagFilter
#
# Context options:
# :project_wiki (required) - Current project wiki.
#
......@@ -64,23 +62,11 @@ module Banzai
def call
doc.search(".//text()").each do |node|
next if has_ancestor?(node, IGNORED_ANCESTOR_TAGS)
next unless node.content =~ TAGS_PATTERN
# A Gollum ToC tag is `[[_TOC_]]`, but due to MarkdownFilter running
# before this one, it will be converted into `[[<em>TOC</em>]]`, so it
# needs special-case handling
if toc_tag?(node)
process_toc_tag(node)
else
content = node.content
next unless content =~ TAGS_PATTERN
html = process_tag($1)
html = process_tag($1)
if html && html != node.content
node.replace(html)
end
end
node.replace(html) if html && html != node.content
end
doc
......@@ -88,12 +74,6 @@ module Banzai
private
# Replace an entire `[[<em>TOC</em>]]` node with the result generated by
# TableOfContentsFilter
def process_toc_tag(node)
node.parent.parent.replace(result[:toc].presence || '')
end
# Process a single tag into its final HTML form.
#
# tag - The String tag contents (the stuff inside the double brackets).
......@@ -129,12 +109,6 @@ module Banzai
end
end
def toc_tag?(node)
node.content == 'TOC' &&
node.parent.name == 'em' &&
node.parent.parent.text == '[[TOC]]'
end
def image?(path)
path =~ ALLOWED_IMAGE_EXTENSIONS
end
......
# frozen_string_literal: true
module Banzai
module Filter
# Using `[[_TOC_]]`, inserts a Table of Contents list.
# This syntax is based on the Gollum syntax. This way we have
# some consistency between the wiki and normal markdown.
# If there ever emerges a markdown standard, we can implement
# that here.
#
# The support for this has been removed from GollumTagsFilter
#
# Based on Banzai::Filter::GollumTagsFilter
class TableOfContentsTagFilter < HTML::Pipeline::Filter
TEXT_QUERY = %q(descendant-or-self::text()[ancestor::p and contains(., 'TOC')])
def call
return doc if context[:no_header_anchors]
doc.xpath(TEXT_QUERY).each do |node|
# A Gollum ToC tag is `[[_TOC_]]`, but due to MarkdownFilter running
# before this one, it will be converted into `[[<em>TOC</em>]]`, so it
# needs special-case handling
process_toc_tag(node) if toc_tag?(node)
end
doc
end
private
# Replace an entire `[[<em>TOC</em>]]` node with the result generated by
# TableOfContentsFilter
def process_toc_tag(node)
node.parent.parent.replace(result[:toc].presence || '')
end
def toc_tag?(node)
node.content == 'TOC' &&
node.parent.name == 'em' &&
node.parent.parent.text == '[[TOC]]'
end
end
end
end
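As a rough illustration (not part of this commit), the new filter follows the same HTML::Pipeline calling convention the updated FilterSpecHelper exercises further down, consuming the ToC that TableOfContentsFilter stored in the shared result hash:
# Sketch only; the :toc entry is assumed to have been populated by
# TableOfContentsFilter earlier in the pipeline.
html = '<p>[[<em>TOC</em>]]</p>'
doc = Banzai::Filter::TableOfContentsTagFilter.call(html, {}, { toc: '<ul class="section-nav"></ul>' })
doc.to_html
# => the rendered table of contents, or '' when no ToC was built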
......@@ -32,6 +32,7 @@ module Banzai
Filter::InlineMetricsFilter,
Filter::InlineGrafanaMetricsFilter,
Filter::TableOfContentsFilter,
Filter::TableOfContentsTagFilter,
Filter::AutolinkFilter,
Filter::ExternalLinkFilter,
Filter::SuggestionFilter,
......
......@@ -11,6 +11,7 @@ class Feature
inforef_uploadpack_cache
commit_without_batch_check
use_core_delta_islands
use_git_protocol_v2
].freeze
DEFAULT_ON_FLAGS = Set.new([]).freeze
......
......@@ -6732,6 +6732,12 @@ msgstr ""
msgid "Display name"
msgstr ""
msgid "Display rendered file"
msgstr ""
msgid "Display source"
msgstr ""
msgid "Displays dependencies and known vulnerabilities, based on the %{linkStart}latest pipeline%{linkEnd} scan"
msgstr ""
......
......@@ -356,6 +356,18 @@ describe 'Pipeline', :js do
end
end
context 'test tabs' do
let(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
it 'shows badge counter in Tests tab' do
visit_pipeline
wait_for_requests
expect(pipeline.test_reports.total_count).to eq(4)
expect(page.find('.js-test-report-badge-counter').text).to eq(pipeline.test_reports.total_count.to_s)
end
end
context 'retrying jobs' do
before do
visit_pipeline
......
......@@ -3,7 +3,7 @@
require 'spec_helper'
describe KeysFinder do
subject { described_class.new(user, params).execute }
subject { described_class.new(params).execute }
let(:user) { create(:user) }
let(:params) { {} }
......@@ -20,159 +20,149 @@ describe KeysFinder do
let!(:key_2) { create(:personal_key, last_used_at: nil, user: user) }
let!(:key_3) { create(:personal_key, last_used_at: 2.days.ago) }
context 'with a regular user' do
it 'raises GitLabAccessDeniedError' do
expect { subject }.to raise_error(KeysFinder::GitLabAccessDeniedError)
end
end
context 'key_type' do
let!(:deploy_key) { create(:deploy_key) }
context 'with an admin user' do
let(:user) {create(:admin)}
context 'when `key_type` is `ssh`' do
before do
params[:key_type] = 'ssh'
end
it 'returns only SSH keys' do
expect(subject).to contain_exactly(key_1, key_2, key_3)
end
end
context 'key_type' do
let!(:deploy_key) { create(:deploy_key) }
context 'when `key_type` is not specified' do
it 'returns all types of keys' do
expect(subject).to contain_exactly(key_1, key_2, key_3, deploy_key)
end
end
end
context 'when `key_type` is `ssh`' do
context 'fingerprint' do
context 'with invalid fingerprint' do
context 'with invalid MD5 fingerprint' do
before do
params[:key_type] = 'ssh'
params[:fingerprint] = '11:11:11:11'
end
it 'returns only SSH keys' do
expect(subject).to contain_exactly(key_1, key_2, key_3)
it 'raises InvalidFingerprint' do
expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
end
end
context 'when `key_type` is not specified' do
it 'returns all types of keys' do
expect(subject).to contain_exactly(key_1, key_2, key_3, deploy_key)
context 'with invalid SHA fingerprint' do
before do
params[:fingerprint] = 'nUhzNyftwAAKs7HufskYTte2g'
end
it 'raises InvalidFingerprint' do
expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
end
end
end
context 'fingerprint' do
context 'with invalid fingerprint' do
context 'with invalid MD5 fingerprint' do
context 'with valid fingerprints' do
let!(:deploy_key) do
create(:deploy_key,
user: user,
key: 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1017k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=',
fingerprint: '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4',
fingerprint_sha256: '4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk')
end
context 'personal key with valid MD5 params' do
context 'with an existent fingerprint' do
before do
params[:fingerprint] = '11:11:11:11'
params[:fingerprint] = 'ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1'
end
it 'raises InvalidFingerprint' do
expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
it 'returns the key' do
expect(subject).to eq(key_1)
expect(subject.user).to eq(user)
end
end
context 'with invalid SHA fingerprint' do
context 'deploy key with an existent fingerprint' do
before do
params[:fingerprint] = 'nUhzNyftwAAKs7HufskYTte2g'
params[:fingerprint] = '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4'
end
it 'raises InvalidFingerprint' do
expect { subject }.to raise_error(KeysFinder::InvalidFingerprint)
it 'returns the key' do
expect(subject).to eq(deploy_key)
expect(subject.user).to eq(user)
end
end
end
context 'with valid fingerprints' do
let!(:deploy_key) do
create(:deploy_key,
user: user,
key: 'ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIEAiPWx6WM4lhHNedGfBpPJNPpZ7yKu+dnn1SJejgt1017k6YjzGGphH2TUxwKzxcKDKKezwkpfnxPkSMkuEspGRt/aZZ9wa++Oi7Qkr8prgHc4soW6NUlfDzpvZK2H5E7eQaSeP3SAwGmQKUFHCddNaP0L+hM7zhFNzjFvpaMgJw0=',
fingerprint: '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4',
fingerprint_sha256: '4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk')
end
context 'personal key with valid MD5 params' do
context 'with an existent fingerprint' do
before do
params[:fingerprint] = 'ba:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d1'
end
it 'returns the key' do
expect(subject).to eq(key_1)
expect(subject.user).to eq(user)
end
context 'with a non-existent fingerprint' do
before do
params[:fingerprint] = 'bb:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d2'
end
context 'deploy key with an existent fingerprint' do
before do
params[:fingerprint] = '8a:4a:12:92:0b:50:47:02:d4:5a:8e:a9:44:4e:08:b4'
end
it 'returns the key' do
expect(subject).to eq(deploy_key)
expect(subject.user).to eq(user)
end
it 'returns nil' do
expect(subject).to be_nil
end
end
end
context 'with a non-existent fingerprint' do
before do
params[:fingerprint] = 'bb:81:59:68:d7:6c:cd:02:02:bf:6a:9b:55:4e:af:d2'
end
context 'personal key with valid SHA256 params' do
context 'with an existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg'
end
it 'returns nil' do
expect(subject).to be_nil
end
it 'returns key' do
expect(subject).to eq(key_1)
expect(subject.user).to eq(user)
end
end
context 'personal key with valid SHA256 params' do
context 'with an existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:nUhzNyftwADy8AH3wFY31tAKs7HufskYTte2aXo/lCg'
end
it 'returns key' do
expect(subject).to eq(key_1)
expect(subject.user).to eq(user)
end
context 'deploy key with an existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk'
end
context 'deploy key with an existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:4DPHOVNh53i9dHb5PpY2vjfyf5qniTx1/pBFPoZLDdk'
end
it 'returns key' do
expect(subject).to eq(deploy_key)
expect(subject.user).to eq(user)
end
it 'returns key' do
expect(subject).to eq(deploy_key)
expect(subject.user).to eq(user)
end
end
context 'with a non-existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:xTjuFqftwADy8AH3wFY31tAKs7HufskYTte2aXi/mNp'
end
context 'with a non-existent fingerprint' do
before do
params[:fingerprint] = 'SHA256:xTjuFqftwADy8AH3wFY31tAKs7HufskYTte2aXi/mNp'
end
it 'returns nil' do
expect(subject).to be_nil
end
it 'returns nil' do
expect(subject).to be_nil
end
end
end
end
end
context 'user' do
context 'without user' do
it 'contains ssh_keys of all users in the system' do
expect(subject).to contain_exactly(key_1, key_2, key_3)
end
context 'user' do
context 'without user' do
it 'contains ssh_keys of all users in the system' do
expect(subject).to contain_exactly(key_1, key_2, key_3)
end
end
context 'with user' do
before do
params[:user] = user
end
context 'with user' do
before do
params[:users] = user
end
it 'contains ssh_keys of only the specified users' do
expect(subject).to contain_exactly(key_1, key_2)
end
it 'contains ssh_keys of only the specified users' do
expect(subject).to contain_exactly(key_1, key_2)
end
end
end
context 'sort order' do
it 'sorts in last_used_at_desc order' do
expect(subject).to eq([key_3, key_1, key_2])
end
context 'sort order' do
it 'sorts in last_used_at_desc order' do
expect(subject).to eq([key_3, key_1, key_2])
end
end
end
import { mount } from '@vue/test-utils';
import BlobHeaderViewerSwitcher from '~/blob/components/blob_header_viewer_switcher.vue';
import {
RICH_BLOB_VIEWER,
RICH_BLOB_VIEWER_TITLE,
SIMPLE_BLOB_VIEWER,
SIMPLE_BLOB_VIEWER_TITLE,
} from '~/blob/components/constants';
import { GlButtonGroup, GlButton } from '@gitlab/ui';
import { Blob } from './mock_data';
describe('Blob Header Viewer Switcher', () => {
let wrapper;
function createComponent(props = {}) {
wrapper = mount(BlobHeaderViewerSwitcher, {
propsData: {
blob: Object.assign({}, Blob, props),
},
});
}
afterEach(() => {
wrapper.destroy();
});
describe('initialization', () => {
it('is initialized with rich viewer as preselected when richViewer exists', () => {
createComponent();
expect(wrapper.vm.viewer).toBe(RICH_BLOB_VIEWER);
});
it('is initialized with simple viewer as preselected when richViewer does not exist', () => {
createComponent({ richViewer: null });
expect(wrapper.vm.viewer).toBe(SIMPLE_BLOB_VIEWER);
});
});
describe('rendering', () => {
let btnGroup;
let buttons;
beforeEach(() => {
createComponent();
btnGroup = wrapper.find(GlButtonGroup);
buttons = wrapper.findAll(GlButton);
});
it('renders gl-button-group component', () => {
expect(btnGroup.exists()).toBe(true);
});
it('renders exactly 2 buttons with predefined actions', () => {
expect(buttons.length).toBe(2);
[SIMPLE_BLOB_VIEWER_TITLE, RICH_BLOB_VIEWER_TITLE].forEach((title, i) => {
expect(buttons.at(i).attributes('title')).toBe(title);
});
});
});
describe('viewer changes', () => {
let buttons;
let simpleBtn;
let richBtn;
beforeEach(() => {
createComponent();
buttons = wrapper.findAll(GlButton);
simpleBtn = buttons.at(0);
richBtn = buttons.at(1);
});
it('does not switch the viewer if the selected one is already active', () => {
jest.spyOn(wrapper.vm, '$emit');
expect(wrapper.vm.viewer).toBe(RICH_BLOB_VIEWER);
richBtn.vm.$emit('click');
expect(wrapper.vm.viewer).toBe(RICH_BLOB_VIEWER);
expect(wrapper.vm.$emit).not.toHaveBeenCalled();
});
it('emits an event when a Simple Viewer button is clicked', () => {
jest.spyOn(wrapper.vm, '$emit');
simpleBtn.vm.$emit('click');
return wrapper.vm.$nextTick().then(() => {
expect(wrapper.vm.viewer).toBe(SIMPLE_BLOB_VIEWER);
expect(wrapper.vm.$emit).toHaveBeenCalledWith('switch-viewer', SIMPLE_BLOB_VIEWER);
});
});
it('emits an event when a Rich Viewer button is clicked', () => {
jest.spyOn(wrapper.vm, '$emit');
wrapper.setData({ viewer: SIMPLE_BLOB_VIEWER });
return wrapper.vm
.$nextTick()
.then(() => {
richBtn.vm.$emit('click');
})
.then(() => {
expect(wrapper.vm.viewer).toBe(RICH_BLOB_VIEWER);
expect(wrapper.vm.$emit).toHaveBeenCalledWith('switch-viewer', RICH_BLOB_VIEWER);
});
});
});
});
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import mountComponent from 'helpers/vue_mount_component_helper';
import IssueCardInnerScopedLabel from '~/boards/components/issue_card_inner_scoped_label.vue';
describe('IssueCardInnerScopedLabel Component', () => {
......
......@@ -62,6 +62,7 @@ describe('ErrorTrackingList', () => {
sortByField: jest.fn(),
fetchPaginatedResults: jest.fn(),
updateStatus: jest.fn(),
removeIgnoredResolvedErrors: jest.fn(),
};
const state = {
......@@ -221,6 +222,8 @@ describe('ErrorTrackingList', () => {
});
describe('When the ignore button on an error is clicked', () => {
const ignoreErrorButton = () => wrapper.find({ ref: 'ignoreError' });
beforeEach(() => {
store.state.list.loading = false;
store.state.list.errors = errorsList;
......@@ -235,20 +238,30 @@ describe('ErrorTrackingList', () => {
});
it('sends the "ignored" status and error ID', () => {
wrapper.find({ ref: 'ignoreError' }).trigger('click');
ignoreErrorButton().trigger('click');
expect(actions.updateStatus).toHaveBeenCalledWith(
expect.anything(),
{
endpoint: `/project/test/-/error_tracking/${errorsList[0].id}.json`,
redirectUrl: '/error_tracking',
status: 'ignored',
},
undefined,
);
});
it('calls an action to remove the item from the list', () => {
ignoreErrorButton().trigger('click');
expect(actions.removeIgnoredResolvedErrors).toHaveBeenCalledWith(
expect.anything(),
'1',
undefined,
);
});
});
describe('When the resolve button on an error is clicked', () => {
const resolveErrorButton = () => wrapper.find({ ref: 'resolveError' });
beforeEach(() => {
store.state.list.loading = false;
store.state.list.errors = errorsList;
......@@ -263,17 +276,25 @@ describe('ErrorTrackingList', () => {
});
it('sends "resolved" status and error ID', () => {
wrapper.find({ ref: 'resolveError' }).trigger('click');
resolveErrorButton().trigger('click');
expect(actions.updateStatus).toHaveBeenCalledWith(
expect.anything(),
{
endpoint: `/project/test/-/error_tracking/${errorsList[0].id}.json`,
redirectUrl: '/error_tracking',
status: 'resolved',
},
undefined,
);
});
it('calls an action to remove the item from the list', () => {
resolveErrorButton().trigger('click');
expect(actions.removeIgnoredResolvedErrors).toHaveBeenCalledWith(
expect.anything(),
'1',
undefined,
);
});
});
describe('When error tracking is disabled and user is not allowed to enable it', () => {
......
......@@ -5,6 +5,7 @@ import * as types from '~/error_tracking/store/list/mutation_types';
const ADD_RECENT_SEARCH = mutations[types.ADD_RECENT_SEARCH];
const CLEAR_RECENT_SEARCHES = mutations[types.CLEAR_RECENT_SEARCHES];
const LOAD_RECENT_SEARCHES = mutations[types.LOAD_RECENT_SEARCHES];
const REMOVE_IGNORED_RESOLVED_ERRORS = mutations[types.REMOVE_IGNORED_RESOLVED_ERRORS];
describe('Error tracking mutations', () => {
describe('SET_ERRORS', () => {
......@@ -114,5 +115,29 @@ describe('Error tracking mutations', () => {
expect(localStorage.getItem).toHaveBeenCalledWith('recent-searches/project/errors.json');
});
});
describe('REMOVE_IGNORED_RESOLVED_ERRORS', () => {
it('removes ignored or resolved errors from list', () => {
state.errors = [
{
id: 1,
status: 'unresolved',
},
{
id: 2,
status: 'ignored',
},
{
id: 3,
status: 'unresolved',
},
];
const ignoredError = state.errors[2].id;
REMOVE_IGNORED_RESOLVED_ERRORS(state, ignoredError);
expect(state.errors).not.toContain(ignoredError);
});
});
});
});
......@@ -100,19 +100,4 @@ describe Banzai::Filter::GollumTagsFilter do
expect(doc.at_css('code').text).to eq '[[link-in-backticks]]'
end
end
context 'table of contents' do
it 'replaces [[<em>TOC</em>]] with ToC result' do
doc = described_class.call("<p>[[<em>TOC</em>]]</p>", { project_wiki: project_wiki }, { toc: "FOO" })
expect(doc.to_html).to eq("FOO")
end
it 'handles an empty ToC result' do
input = "<p>[[<em>TOC</em>]]</p>"
doc = described_class.call(input, project_wiki: project_wiki)
expect(doc.to_html).to eq ''
end
end
end
# frozen_string_literal: true
require 'spec_helper'
describe Banzai::Filter::TableOfContentsTagFilter do
include FilterSpecHelper
context 'table of contents' do
let(:html) { '<p>[[<em>TOC</em>]]</p>' }
it 'replaces [[<em>TOC</em>]] with ToC result' do
doc = filter(html, {}, { toc: "FOO" })
expect(doc.to_html).to eq("FOO")
end
it 'handles an empty ToC result' do
doc = filter(html)
expect(doc.to_html).to eq ''
end
end
end
......@@ -99,4 +99,35 @@ describe Banzai::Pipeline::FullPipeline do
end
end
end
describe 'table of contents' do
let(:project) { create(:project, :public) }
let(:markdown) do
<<-MARKDOWN.strip_heredoc
[[_TOC_]]
# Header
MARKDOWN
end
let(:invalid_markdown) do
<<-MARKDOWN.strip_heredoc
test [[_TOC_]]
# Header
MARKDOWN
end
it 'inserts a table of contents' do
output = described_class.to_html(markdown, project: project)
expect(output).to include("<ul class=\"section-nav\">")
expect(output).to include("<li><a href=\"#header\">Header</a></li>")
end
it 'does not insert a table of contents' do
output = described_class.to_html(invalid_markdown, project: project)
expect(output).to include("test [[<em>TOC</em>]]")
end
end
end
......@@ -326,7 +326,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-cache-invalidator' => 'true', 'gitaly-feature-commit-without-batch-check' => 'true', 'gitaly-feature-use-core-delta-islands' => 'true')
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-cache-invalidator' => 'true', 'gitaly-feature-commit-without-batch-check' => 'true', 'gitaly-feature-use-core-delta-islands' => 'true', 'gitaly-feature-use-git-protocol-v2' => 'true')
expect(user.reload.last_activity_on).to eql(Date.today)
end
end
......@@ -346,7 +346,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-cache-invalidator' => 'true', 'gitaly-feature-commit-without-batch-check' => 'true', 'gitaly-feature-use-core-delta-islands' => 'true')
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-cache-invalidator' => 'true', 'gitaly-feature-commit-without-batch-check' => 'true', 'gitaly-feature-use-core-delta-islands' => 'true', 'gitaly-feature-use-git-protocol-v2' => 'true')
expect(user.reload.last_activity_on).to be_nil
end
end
......@@ -594,7 +594,7 @@ describe API::Internal::Base do
expect(json_response["gitaly"]["repository"]["relative_path"]).to eq(project.repository.gitaly_repository.relative_path)
expect(json_response["gitaly"]["address"]).to eq(Gitlab::GitalyClient.address(project.repository_storage))
expect(json_response["gitaly"]["token"]).to eq(Gitlab::GitalyClient.token(project.repository_storage))
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-cache-invalidator' => 'true', 'gitaly-feature-commit-without-batch-check' => 'true', 'gitaly-feature-use-core-delta-islands' => 'true')
expect(json_response["gitaly"]["features"]).to eq('gitaly-feature-inforef-uploadpack-cache' => 'true', 'gitaly-feature-cache-invalidator' => 'true', 'gitaly-feature-commit-without-batch-check' => 'true', 'gitaly-feature-use-core-delta-islands' => 'true', 'gitaly-feature-use-git-protocol-v2' => 'true')
end
end
......
......@@ -121,10 +121,17 @@ describe Users::DestroyService do
before do
solo_owned.group_members = [member]
service.execute(user)
end
it 'returns the user with attached errors' do
expect(service.execute(user)).to be(user)
expect(user.errors.full_messages).to eq([
'You must transfer ownership or delete groups before you can remove user'
])
end
it 'does not delete the user' do
service.execute(user)
expect(User.find(user.id)).to eq user
end
end
......
......@@ -15,7 +15,7 @@ module FilterSpecHelper
# context - Hash context for the filter. (default: {project: project})
#
# Returns a Nokogiri::XML::DocumentFragment
def filter(html, context = {})
def filter(html, context = {}, result = nil)
if defined?(project)
context.reverse_merge!(project: project)
end
......@@ -25,7 +25,7 @@ module FilterSpecHelper
context = context.merge(render_context: render_context)
described_class.call(html, context)
described_class.call(html, context, result)
end
# Get an instance of the Filter class
......