Commit c9700726 authored by GitLab Bot

Automatic merge of gitlab-org/gitlab master

parents fd5d3167 83e7a658
......@@ -77,9 +77,7 @@ export function initIssueApp(issueData, store) {
const { fullPath } = el.dataset;
if (gon?.features?.fixCommentScroll) {
scrollToTargetOnResize();
}
bootstrapApollo({ ...issueState, issueType: el.dataset.issueType });
......
/**
 * Wraps the given function so that it is only triggered once at a time:
 * while its returned promise is still pending, subsequent calls are ignored.
 *
 * @param {Function} fn some function that returns a promise
 * @returns A function that will only be triggered *once* while the promise is executing
 */
export const ignoreWhilePending = (fn) => {
const isPendingMap = new WeakMap();
const defaultContext = {};
// We need this to be a function so we get the `this`
return function ignoreWhilePendingInner(...args) {
const context = this || defaultContext;
if (isPendingMap.get(context)) {
return Promise.resolve();
}
isPendingMap.set(context, true);
return fn.apply(this, args).finally(() => {
isPendingMap.delete(context);
});
};
};
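A minimal usage sketch of the new utility; the `saveComment` handler and its endpoint below are illustrative only, not part of this change:

import { ignoreWhilePending } from '~/lib/utils/ignore_while_pending';

// Hypothetical async handler we want to protect against double-submits.
async function saveComment(text) {
  return fetch('/api/comments', { method: 'POST', body: text });
}

// While the first call's promise is pending, further calls resolve to undefined
// without invoking saveComment again; once the promise settles, the wrapper can fire again.
const saveCommentOnce = ignoreWhilePending(saveComment);

saveCommentOnce('first call runs');
saveCommentOnce('ignored while the first call is pending');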
......@@ -6,6 +6,7 @@ import createFlash from '~/flash';
import { clearDraft, getDiscussionReplyKey } from '~/lib/utils/autosave';
import { isLoggedIn } from '~/lib/utils/common_utils';
import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal';
import { ignoreWhilePending } from '~/lib/utils/ignore_while_pending';
import { s__, __ } from '~/locale';
import diffLineNoteFormMixin from '~/notes/mixins/diff_line_note_form';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
......@@ -171,7 +172,7 @@ export default {
this.expandDiscussion({ discussionId: this.discussion.id });
}
},
async cancelReplyForm(shouldConfirm, isDirty) {
cancelReplyForm: ignoreWhilePending(async function cancelReplyForm(shouldConfirm, isDirty) {
if (shouldConfirm && isDirty) {
const msg = s__('Notes|Are you sure you want to cancel creating this comment?');
......@@ -188,7 +189,7 @@ export default {
this.isReplying = false;
clearDraft(this.autosaveKey);
},
}),
saveReply(noteText, form, callback) {
if (!noteText) {
this.cancelReplyForm();
......
......@@ -7,6 +7,7 @@ import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_m
import { INLINE_DIFF_LINES_KEY } from '~/diffs/constants';
import createFlash from '~/flash';
import httpStatusCodes from '~/lib/utils/http_status';
import { ignoreWhilePending } from '~/lib/utils/ignore_while_pending';
import { truncateSha } from '~/lib/utils/text_utility';
import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
import { __, s__, sprintf } from '../../locale';
......@@ -350,7 +351,10 @@ export default {
parent: this.$el,
});
},
async formCancelHandler({ shouldConfirm, isDirty }) {
formCancelHandler: ignoreWhilePending(async function formCancelHandler({
shouldConfirm,
isDirty,
}) {
if (shouldConfirm && isDirty) {
const msg = __('Are you sure you want to cancel editing this comment?');
const confirmed = await confirmAction(msg);
......@@ -364,7 +368,7 @@ export default {
}
this.isEditing = false;
this.$emit('cancelForm');
},
}),
recoverNoteContent(noteText) {
// We need to do this to prevent the noteForm "inconsistent content" warning;
// the change is intentional, so we need to recover the content
......
<script>
import { isNode, isDocument, isSeq, visit } from 'yaml';
import { capitalize } from 'lodash';
import TextWidget from '~/pipeline_wizard/components/widgets/text.vue';
import ListWidget from '~/pipeline_wizard/components/widgets/list.vue';
const widgets = {
TextWidget,
ListWidget,
};
function isNullOrUndefined(v) {
return [undefined, null].includes(v);
}
export default {
components: {
...widgets,
},
props: {
template: {
type: Object,
required: true,
validator: (v) => isNode(v),
},
compiled: {
type: Object,
required: true,
validator: (v) => isDocument(v) || isNode(v),
},
target: {
type: String,
required: true,
validator: (v) => /^\$.*/g.test(v),
},
widget: {
type: String,
required: true,
validator: (v) => {
return Object.keys(widgets).includes(`${capitalize(v)}Widget`);
},
},
validate: {
type: Boolean,
required: false,
default: false,
},
},
computed: {
path() {
let res;
visit(this.template, (seqKey, node, path) => {
if (node && node.value === this.target) {
// `path` is an array of objects (all the node's parents)
// So this reducer will reduce it to an array of the path's keys,
// e.g. `[ 'foo', 'bar', '0' ]`
res = path.reduce((p, { key }) => (key ? [...p, `${key}`] : p), []);
const parent = path[path.length - 1];
if (isSeq(parent)) {
res.push(seqKey);
}
}
});
return res;
},
},
methods: {
compile(v) {
if (!this.path) return;
if (isNullOrUndefined(v)) {
this.compiled.deleteIn(this.path);
}
this.compiled.setIn(this.path, v);
},
onModelChange(v) {
this.$emit('beforeUpdate:compiled');
this.compile(v);
this.$emit('update:compiled', this.compiled);
this.$emit('highlight', this.path);
},
onValidationStateChange(v) {
this.$emit('update:valid', v);
},
},
};
</script>
<template>
<div>
<component
:is="`${widget}-widget`"
ref="widget"
:validate="validate"
v-bind="$attrs"
@input="onModelChange"
@update:valid="onValidationStateChange"
/>
</div>
</template>
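A rough sketch of how the `path` computed property above discovers the target's location, mirroring its reducer; the inline document is illustrative only:

import { Document, visit, isSeq } from 'yaml';

// Illustrative template where $TARGET marks the value a widget should replace.
const template = new Document({ template: { foo: { some: '$TARGET' } } }).get('template');

let path;
visit(template, (seqKey, node, ancestors) => {
  if (node && node.value === '$TARGET') {
    // Keep only the ancestors that carry a `key` (i.e. Pairs), like the reducer above.
    path = ancestors.reduce((acc, { key }) => (key ? [...acc, `${key}`] : acc), []);
    // Sequence items have no Pair key, so append the index instead.
    if (isSeq(ancestors[ancestors.length - 1])) path.push(seqKey);
  }
});

// path === ['foo', 'some']; compile() then calls compiled.setIn(path, value).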
......@@ -49,7 +49,6 @@ class Projects::IssuesController < Projects::ApplicationController
push_frontend_feature_flag(:confidential_notes, project&.group, default_enabled: :yaml)
push_frontend_feature_flag(:issue_assignees_widget, project, default_enabled: :yaml)
push_frontend_feature_flag(:paginated_issue_discussions, project, default_enabled: :yaml)
push_frontend_feature_flag(:fix_comment_scroll, project, default_enabled: :yaml)
push_frontend_feature_flag(:work_items, project&.group, default_enabled: :yaml)
end
......
---
name: fix_comment_scroll
introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/76340
rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/349638
milestone: '14.7'
type: development
group: group::project management
default_enabled: false
......@@ -582,10 +582,10 @@ module EE
def billed_user_ids_excluding_guests
strong_memoize(:billed_user_ids_excluding_guests) do
group_member_user_ids = billed_group_users(non_guests: true).distinct.pluck(:id)
project_member_user_ids = billed_project_users(non_guests: true).distinct.pluck(:id)
shared_group_user_ids = billed_shared_non_guests_group_users.distinct.pluck(:id)
shared_project_user_ids = billed_invited_non_guests_group_to_project_users.distinct.pluck(:id)
group_member_user_ids = billed_group_users(exclude_guests: true).distinct.pluck(:id)
project_member_user_ids = billed_project_users(exclude_guests: true).distinct.pluck(:id)
shared_group_user_ids = billed_shared_group_users(exclude_guests: true).distinct.pluck(:id)
shared_project_user_ids = billed_invited_group_to_project_users(exclude_guests: true).distinct.pluck(:id)
{
user_ids: (group_member_user_ids + project_member_user_ids + shared_group_user_ids + shared_project_user_ids).to_set,
......@@ -624,21 +624,21 @@ module EE
end
# Members belonging directly to Group or its subgroups
def billed_group_users(non_guests: false)
def billed_group_users(exclude_guests: false)
members = ::GroupMember.active_without_invites_and_requests.where(
source_id: self_and_descendants
)
members = members.non_guests if non_guests
members = members.non_guests if exclude_guests
users_without_project_bots(members)
end
# Members belonging directly to Projects within Group or Projects within subgroups
def billed_project_users(non_guests: false)
def billed_project_users(exclude_guests: false)
members = ::ProjectMember.without_invites_and_requests
members = members.non_guests if non_guests
members = members.non_guests if exclude_guests
members = members.where(
source_id: ::Project.joins(:group).where(namespace: self_and_descendants)
......@@ -648,13 +648,11 @@ module EE
end
# Members belonging to Groups invited to collaborate with Projects
def billed_invited_group_to_project_users
members = invited_or_shared_group_members(invited_groups_in_projects)
users_without_project_bots(members)
end
def billed_invited_group_to_project_users(exclude_guests: false)
groups = (exclude_guests ? invited_group_as_non_guests_in_projects : invited_groups_in_projects)
members = invited_or_shared_group_members(groups)
members = members.non_guests if exclude_guests
def billed_invited_non_guests_group_to_project_users
members = invited_or_shared_group_members(invited_group_as_non_guests_in_projects).non_guests
users_without_project_bots(members)
end
......@@ -668,13 +666,11 @@ module EE
end
# Members belonging to Groups invited to collaborate with Groups and Subgroups
def billed_shared_group_users
members = invited_or_shared_group_members(invited_group_in_groups)
users_without_project_bots(members)
end
def billed_shared_group_users(exclude_guests: false)
groups = (exclude_guests ? invited_non_guest_group_in_groups : invited_group_in_groups)
members = invited_or_shared_group_members(groups)
members = members.non_guests if exclude_guests
def billed_shared_non_guests_group_users
members = invited_or_shared_group_members(invited_non_guest_group_in_groups).non_guests
users_without_project_bots(members)
end
......
......@@ -3,11 +3,16 @@
# rubocop:disable Rails/Pluck
module QA
# Only executes in custom job/pipeline
# https://gitlab.com/gitlab-org/manage/import/import-github-performance
#
RSpec.describe 'Manage', :github, :requires_admin, only: { job: 'large-github-import' } do
describe 'Project import' do
let(:logger) { Runtime::Logger.logger }
let(:differ) { RSpec::Support::Differ.new(color: true) }
let(:created_by_pattern) { /\*Created by: \S+\*\n\n/ }
let(:suggestion_pattern) { /suggestion:-\d+\+\d+/ }
let(:api_client) { Runtime::API::Client.as_admin }
let(:user) do
......@@ -19,46 +24,57 @@ module QA
let(:github_repo) { ENV['QA_LARGE_GH_IMPORT_REPO'] || 'rspec/rspec-core' }
let(:import_max_duration) { ENV['QA_LARGE_GH_IMPORT_DURATION'] ? ENV['QA_LARGE_GH_IMPORT_DURATION'].to_i : 7200 }
let(:github_client) do
Octokit.middleware = Faraday::RackBuilder.new do |builder|
builder.response(:logger, logger, headers: false, bodies: false)
end
Octokit::Client.new(
access_token: ENV['QA_LARGE_GH_IMPORT_GH_TOKEN'] || Runtime::Env.github_access_token,
auto_paginate: true
)
end
let(:gh_branches) { github_client.branches(github_repo).map(&:name) }
let(:gh_commits) { github_client.commits(github_repo).map(&:sha) }
let(:gh_repo) { github_client.repository(github_repo) }
let(:gh_branches) do
logger.debug("= Fetching branches =")
github_client.branches(github_repo).map(&:name)
end
let(:gh_commits) do
logger.debug("= Fetching commits =")
github_client.commits(github_repo).map(&:sha)
end
let(:gh_labels) do
logger.debug("= Fetching labels =")
github_client.labels(github_repo).map { |label| { name: label.name, color: "##{label.color}" } }
end
let(:gh_milestones) do
logger.debug("= Fetching milestones =")
github_client
.list_milestones(github_repo, state: 'all')
.map { |ms| { title: ms.title, description: ms.description } }
end
let(:gh_all_issues) do
logger.debug("= Fetching issues and prs =")
github_client.list_issues(github_repo, state: 'all')
end
let(:gh_prs) do
gh_all_issues.select(&:pull_request).each_with_object({}) do |pr, hash|
hash[pr.title] = {
hash[pr.number] = {
url: pr.html_url,
title: pr.title,
body: pr.body || '',
comments: [*gh_pr_comments[pr.html_url], *gh_issue_comments[pr.html_url]].compact.sort
comments: [*gh_pr_comments[pr.html_url], *gh_issue_comments[pr.html_url]].compact
}
end
end
let(:gh_issues) do
gh_all_issues.reject(&:pull_request).each_with_object({}) do |issue, hash|
hash[issue.title] = {
hash[issue.number] = {
url: issue.html_url,
title: issue.title,
body: issue.body || '',
comments: gh_issue_comments[issue.html_url]
}
......@@ -66,12 +82,14 @@ module QA
end
let(:gh_issue_comments) do
logger.debug("= Fetching issue comments =")
github_client.issues_comments(github_repo).each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
hash[c.html_url.gsub(/\#\S+/, "")] << c.body # use base html url as key
end
end
let(:gh_pr_comments) do
logger.debug("= Fetching pr comments =")
github_client.pull_requests_comments(github_repo).each_with_object(Hash.new { |h, k| h[k] = [] }) do |c, hash|
hash[c.html_url.gsub(/\#\S+/, "")] << c.body # use base html url as key
end
......@@ -97,6 +115,7 @@ module QA
"data",
{
import_time: @import_time,
reported_stats: @stats,
github: {
project_name: github_repo,
branches: gh_branches.length,
......@@ -104,9 +123,9 @@ module QA
labels: gh_labels.length,
milestones: gh_milestones.length,
prs: gh_prs.length,
pr_comments: gh_prs.sum { |_k, v| v.length },
pr_comments: gh_prs.sum { |_k, v| v[:comments].length },
issues: gh_issues.length,
issue_comments: gh_issues.sum { |_k, v| v.length }
issue_comments: gh_issues.sum { |_k, v| v[:comments].length }
},
gitlab: {
project_name: imported_project.path_with_namespace,
......@@ -115,15 +134,15 @@ module QA
labels: gl_labels.length,
milestones: gl_milestones.length,
mrs: mrs.length,
mr_comments: mrs.sum { |_k, v| v.length },
mr_comments: mrs.sum { |_k, v| v[:comments].length },
issues: gl_issues.length,
issue_comments: gl_issues.sum { |_k, v| v.length }
issue_comments: gl_issues.sum { |_k, v| v[:comments].length }
},
not_imported: {
mrs: @mr_diff,
issues: @issue_diff
}
}.to_json
}
)
end
......@@ -133,19 +152,25 @@ module QA
) do
start = Time.now
# import the project and log path
Runtime::Logger.info("Importing project '#{imported_project.reload!.full_path}'")
# import the project and log gitlab path
Runtime::Logger.info("== Importing project '#{github_repo}' in to '#{imported_project.reload!.full_path}' ==")
# fetch all objects right after import has started
fetch_github_objects
import_status = lambda do
imported_project.project_import_status[:import_status].tap do |status|
imported_project.project_import_status.yield_self do |status|
@stats = status.dig(:stats, :imported)
# fail fast if import explicitly failed
raise "Import of '#{imported_project.name}' failed!" if status == 'failed'
raise "Import of '#{imported_project.name}' failed!" if status[:import_status] == 'failed'
status[:import_status]
end
end
logger.info("== Waiting for import to be finished ==")
expect(import_status).to eventually_eq('finished').within(max_duration: import_max_duration, sleep_interval: 30)
@import_time = Time.now - start
aggregate_failures do
......@@ -161,22 +186,22 @@ module QA
#
# @return [void]
def fetch_github_objects
logger.debug("== Fetching objects for github repo: '#{github_repo}' ==")
logger.info("== Fetching github repo objects ==")
gh_repo
gh_branches
gh_commits
gh_prs
gh_issues
gh_labels
gh_milestones
gh_prs
gh_issues
end
# Verify repository imported correctly
#
# @return [void]
def verify_repository_import
logger.debug("== Verifying repository import ==")
logger.info("== Verifying repository import ==")
expect(imported_project.description).to eq(gh_repo.description)
# check via include, importer creates more branches
# https://gitlab.com/gitlab-org/gitlab/-/issues/332711
......@@ -184,42 +209,42 @@ module QA
expect(gl_commits).to match_array(gh_commits)
end
# Verify imported merge requests and mr issues
# Verify imported labels
#
# @return [void]
def verify_merge_requests_import
logger.debug("== Verifying merge request import ==")
@mr_diff = verify_mrs_or_issues('mr')
def verify_labels_import
logger.info("== Verifying label import ==")
# check via include, additional labels can be inherited from parent group
expect(gl_labels).to include(*gh_labels)
end
# Verify imported issues and issue comments
# Verify milestones import
#
# @return [void]
def verify_issues_import
logger.debug("== Verifying issue import ==")
@issue_diff = verify_mrs_or_issues('issue')
def verify_milestones_import
logger.info("== Verifying milestones import ==")
expect(gl_milestones).to match_array(gh_milestones)
end
# Verify imported labels
# Verify imported merge requests and mr issues
#
# @return [void]
def verify_labels_import
logger.debug("== Verifying label import ==")
# check via include, additional labels can be inherited from parent group
expect(gl_labels).to include(*gh_labels)
def verify_merge_requests_import
logger.info("== Verifying merge request import ==")
@mr_diff = verify_mrs_or_issues('mr')
end
# Verify milestones import
# Verify imported issues and issue comments
#
# @return [void]
def verify_milestones_import
logger.debug("== Verifying milestones import ==")
expect(gl_milestones).to match_array(gh_milestones)
def verify_issues_import
logger.info("== Verifying issue import ==")
@issue_diff = verify_mrs_or_issues('issue')
end
private
# Verify imported mrs or issues and return diff
# Verify imported mrs or issues and return missing items
#
# @param [String] type verification object, 'mr' or 'issue'
# @return [Hash]
......@@ -231,11 +256,10 @@ module QA
count_msg = "Expected to contain same amount of #{type}s. Gitlab: #{expected.length}, Github: #{actual.length}"
expect(expected.length).to eq(actual.length), count_msg
logger.debug("= Comparing #{type}s =")
missing_comments = verify_comments(type, actual, expected)
{
"#{type}s": actual.keys - expected.keys,
"#{type}s": (actual.keys - expected.keys).map { |it| actual[it].slice(:title, :url) },
"#{type}_comments": missing_comments
}
end
......@@ -247,9 +271,10 @@ module QA
# @param [Hash] expected
# @return [Hash]
def verify_comments(type, actual, expected)
actual.each_with_object({}) do |(title, actual_item), missing_comments|
actual.each_with_object([]) do |(key, actual_item), missing_comments|
expected_item = expected[key]
title = actual_item[:title]
msg = "expected #{type} with title '#{title}' to have"
expected_item = expected[title]
# Print title in the error message to see which object is missing
#
......@@ -261,9 +286,9 @@ module QA
expected_body = expected_item[:body]
actual_body = actual_item[:body]
body_msg = <<~MSG
#{msg} same description. diff:\n#{differ.diff(expected_item[:body], actual_item[:body])}
#{msg} same description. diff:\n#{differ.diff(expected_body, actual_body)}
MSG
expect(expected_body).to include(actual_body), body_msg
expect(expected_body).to eq(actual_body), body_msg
# Print amount difference first
#
......@@ -278,7 +303,14 @@ module QA
# Save missing comments
#
comment_diff = actual_comments - expected_comments
missing_comments[title] = comment_diff unless comment_diff.empty?
next if comment_diff.empty?
missing_comments << {
title: title,
github_url: actual_item[:url],
gitlab_url: expected_item[:url],
missing_comments: comment_diff
}
end
end
......@@ -329,20 +361,25 @@ module QA
@mrs ||= begin
logger.debug("= Fetching merge requests =")
imported_mrs = imported_project.merge_requests(auto_paginate: true, attempts: 2)
logger.debug("= Transforming merge request objects for comparison =")
imported_mrs.each_with_object({}) do |mr, hash|
logger.debug("= Fetching merge request comments =")
imported_mrs.each_with_object({}) do |mr, mrs_with_comments|
resource = Resource::MergeRequest.init do |resource|
resource.project = imported_project
resource.iid = mr[:iid]
resource.api_client = api_client
end
hash[mr[:title]] = {
body: mr[:description],
comments: resource.comments(auto_paginate: true, attempts: 2)
logger.debug("Fetching comments for mr '#{mr[:title]}'")
mrs_with_comments[mr[:iid]] = {
url: mr[:web_url],
title: mr[:title],
body: sanitize_description(mr[:description]) || '',
comments: resource
.comments(auto_paginate: true, attempts: 2)
# remove system notes
.reject { |c| c[:system] || c[:body].match?(/^(\*\*Review:\*\*)|(\*Merged by:).*/) }
.map { |c| sanitize(c[:body]) }
.map { |c| sanitize_comment(c[:body]) }
}
end
end
......@@ -355,37 +392,51 @@ module QA
@gl_issues ||= begin
logger.debug("= Fetching issues =")
imported_issues = imported_project.issues(auto_paginate: true, attempts: 2)
logger.debug("= Transforming issue objects for comparison =")
imported_issues.each_with_object({}) do |issue, hash|
logger.debug("= Fetching issue comments =")
imported_issues.each_with_object({}) do |issue, issues_with_comments|
resource = Resource::Issue.init do |issue_resource|
issue_resource.project = imported_project
issue_resource.iid = issue[:iid]
issue_resource.api_client = api_client
end
hash[issue[:title]] = {
body: issue[:description],
comments: resource.comments(auto_paginate: true, attempts: 2).map { |c| sanitize(c[:body]) }
logger.debug("Fetching comments for issue '#{issue[:title]}'")
issues_with_comments[issue[:iid]] = {
url: issue[:web_url],
title: issue[:title],
body: sanitize_description(issue[:description]) || '',
comments: resource
.comments(auto_paginate: true, attempts: 2)
.map { |c| sanitize_comment(c[:body]) }
}
end
end
end
# Remove added prefixes and legacy diff format
# Remove added prefixes and legacy diff format from comments
#
# @param [String] body
# @return [String]
def sanitize_comment(body)
body.gsub(created_by_pattern, "").gsub(suggestion_pattern, "suggestion\r")
end
# Remove created by prefix from description
#
# @param [String] body
# @return [String]
def sanitize(body)
body.gsub(/\*Created by: \S+\*\n\n/, "").gsub(/suggestion:-\d+\+\d+/, "suggestion\r")
def sanitize_description(body)
body&.gsub(created_by_pattern, "")
end
# Save json as file
#
# @param [String] name
# @param [String] json
# @param [Hash] json
# @return [void]
def save_json(name, json)
File.open("tmp/#{name}.json", "w") { |file| file.write(json) }
File.open("tmp/#{name}.json", "w") { |file| file.write(JSON.pretty_generate(json)) }
end
end
end
......
import waitForPromises from 'helpers/wait_for_promises';
import { ignoreWhilePending } from '~/lib/utils/ignore_while_pending';
const TEST_ARGS = [123, { foo: 'bar' }];
describe('~/lib/utils/ignore_while_pending', () => {
let spyResolve;
let spyReject;
let spy;
let subject;
beforeEach(() => {
spy = jest.fn().mockImplementation(
// NOTE: We can't pass an arrow function here...
function foo() {
return new Promise((resolve, reject) => {
spyResolve = resolve;
spyReject = reject;
});
},
);
});
describe('with non-instance method', () => {
beforeEach(() => {
subject = ignoreWhilePending(spy);
});
it('while pending, will ignore subsequent calls', () => {
subject(...TEST_ARGS);
subject();
subject();
subject();
expect(spy).toHaveBeenCalledTimes(1);
expect(spy).toHaveBeenCalledWith(...TEST_ARGS);
});
it.each`
desc | act
${'when resolved'} | ${() => spyResolve()}
${'when rejected'} | ${() => spyReject(new Error('foo'))}
`('$desc, can be triggered again', async ({ act }) => {
// We need the empty catch(), since we are testing rejecting the promise,
// which would otherwise cause the test to fail.
subject(...TEST_ARGS).catch(() => {});
subject();
subject();
subject();
act();
// We need waitForPromises, so that the underlying finally() runs.
await waitForPromises();
subject({ again: 'foo' });
expect(spy).toHaveBeenCalledTimes(2);
expect(spy).toHaveBeenCalledWith(...TEST_ARGS);
expect(spy).toHaveBeenCalledWith({ again: 'foo' });
});
it('while pending, returns empty resolutions for ignored calls', async () => {
subject(...TEST_ARGS);
await expect(subject(...TEST_ARGS)).resolves.toBeUndefined();
await expect(subject(...TEST_ARGS)).resolves.toBeUndefined();
});
it('when resolved, returns resolution for origin call', async () => {
const resolveValue = { original: 1 };
const result = subject(...TEST_ARGS);
spyResolve(resolveValue);
await expect(result).resolves.toEqual(resolveValue);
});
it('when rejected, returns rejection for original call', async () => {
const rejectedErr = new Error('original');
const result = subject(...TEST_ARGS);
spyReject(rejectedErr);
await expect(result).rejects.toEqual(rejectedErr);
});
});
describe('with instance method', () => {
let instance1;
let instance2;
beforeEach(() => {
// Let's capture the "this" for tests
subject = ignoreWhilePending(function instanceMethod(...args) {
return spy(this, ...args);
});
instance1 = {};
instance2 = {};
});
it('will not ignore calls across instances', () => {
subject.call(instance1, { context: 1 });
subject.call(instance1, {});
subject.call(instance1, {});
subject.call(instance2, { context: 2 });
subject.call(instance2, {});
expect(spy.mock.calls).toEqual([
[instance1, { context: 1 }],
[instance2, { context: 2 }],
]);
});
it('resolving one instance does not resolve other instances', async () => {
subject.call(instance1, { context: 1 });
// We need to save off spyResolve so it's not overwritten by next call
const instance1Resolve = spyResolve;
subject.call(instance2, { context: 2 });
instance1Resolve();
await waitForPromises();
subject.call(instance1, { context: 1 });
subject.call(instance2, { context: 2 });
expect(spy.mock.calls).toEqual([
[instance1, { context: 1 }],
[instance2, { context: 2 }],
[instance1, { context: 1 }],
]);
});
});
});
import { mount, shallowMount } from '@vue/test-utils';
import { Document } from 'yaml';
import InputWrapper from '~/pipeline_wizard/components/input.vue';
import TextWidget from '~/pipeline_wizard/components/widgets/text.vue';
describe('Pipeline Wizard -- Input Wrapper', () => {
let wrapper;
const createComponent = (props = {}, mountFunc = mount) => {
wrapper = mountFunc(InputWrapper, {
propsData: {
template: new Document({
template: {
bar: 'baz',
foo: { some: '$TARGET' },
},
}).get('template'),
compiled: new Document({ bar: 'baz', foo: { some: '$TARGET' } }),
target: '$TARGET',
widget: 'text',
label: 'some label (required by the text widget)',
...props,
},
});
};
describe('API', () => {
const inputValue = 'dslkfjsdlkfjlskdjfn';
let inputChild;
beforeEach(() => {
createComponent({});
inputChild = wrapper.find(TextWidget);
});
afterEach(() => {
wrapper.destroy();
});
it('will replace its value in compiled', async () => {
await inputChild.vm.$emit('input', inputValue);
const expected = new Document({
bar: 'baz',
foo: { some: inputValue },
});
expect(wrapper.emitted()['update:compiled']).toEqual([[expected]]);
});
it('will emit a highlight event with the correct path if child emits an input event', async () => {
await inputChild.vm.$emit('input', inputValue);
const expected = ['foo', 'some'];
expect(wrapper.emitted().highlight).toEqual([[expected]]);
});
});
describe('Target Path Discovery', () => {
afterEach(() => {
wrapper.destroy();
});
it.each`
scenario | template | target | expected
${'simple nested object'} | ${{ foo: { bar: { baz: '$BOO' } } }} | ${'$BOO'} | ${['foo', 'bar', 'baz']}
${'list, first pos.'} | ${{ foo: ['$BOO'] }} | ${'$BOO'} | ${['foo', 0]}
${'list, second pos.'} | ${{ foo: ['bar', '$BOO'] }} | ${'$BOO'} | ${['foo', 1]}
${'lowercase target'} | ${{ foo: { bar: '$jupp' } }} | ${'$jupp'} | ${['foo', 'bar']}
${'root list'} | ${['$BOO']} | ${'$BOO'} | ${[0]}
`('$scenario', ({ template, target, expected }) => {
createComponent(
{
template: new Document({ template }).get('template'),
target,
},
shallowMount,
);
expect(wrapper.vm.path).toEqual(expected);
});
});
});
import fs from 'fs';
import { mount } from '@vue/test-utils';
import { Document } from 'yaml';
import InputWrapper from '~/pipeline_wizard/components/input.vue';
describe('Test that all widgets in ./widgets/* provide a minimal API', () => {
const createComponent = (props = {}, mountFunc = mount) => {
mountFunc(InputWrapper, {
propsData: {
template: new Document({
template: {
bar: 'baz',
foo: { some: '$TARGET' },
},
}).get('template'),
compiled: new Document({ bar: 'baz', foo: { some: '$TARGET' } }),
target: '$TARGET',
widget: 'text',
label: 'some label (required by the text widget)',
...props,
},
});
};
const widgets = fs
.readdirSync('./app/assets/javascripts/pipeline_wizard/components/widgets')
.map((filename) => [filename.match(/^(.*).vue$/)[1]]);
let consoleErrorSpy;
beforeAll(() => {
consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation(() => {});
});
afterAll(() => {
consoleErrorSpy.mockRestore();
});
describe.each(widgets)('`%s` Widget', (name) => {
it('passes the input validator', () => {
const validatorFunc = InputWrapper.props.widget.validator;
expect(validatorFunc(name)).toBe(true);
});
it('mounts without error', () => {
createComponent({ widget: name });
expect(consoleErrorSpy).not.toHaveBeenCalled();
});
});
});