Commit 40254b9a authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 22a0d312
...@@ -28,29 +28,38 @@ export default { ...@@ -28,29 +28,38 @@ export default {
{ {
key: 'error', key: 'error',
label: __('Error'), label: __('Error'),
thClass: 'w-70p', thClass: 'w-60p',
tdClass: 'table-col d-flex align-items-center d-sm-table-cell', tdClass: 'table-col d-flex d-sm-table-cell px-3',
}, },
{ {
key: 'events', key: 'events',
label: __('Events'), label: __('Events'),
tdClass: 'table-col d-flex align-items-center d-sm-table-cell', thClass: 'text-right',
tdClass: 'table-col d-flex d-sm-table-cell',
}, },
{ {
key: 'users', key: 'users',
label: __('Users'), label: __('Users'),
tdClass: 'table-col d-flex align-items-center d-sm-table-cell', thClass: 'text-right',
tdClass: 'table-col d-flex d-sm-table-cell',
}, },
{ {
key: 'lastSeen', key: 'lastSeen',
label: __('Last seen'), label: __('Last seen'),
thClass: 'w-15p', thClass: '',
tdClass: 'table-col d-flex align-items-center d-sm-table-cell', tdClass: 'table-col d-flex d-sm-table-cell',
}, },
{ {
key: 'ignore', key: 'ignore',
label: '', label: '',
tdClass: 'table-col d-flex align-items-center d-sm-table-cell', thClass: 'w-3rem',
tdClass: 'table-col d-flex pl-0 d-sm-table-cell',
},
{
key: 'resolved',
label: '',
thClass: 'w-3rem',
tdClass: 'table-col d-flex pl-0 d-sm-table-cell',
}, },
{ {
key: 'details', key: 'details',
...@@ -197,9 +206,7 @@ export default { ...@@ -197,9 +206,7 @@ export default {
<template> <template>
<div class="error-list"> <div class="error-list">
<div v-if="errorTrackingEnabled"> <div v-if="errorTrackingEnabled">
<div <div class="row flex-column flex-sm-row align-items-sm-center row-top m-0 mt-sm-2 p-0 p-sm-3">
class="row flex-column flex-sm-row align-items-sm-center row-top m-0 mt-sm-2 mx-sm-1 p-0 p-sm-3"
>
<div class="search-box flex-fill mr-sm-2 my-3 m-sm-0 p-3 p-sm-0"> <div class="search-box flex-fill mr-sm-2 my-3 m-sm-0 p-3 p-sm-0">
<div class="filtered-search-box mb-0"> <div class="filtered-search-box mb-0">
<gl-dropdown <gl-dropdown
...@@ -333,6 +340,16 @@ export default { ...@@ -333,6 +340,16 @@ export default {
<gl-icon name="eye-slash" :size="12" /> <gl-icon name="eye-slash" :size="12" />
</gl-button> </gl-button>
</template> </template>
<template v-slot:resolved="errors">
<gl-button
ref="resolveError"
v-gl-tooltip
:title="__('Resolve')"
@click="updateIssueStatus(errors.item.id, 'resolved')"
>
<gl-icon name="check-circle" :size="12" />
</gl-button>
</template>
<template v-slot:details="errors"> <template v-slot:details="errors">
<gl-button <gl-button
:href="getDetailsLink(errors.item.id)" :href="getDetailsLink(errors.item.id)"
......
...@@ -21,12 +21,17 @@ export const addDelimiter = text => ...@@ -21,12 +21,17 @@ export const addDelimiter = text =>
export const highCountTrim = count => (count > 99 ? '99+' : count); export const highCountTrim = count => (count > 99 ? '99+' : count);
/** /**
* Converts first char to uppercase and replaces undercores with spaces * Converts first char to uppercase and replaces the given separator with spaces
* @param {String} string * @param {String} string - The string to humanize
* @param {String} separator - The separator used to separate words (defaults to "_")
* @requires {String} * @requires {String}
* @returns {String}
*/ */
export const humanize = string => export const humanize = (string, separator = '_') => {
string.charAt(0).toUpperCase() + string.replace(/_/g, ' ').slice(1); const replaceRegex = new RegExp(separator, 'g');
return string.charAt(0).toUpperCase() + string.replace(replaceRegex, ' ').slice(1);
};
/** /**
* Replaces underscores with dashes * Replaces underscores with dashes
...@@ -45,7 +50,11 @@ export const slugify = (str, separator = '-') => { ...@@ -45,7 +50,11 @@ export const slugify = (str, separator = '-') => {
const slug = str const slug = str
.trim() .trim()
.toLowerCase() .toLowerCase()
.replace(/[^a-zA-Z0-9_.-]+/g, separator); .replace(/[^a-zA-Z0-9_.-]+/g, separator)
// Remove any duplicate separators or separator prefixes/suffixes
.split(separator)
.filter(Boolean)
.join(separator);
return slug === separator ? '' : slug; return slug === separator ? '' : slug;
}; };
...@@ -159,6 +168,15 @@ export const convertToSentenceCase = string => { ...@@ -159,6 +168,15 @@ export const convertToSentenceCase = string => {
return splitWord.join(' '); return splitWord.join(' ');
}; };
/**
* Converts a sentence to title case
* e.g. Hello world => Hello World
*
* @param {String} string
* @returns {String}
*/
export const convertToTitleCase = string => string.replace(/\b[a-z]/g, s => s.toUpperCase());
/** /**
* Splits camelCase or PascalCase words * Splits camelCase or PascalCase words
* e.g. HelloWorld => Hello World * e.g. HelloWorld => Hello World
......
import $ from 'jquery'; import $ from 'jquery';
import { convertToTitleCase, humanize, slugify } from '../lib/utils/text_utility';
import { getParameterValues } from '../lib/utils/url_utility'; import { getParameterValues } from '../lib/utils/url_utility';
import projectNew from './project_new'; import projectNew from './project_new';
const prepareParameters = () => {
const name = getParameterValues('name')[0];
const path = getParameterValues('path')[0];
// If the name param exists but the path doesn't then generate it from the name
if (name && !path) {
return { name, path: slugify(name) };
}
// If the path param exists but the name doesn't then generate it from the path
if (path && !name) {
return { name: convertToTitleCase(humanize(path, '-')), path };
}
return { name, path };
};
export default () => { export default () => {
const pathParam = getParameterValues('path')[0]; let hasUserDefinedProjectName = false;
const nameParam = getParameterValues('name')[0];
const $projectPath = $('.js-path-name');
const $projectName = $('.js-project-name'); const $projectName = $('.js-project-name');
const $projectPath = $('.js-path-name');
// get the path url and append it in the input const { name, path } = prepareParameters();
$projectPath.val(pathParam);
// get the project name from the URL and set it as input value // get the project name from the URL and set it as input value
$projectName.val(nameParam); $projectName.val(name);
// get the path url and append it in the input
$projectPath.val(path);
// generate slug when project name changes // generate slug when project name changes
$projectName.keyup(() => projectNew.onProjectNameChange($projectName, $projectPath)); $projectName.on('keyup', () => {
projectNew.onProjectNameChange($projectName, $projectPath);
hasUserDefinedProjectName = $projectName.val().trim().length > 0;
});
// generate project name from the slug if one isn't set
$projectPath.on('keyup', () =>
projectNew.onProjectPathChange($projectName, $projectPath, hasUserDefinedProjectName),
);
}; };
import $ from 'jquery'; import $ from 'jquery';
import { addSelectOnFocusBehaviour } from '../lib/utils/common_utils'; import { addSelectOnFocusBehaviour } from '../lib/utils/common_utils';
import { slugify } from '../lib/utils/text_utility'; import { convertToTitleCase, humanize, slugify } from '../lib/utils/text_utility';
import { s__ } from '~/locale'; import { s__ } from '~/locale';
let hasUserDefinedProjectPath = false; let hasUserDefinedProjectPath = false;
let hasUserDefinedProjectName = false;
const onProjectNameChange = ($projectNameInput, $projectPathInput) => {
const slug = slugify($projectNameInput.val());
$projectPathInput.val(slug);
};
const onProjectPathChange = ($projectNameInput, $projectPathInput, hasExistingProjectName) => {
const slug = $projectPathInput.val();
if (!hasExistingProjectName) {
$projectNameInput.val(convertToTitleCase(humanize(slug, '[-_]')));
}
};
const setProjectNamePathHandlers = ($projectNameInput, $projectPathInput) => {
$projectNameInput.off('keyup change').on('keyup change', () => {
onProjectNameChange($projectNameInput, $projectPathInput);
hasUserDefinedProjectName = $projectNameInput.val().trim().length > 0;
hasUserDefinedProjectPath = $projectPathInput.val().trim().length > 0;
});
$projectPathInput.off('keyup change').on('keyup change', () => {
onProjectPathChange($projectNameInput, $projectPathInput, hasUserDefinedProjectName);
hasUserDefinedProjectPath = $projectPathInput.val().trim().length > 0;
});
};
const deriveProjectPathFromUrl = $projectImportUrl => { const deriveProjectPathFromUrl = $projectImportUrl => {
const $currentProjectName = $projectImportUrl
.parents('.toggle-import-form')
.find('#project_name');
const $currentProjectPath = $projectImportUrl const $currentProjectPath = $projectImportUrl
.parents('.toggle-import-form') .parents('.toggle-import-form')
.find('#project_path'); .find('#project_path');
if (hasUserDefinedProjectPath) { if (hasUserDefinedProjectPath) {
return; return;
} }
...@@ -30,14 +61,10 @@ const deriveProjectPathFromUrl = $projectImportUrl => { ...@@ -30,14 +61,10 @@ const deriveProjectPathFromUrl = $projectImportUrl => {
const pathMatch = /\/([^/]+)$/.exec(importUrl); const pathMatch = /\/([^/]+)$/.exec(importUrl);
if (pathMatch) { if (pathMatch) {
$currentProjectPath.val(pathMatch[1]); $currentProjectPath.val(pathMatch[1]);
onProjectPathChange($currentProjectName, $currentProjectPath, false);
} }
}; };
const onProjectNameChange = ($projectNameInput, $projectPathInput) => {
const slug = slugify($projectNameInput.val());
$projectPathInput.val(slug);
};
const bindEvents = () => { const bindEvents = () => {
const $newProjectForm = $('#new_project'); const $newProjectForm = $('#new_project');
const $projectImportUrl = $('#project_import_url'); const $projectImportUrl = $('#project_import_url');
...@@ -202,10 +229,7 @@ const bindEvents = () => { ...@@ -202,10 +229,7 @@ const bindEvents = () => {
const $activeTabProjectName = $('.tab-pane.active #project_name'); const $activeTabProjectName = $('.tab-pane.active #project_name');
const $activeTabProjectPath = $('.tab-pane.active #project_path'); const $activeTabProjectPath = $('.tab-pane.active #project_path');
$activeTabProjectName.focus(); $activeTabProjectName.focus();
$activeTabProjectName.keyup(() => { setProjectNamePathHandlers($activeTabProjectName, $activeTabProjectPath);
onProjectNameChange($activeTabProjectName, $activeTabProjectPath);
hasUserDefinedProjectPath = $activeTabProjectPath.val().trim().length > 0;
});
} }
$useTemplateBtn.on('change', chooseTemplate); $useTemplateBtn.on('change', chooseTemplate);
...@@ -220,26 +244,24 @@ const bindEvents = () => { ...@@ -220,26 +244,24 @@ const bindEvents = () => {
$projectPath.val($projectPath.val().trim()); $projectPath.val($projectPath.val().trim());
}); });
$projectPath.on('keyup', () => {
hasUserDefinedProjectPath = $projectPath.val().trim().length > 0;
});
$projectImportUrl.keyup(() => deriveProjectPathFromUrl($projectImportUrl)); $projectImportUrl.keyup(() => deriveProjectPathFromUrl($projectImportUrl));
$('.js-import-git-toggle-button').on('click', () => { $('.js-import-git-toggle-button').on('click', () => {
const $projectMirror = $('#project_mirror'); const $projectMirror = $('#project_mirror');
$projectMirror.attr('disabled', !$projectMirror.attr('disabled')); $projectMirror.attr('disabled', !$projectMirror.attr('disabled'));
setProjectNamePathHandlers(
$('.tab-pane.active #project_name'),
$('.tab-pane.active #project_path'),
);
}); });
$projectName.on('keyup change', () => { setProjectNamePathHandlers($projectName, $projectPath);
onProjectNameChange($projectName, $projectPath);
hasUserDefinedProjectPath = $projectPath.val().trim().length > 0;
});
}; };
export default { export default {
bindEvents, bindEvents,
deriveProjectPathFromUrl, deriveProjectPathFromUrl,
onProjectNameChange, onProjectNameChange,
onProjectPathChange,
}; };
...@@ -461,6 +461,7 @@ img.emoji { ...@@ -461,6 +461,7 @@ img.emoji {
.w-3rem { width: 3rem; } .w-3rem { width: 3rem; }
.w-15p { width: 15%; } .w-15p { width: 15%; }
.w-30p { width: 30%; } .w-30p { width: 30%; }
.w-60p { width: 60%; }
.w-70p { width: 70%; } .w-70p { width: 70%; }
.h-12em { height: 12em; } .h-12em { height: 12em; }
......
...@@ -17,6 +17,8 @@ class DeploymentsFinder ...@@ -17,6 +17,8 @@ class DeploymentsFinder
def execute def execute
items = init_collection items = init_collection
items = by_updated_at(items) items = by_updated_at(items)
items = by_environment(items)
items = by_status(items)
sort(items) sort(items)
end end
...@@ -58,6 +60,24 @@ class DeploymentsFinder ...@@ -58,6 +60,24 @@ class DeploymentsFinder
items items
end end
def by_environment(items)
if params[:environment].present?
items.for_environment_name(params[:environment])
else
items
end
end
def by_status(items)
return items unless params[:status].present?
unless Deployment.statuses.key?(params[:status])
raise ArgumentError, "The deployment status #{params[:status]} is invalid"
end
items.for_status(params[:status])
end
def sort_params def sort_params
order_by = ALLOWED_SORT_VALUES.include?(params[:order_by]) ? params[:order_by] : DEFAULT_SORT_VALUE order_by = ALLOWED_SORT_VALUES.include?(params[:order_by]) ? params[:order_by] : DEFAULT_SORT_VALUE
order_direction = ALLOWED_SORT_DIRECTIONS.include?(params[:sort]) ? params[:sort] : DEFAULT_SORT_DIRECTION order_direction = ALLOWED_SORT_DIRECTIONS.include?(params[:sort]) ? params[:sort] : DEFAULT_SORT_DIRECTION
......
...@@ -30,6 +30,11 @@ class Deployment < ApplicationRecord ...@@ -30,6 +30,11 @@ class Deployment < ApplicationRecord
delegate :name, to: :environment, prefix: true delegate :name, to: :environment, prefix: true
scope :for_environment, -> (environment) { where(environment_id: environment) } scope :for_environment, -> (environment) { where(environment_id: environment) }
scope :for_environment_name, -> (name) do
joins(:environment).where(environments: { name: name })
end
scope :for_status, -> (status) { where(status: status) }
scope :visible, -> { where(status: %i[running success failed canceled]) } scope :visible, -> { where(status: %i[running success failed canceled]) }
......
...@@ -57,6 +57,8 @@ class Group < Namespace ...@@ -57,6 +57,8 @@ class Group < Namespace
has_one :import_export_upload has_one :import_export_upload
has_many :import_failures, inverse_of: :group
accepts_nested_attributes_for :variables, allow_destroy: true accepts_nested_attributes_for :variables, allow_destroy: true
validate :visibility_level_allowed_by_projects validate :visibility_level_allowed_by_projects
......
...@@ -2,6 +2,8 @@ ...@@ -2,6 +2,8 @@
class ImportFailure < ApplicationRecord class ImportFailure < ApplicationRecord
belongs_to :project belongs_to :project
belongs_to :group
validates :project, presence: true validates :project, presence: true, unless: :group
validates :group, presence: true, unless: :project
end end
---
title: 'Resolve Create new project: Auto-populate project slug string to project name
if name is empty'
merge_request: 22627
author:
type: changed
---
title: Migrate issue trackers data
merge_request: 18639
author:
type: other
---
title: Hide mirror admin actions from developers
merge_request: 21569
author:
type: fixed
---
title: Add retry logic for failures during import
merge_request: 22265
author:
type: added
---
title: Filter deployments using the environment & status
merge_request: 22996
author:
type: added
---
title: Resolve Sentry errors from error tracking list
merge_request: 23135
author:
type: added
---
title: Fix analytics tracking for new merge request notes
merge_request: 23273
author:
type: fixed
# frozen_string_literal: true
Retriable.configure do |config|
config.contexts[:relation_import] = {
tries: ENV.fetch('RELATION_IMPORT_TRIES', 3).to_i,
base_interval: ENV.fetch('RELATION_IMPORT_BASE_INTERVAL', 0.5).to_f,
multiplier: ENV.fetch('RELATION_IMPORT_MULTIPLIER', 1.5).to_f,
rand_factor: ENV.fetch('RELATION_IMPORT_RAND_FACTOR', 0.5).to_f,
on: Gitlab::ImportExport::ImportFailureService::RETRIABLE_EXCEPTIONS
}
end
# frozen_string_literal: true
class AddRetryCountAndGroupIdToImportFailures < ActiveRecord::Migration[5.2]
DOWNTIME = false
def change
add_column :import_failures, :retry_count, :integer
add_column :import_failures, :group_id, :integer
change_column_null(:import_failures, :project_id, true)
end
end
# frozen_string_literal: true
class AddGroupIndexAndFkToImportFailures < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
GROUP_INDEX = 'index_import_failures_on_group_id_not_null'.freeze
disable_ddl_transaction!
def up
add_concurrent_index(:import_failures, :group_id, where: 'group_id IS NOT NULL', name: GROUP_INDEX)
add_concurrent_foreign_key(:import_failures, :namespaces, column: :group_id)
end
def down
remove_foreign_key(:import_failures, column: :group_id)
remove_concurrent_index_by_name(:import_failures, GROUP_INDEX)
end
end
# frozen_string_literal: true
class UpdateProjectIndexToImportFailures < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
# Set this constant to true if this migration requires downtime.
DOWNTIME = false
PROJECT_INDEX_OLD = 'index_import_failures_on_project_id'.freeze
PROJECT_INDEX_NEW = 'index_import_failures_on_project_id_not_null'.freeze
disable_ddl_transaction!
def up
add_concurrent_index(:import_failures, :project_id, where: 'project_id IS NOT NULL', name: PROJECT_INDEX_NEW)
remove_concurrent_index_by_name(:import_failures, PROJECT_INDEX_OLD)
end
def down
add_concurrent_index(:import_failures, :project_id, name: PROJECT_INDEX_OLD)
remove_concurrent_index_by_name(:import_failures, PROJECT_INDEX_NEW)
end
end
# frozen_string_literal: true
class MigrateIssueTrackersData < ActiveRecord::Migration[5.1]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
INTERVAL = 3.minutes.to_i
BATCH_SIZE = 5_000
MIGRATION = 'MigrateIssueTrackersSensitiveData'
disable_ddl_transaction!
class Service < ActiveRecord::Base
self.table_name = 'services'
self.inheritance_column = :_type_disabled
include ::EachBatch
end
def up
relation = Service.where(category: 'issue_tracker').where("properties IS NOT NULL AND properties != '{}' AND properties != ''")
queue_background_migration_jobs_by_range_at_intervals(relation,
MIGRATION,
INTERVAL,
batch_size: BATCH_SIZE)
end
def down
# no need
end
end
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
# #
# It's strongly recommended that you check this file into your version control system. # It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_01_14_204949) do ActiveRecord::Schema.define(version: 2020_01_17_112554) do
# These are extensions that must be enabled in order to support this database # These are extensions that must be enabled in order to support this database
enable_extension "pg_trgm" enable_extension "pg_trgm"
...@@ -2037,14 +2037,17 @@ ActiveRecord::Schema.define(version: 2020_01_14_204949) do ...@@ -2037,14 +2037,17 @@ ActiveRecord::Schema.define(version: 2020_01_14_204949) do
create_table "import_failures", force: :cascade do |t| create_table "import_failures", force: :cascade do |t|
t.integer "relation_index" t.integer "relation_index"
t.bigint "project_id", null: false t.bigint "project_id"
t.datetime_with_timezone "created_at", null: false t.datetime_with_timezone "created_at", null: false
t.string "relation_key", limit: 64 t.string "relation_key", limit: 64
t.string "exception_class", limit: 128 t.string "exception_class", limit: 128
t.string "correlation_id_value", limit: 128 t.string "correlation_id_value", limit: 128
t.string "exception_message", limit: 255 t.string "exception_message", limit: 255
t.integer "retry_count"
t.integer "group_id"
t.index ["correlation_id_value"], name: "index_import_failures_on_correlation_id_value" t.index ["correlation_id_value"], name: "index_import_failures_on_correlation_id_value"
t.index ["project_id"], name: "index_import_failures_on_project_id" t.index ["group_id"], name: "index_import_failures_on_group_id_not_null", where: "(group_id IS NOT NULL)"
t.index ["project_id"], name: "index_import_failures_on_project_id_not_null", where: "(project_id IS NOT NULL)"
end end
create_table "index_statuses", id: :serial, force: :cascade do |t| create_table "index_statuses", id: :serial, force: :cascade do |t|
...@@ -4645,6 +4648,7 @@ ActiveRecord::Schema.define(version: 2020_01_14_204949) do ...@@ -4645,6 +4648,7 @@ ActiveRecord::Schema.define(version: 2020_01_14_204949) do
add_foreign_key "identities", "saml_providers", name: "fk_aade90f0fc", on_delete: :cascade add_foreign_key "identities", "saml_providers", name: "fk_aade90f0fc", on_delete: :cascade
add_foreign_key "import_export_uploads", "namespaces", column: "group_id", name: "fk_83319d9721", on_delete: :cascade add_foreign_key "import_export_uploads", "namespaces", column: "group_id", name: "fk_83319d9721", on_delete: :cascade
add_foreign_key "import_export_uploads", "projects", on_delete: :cascade add_foreign_key "import_export_uploads", "projects", on_delete: :cascade
add_foreign_key "import_failures", "namespaces", column: "group_id", name: "fk_24b824da43", on_delete: :cascade
add_foreign_key "index_statuses", "projects", name: "fk_74b2492545", on_delete: :cascade add_foreign_key "index_statuses", "projects", name: "fk_74b2492545", on_delete: :cascade
add_foreign_key "insights", "namespaces", on_delete: :cascade add_foreign_key "insights", "namespaces", on_delete: :cascade
add_foreign_key "insights", "projects", on_delete: :cascade add_foreign_key "insights", "projects", on_delete: :cascade
......
...@@ -15,6 +15,16 @@ GET /projects/:id/deployments ...@@ -15,6 +15,16 @@ GET /projects/:id/deployments
| `sort` | string | no | Return deployments sorted in `asc` or `desc` order. Default is `asc` | | `sort` | string | no | Return deployments sorted in `asc` or `desc` order. Default is `asc` |
| `updated_after` | datetime | no | Return deployments updated after the specified date | | `updated_after` | datetime | no | Return deployments updated after the specified date |
| `updated_before` | datetime | no | Return deployments updated before the specified date | | `updated_before` | datetime | no | Return deployments updated before the specified date |
| `environment` | string | no | The name of the environment to filter deployments by |
| `status` | string | no | The status to filter deployments by |
The status attribute can be one of the following values:
- created
- running
- success
- failed
- canceled
```bash ```bash
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/deployments" curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/deployments"
......
...@@ -31,7 +31,14 @@ To create a new blank project on the **New project** page: ...@@ -31,7 +31,14 @@ To create a new blank project on the **New project** page:
1. On the **Blank project** tab, provide the following information: 1. On the **Blank project** tab, provide the following information:
- The name of your project in the **Project name** field. You can't use - The name of your project in the **Project name** field. You can't use
special characters, but you can use spaces, hyphens, underscores or even special characters, but you can use spaces, hyphens, underscores or even
emoji. emoji. When adding the name, the **Project slug** will auto populate.
The slug is what the GitLab instance will use as the URL path to the project.
If you want a different slug, input the project name first,
then change the slug after.
- The path to your project in the **Project slug** field. This is the URL
path for your project that the GitLab instance will use. If the
**Project name** is blank, it will auto populate when you fill in
the **Project slug**.
- The **Project description (optional)** field enables you to enter a - The **Project description (optional)** field enables you to enter a
description for your project's dashboard, which will help others description for your project's dashboard, which will help others
understand what your project is about. Though it's not required, it's a good understand what your project is about. Though it's not required, it's a good
......
...@@ -407,6 +407,7 @@ GFM will recognize the following: ...@@ -407,6 +407,7 @@ GFM will recognize the following:
| merge request | `!123` | `namespace/project!123` | `project!123` | | merge request | `!123` | `namespace/project!123` | `project!123` |
| snippet | `$123` | `namespace/project$123` | `project$123` | | snippet | `$123` | `namespace/project$123` | `project$123` |
| epic **(ULTIMATE)** | `&123` | `group1/subgroup&123` | | | epic **(ULTIMATE)** | `&123` | `group1/subgroup&123` | |
| design **(PREMIUM)** | `#123[file.jpg]` or `#123["file.png"]` | `group1/subgroup#123[file.png]` | `project#123[file.png]` |
| label by ID | `~123` | `namespace/project~123` | `project~123` | | label by ID | `~123` | `namespace/project~123` | `project~123` |
| one-word label by name | `~bug` | `namespace/project~bug` | `project~bug` | | one-word label by name | `~bug` | `namespace/project~bug` | `project~bug` |
| multi-word label by name | `~"feature request"` | `namespace/project~"feature request"` | `project~"feature request"` | | multi-word label by name | `~"feature request"` | `namespace/project~"feature request"` | `project~"feature request"` |
......
...@@ -37,6 +37,13 @@ Design Management requires that projects are using ...@@ -37,6 +37,13 @@ Design Management requires that projects are using
[hashed storage](../../../administration/repository_storage_types.html#hashed-storage) [hashed storage](../../../administration/repository_storage_types.html#hashed-storage)
(the default storage type since v10.0). (the default storage type since v10.0).
### Feature Flags
- Reference Parsing
Designs support short references in Markdown, but this needs to be enabled by setting
the `:design_management_reference_filter_gfm_pipeline` feature flag.
## Limitations ## Limitations
- Files uploaded must have a file extension of either `png`, `jpg`, `jpeg`, `gif`, `bmp`, `tiff` or `ico`. - Files uploaded must have a file extension of either `png`, `jpg`, `jpeg`, `gif`, `bmp`, `tiff` or `ico`.
...@@ -137,3 +144,32 @@ Different discussions have different badge numbers: ...@@ -137,3 +144,32 @@ Different discussions have different badge numbers:
From GitLab 12.5 on, new annotations will be outputted to the issue activity, From GitLab 12.5 on, new annotations will be outputted to the issue activity,
so that everyone involved can participate in the discussion. so that everyone involved can participate in the discussion.
## References
GitLab Flavored Markdown supports references to designs. The syntax for this is:
`#123[file.jpg]` - the issue reference, with the filename in square braces
File names may contain a variety of odd characters, so two escaping mechanisms are supported:
### Quoting
File names may be quoted with double quotation marks, eg:
`#123["file.jpg"]`
This is useful if, for instance, your filename has square braces in its name. In this scheme, all
double quotation marks in the file name need to be escaped with backslashes, and backslashes need
to be escaped likewise:
`#123["with with \"quote\" marks and a backslash \\.png"]`
### Base64 Encoding
In the case of file names that include HTML elements, you will need to escape these names to avoid
them being processed as HTML literals. To do this, we support base64 encoding, eg.
The file `<a>.jpg` can be referenced as `#123[base64:PGE+LmpwZwo=]`
Obviously we would advise against using such filenames.
...@@ -21,6 +21,14 @@ module API ...@@ -21,6 +21,14 @@ module API
optional :sort, type: String, values: DeploymentsFinder::ALLOWED_SORT_DIRECTIONS, default: DeploymentsFinder::DEFAULT_SORT_DIRECTION, desc: 'Sort by asc (ascending) or desc (descending)' optional :sort, type: String, values: DeploymentsFinder::ALLOWED_SORT_DIRECTIONS, default: DeploymentsFinder::DEFAULT_SORT_DIRECTION, desc: 'Sort by asc (ascending) or desc (descending)'
optional :updated_after, type: DateTime, desc: 'Return deployments updated after the specified date' optional :updated_after, type: DateTime, desc: 'Return deployments updated after the specified date'
optional :updated_before, type: DateTime, desc: 'Return deployments updated before the specified date' optional :updated_before, type: DateTime, desc: 'Return deployments updated before the specified date'
optional :environment,
type: String,
desc: 'The name of the environment to filter deployments by'
optional :status,
type: String,
values: Deployment.statuses.keys,
desc: 'The status to filter deployments by'
end end
get ':id/deployments' do get ':id/deployments' do
......
...@@ -43,15 +43,46 @@ module Banzai ...@@ -43,15 +43,46 @@ module Banzai
# Returns a String replaced with the return of the block. # Returns a String replaced with the return of the block.
def self.references_in(text, pattern = object_class.reference_pattern) def self.references_in(text, pattern = object_class.reference_pattern)
text.gsub(pattern) do |match| text.gsub(pattern) do |match|
symbol = $~[object_sym] if ident = identifier($~)
if object_class.reference_valid?(symbol) yield match, ident, $~[:project], $~[:namespace], $~
yield match, symbol.to_i, $~[:project], $~[:namespace], $~
else else
match match
end end
end end
end end
def self.identifier(match_data)
symbol = symbol_from_match(match_data)
parse_symbol(symbol, match_data) if object_class.reference_valid?(symbol)
end
def identifier(match_data)
self.class.identifier(match_data)
end
def self.symbol_from_match(match)
key = object_sym
match[key] if match.names.include?(key.to_s)
end
# Transform a symbol extracted from the text to a meaningful value
# In most cases these will be integers, so we call #to_i by default
#
# This method has the contract that if a string `ref` refers to a
# record `record`, then `parse_symbol(ref) == record_identifier(record)`.
def self.parse_symbol(symbol, match_data)
symbol.to_i
end
# We assume that most classes are identifying records by ID.
#
# This method has the contract that if a string `ref` refers to a
# record `record`, then `class.parse_symbol(ref) == record_identifier(record)`.
def record_identifier(record)
record.id
end
def object_class def object_class
self.class.object_class self.class.object_class
end end
...@@ -265,8 +296,10 @@ module Banzai ...@@ -265,8 +296,10 @@ module Banzai
@references_per[parent_type] ||= begin @references_per[parent_type] ||= begin
refs = Hash.new { |hash, key| hash[key] = Set.new } refs = Hash.new { |hash, key| hash[key] = Set.new }
regex = [
regex = Regexp.union(object_class.reference_pattern, object_class.link_reference_pattern) object_class.reference_pattern,
object_class.link_reference_pattern
].compact.reduce { |a, b| Regexp.union(a, b) }
nodes.each do |node| nodes.each do |node|
node.to_html.scan(regex) do node.to_html.scan(regex) do
...@@ -276,8 +309,9 @@ module Banzai ...@@ -276,8 +309,9 @@ module Banzai
full_group_path($~[:group]) full_group_path($~[:group])
end end
symbol = $~[object_sym] if ident = identifier($~)
refs[path] << symbol if object_class.reference_valid?(symbol) refs[path] << ident
end
end end
end end
......
...@@ -37,6 +37,11 @@ module Banzai ...@@ -37,6 +37,11 @@ module Banzai
end end
end end
# The default behaviour is `#to_i` - we just pass the hash through.
def self.parse_symbol(sha_hash, _match)
sha_hash
end
def url_for_object(commit, project) def url_for_object(commit, project)
h = Gitlab::Routing.url_helpers h = Gitlab::Routing.url_helpers
...@@ -65,10 +70,6 @@ module Banzai ...@@ -65,10 +70,6 @@ module Banzai
private private
def record_identifier(record)
record.id
end
def parent_records(parent, ids) def parent_records(parent, ids)
parent.commits_by(oids: ids.to_a) parent.commits_by(oids: ids.to_a)
end end
......
...@@ -5,6 +5,14 @@ module Gitlab ...@@ -5,6 +5,14 @@ module Gitlab
class ApplicationContext class ApplicationContext
include Gitlab::Utils::LazyAttributes include Gitlab::Utils::LazyAttributes
# Pairs an attribute name with the class its value is expected to be.
Attribute = Struct.new(:name, :type)

# The attributes an ApplicationContext may carry, with their expected types.
APPLICATION_ATTRIBUTES = [
  Attribute.new(:project, Project),
  Attribute.new(:namespace, Namespace),
  Attribute.new(:user, User)
].freeze
def self.with_context(args, &block) def self.with_context(args, &block)
application_context = new(**args) application_context = new(**args)
Labkit::Context.with_context(application_context.to_lazy_hash, &block) Labkit::Context.with_context(application_context.to_lazy_hash, &block)
...@@ -15,21 +23,36 @@ module Gitlab ...@@ -15,21 +23,36 @@ module Gitlab
Labkit::Context.push(application_context.to_lazy_hash) Labkit::Context.push(application_context.to_lazy_hash)
end end
def initialize(user: nil, project: nil, namespace: nil) def initialize(**args)
@user, @project, @namespace = user, project, namespace unknown_attributes = args.keys - APPLICATION_ATTRIBUTES.map(&:name)
raise ArgumentError, "#{unknown_attributes} are not known keys" if unknown_attributes.any?
@set_values = args.keys
assign_attributes(args)
end end
def to_lazy_hash def to_lazy_hash
{ user: -> { username }, {}.tap do |hash|
project: -> { project_path }, hash[:user] = -> { username } if set_values.include?(:user)
root_namespace: -> { root_namespace_path } } hash[:project] = -> { project_path } if set_values.include?(:project)
hash[:root_namespace] = -> { root_namespace_path } if include_namespace?
end
end end
private private
lazy_attr_reader :user, type: User attr_reader :set_values
lazy_attr_reader :project, type: Project
lazy_attr_reader :namespace, type: Namespace APPLICATION_ATTRIBUTES.each do |attr|
lazy_attr_reader attr.name, type: attr.type
end
def assign_attributes(values)
values.slice(*APPLICATION_ATTRIBUTES.map(&:name)).each do |name, value|
instance_variable_set("@#{name}", value)
end
end
def project_path def project_path
project&.full_path project&.full_path
...@@ -46,5 +69,9 @@ module Gitlab ...@@ -46,5 +69,9 @@ module Gitlab
project&.full_path_components&.first project&.full_path_components&.first
end end
end end
# A root namespace can be derived from an explicitly provided namespace
# or from a project, so include it whenever either key was set.
def include_namespace?
  set_values.include?(:namespace) || set_values.include?(:project)
end
end end
end end
# frozen_string_literal: true

module Gitlab
  module BackgroundMigration
    # This migration takes all issue trackers
    # and move data from properties to data field tables (jira_tracker_data and issue_tracker_data)
    class MigrateIssueTrackersSensitiveData
      delegate :select_all, :execute, :quote_string, to: :connection

      # we need to define this class and set fields encryption
      class IssueTrackerData < ApplicationRecord
        self.table_name = 'issue_tracker_data'

        # Encryption settings for the sensitive URL fields. NOTE(review):
        # presumably these mirror the application's real IssueTrackerData
        # model so the rows stay readable — confirm against the model.
        def self.encryption_options
          {
            key: Settings.attr_encrypted_db_key_base_32,
            encode: true,
            mode: :per_attribute_iv,
            algorithm: 'aes-256-gcm'
          }
        end

        attr_encrypted :project_url, encryption_options
        attr_encrypted :issues_url, encryption_options
        attr_encrypted :new_issue_url, encryption_options
      end

      # we need to define this class and set fields encryption
      class JiraTrackerData < ApplicationRecord
        self.table_name = 'jira_tracker_data'

        # Same encryption scheme as IssueTrackerData above, applied to the
        # Jira-specific credential fields.
        def self.encryption_options
          {
            key: Settings.attr_encrypted_db_key_base_32,
            encode: true,
            mode: :per_attribute_iv,
            algorithm: 'aes-256-gcm'
          }
        end

        attr_encrypted :url, encryption_options
        attr_encrypted :api_url, encryption_options
        attr_encrypted :username, encryption_options
        attr_encrypted :password, encryption_options
      end

      # Migrates services with `start_id <= id <= stop_id` that:
      # - belong to the 'issue_tracker' category,
      # - have non-empty `properties`, and
      # - do not already have a row in either data table (NOT EXISTS).
      # After bulk-inserting the data rows, copies title/description out of
      # the JSON properties for the services that were migrated.
      def perform(start_id, stop_id)
        columns = 'id, properties, title, description, type'
        batch_condition = "id >= #{start_id} AND id <= #{stop_id} AND category = 'issue_tracker' \
          AND properties IS NOT NULL AND properties != '{}' AND properties != ''"

        # Matches services whose data was already moved into either table.
        data_subselect = "SELECT 1 \
          FROM jira_tracker_data \
          WHERE jira_tracker_data.service_id = services.id \
          UNION SELECT 1 \
          FROM issue_tracker_data \
          WHERE issue_tracker_data.service_id = services.id"

        query = "SELECT #{columns} FROM services WHERE #{batch_condition} AND NOT EXISTS (#{data_subselect})"

        migrated_ids = []
        data_to_insert(query).each do |table, data|
          service_ids = data.map { |s| s['service_id'] }

          next if service_ids.empty?

          migrated_ids += service_ids
          Gitlab::Database.bulk_insert(table, data)
        end

        return if migrated_ids.empty?

        move_title_description(migrated_ids)
      end

      private

      # Builds the rows to insert, keyed by target table name.
      # Services whose `properties` column is not valid JSON are logged
      # and skipped rather than aborting the whole batch.
      def data_to_insert(query)
        data = { 'jira_tracker_data' => [], 'issue_tracker_data' => [] }
        select_all(query).each do |service|
          begin
            properties = JSON.parse(service['properties'])
          rescue JSON::ParserError
            logger.warn(
              message: 'Properties data not parsed - invalid json',
              service_id: service['id'],
              properties: service['properties']
            )
            next
          end

          # JiraService rows go to jira_tracker_data; every other issue
          # tracker type goes to issue_tracker_data.
          if service['type'] == 'JiraService'
            row = data_row(JiraTrackerData, jira_mapping(properties), service)
            key = 'jira_tracker_data'
          else
            row = data_row(IssueTrackerData, issue_tracker_mapping(properties), service)
            key = 'issue_tracker_data'
          end

          data[key] << row if row
        end

        data
      end

      # One attribute hash for a data-table row: the mapped (and thereby
      # encrypted) attributes restricted to actual columns, minus nils,
      # plus the owning service_id and timestamps.
      def data_row(klass, mapping, service)
        base_params = { service_id: service['id'], created_at: Time.current, updated_at: Time.current }
        klass.new(mapping).slice(*klass.column_names).compact.merge(base_params)
      end

      # Copies title/description from the JSON `properties` column for the
      # migrated services, only where they are not already set. The ids
      # interpolated here come straight from the services table query above.
      def move_title_description(service_ids)
        query = "UPDATE services SET \
          title = cast(properties as json)->>'title', \
          description = cast(properties as json)->>'description' \
          WHERE id IN (#{service_ids.join(',')}) AND title IS NULL AND description IS NULL"

        execute(query)
      end

      # Maps JSON properties onto JiraTrackerData attributes.
      def jira_mapping(properties)
        {
          url: properties['url'],
          api_url: properties['api_url'],
          username: properties['username'],
          password: properties['password']
        }
      end

      # Maps JSON properties onto IssueTrackerData attributes.
      def issue_tracker_mapping(properties)
        {
          project_url: properties['project_url'],
          issues_url: properties['issues_url'],
          new_issue_url: properties['new_issue_url']
        }
      end

      def connection
        @connection ||= ActiveRecord::Base.connection
      end

      def logger
        @logger ||= Gitlab::BackgroundMigration::Logger.build
      end
    end
  end
end
# frozen_string_literal: true

module Gitlab
  module ImportExport
    # Records failures that occur while importing individual relations of an
    # importable record, and wraps relation saves in a retry policy for
    # intermittent errors.
    class ImportFailureService
      # Exceptions considered intermittent and therefore worth retrying.
      RETRIABLE_EXCEPTIONS = [GRPC::DeadlineExceeded, ActiveRecord::QueryCanceled].freeze

      attr_reader :importable

      # importable - a record exposing an `import_failures` association; the
      #              association reflection supplies the foreign-key column
      #              used when persisting failures.
      def initialize(importable)
        @importable = importable
        @association = importable.association(:import_failures)
      end

      # Executes the given block under the :relation_import retry policy,
      # logging an ImportFailure for each retry attempt.
      def with_retry(relation_key, relation_index, &block)
        retry_listener = lambda do |exception, retry_count, *_args|
          log_import_failure(relation_key, relation_index, exception, retry_count)
        end

        Retriable.with_context(:relation_import, on_retry: retry_listener, &block)
      end

      # Reports the exception to error tracking and persists an ImportFailure
      # row linked to the importable record.
      def log_import_failure(relation_key, relation_index, exception, retry_count = 0)
        extra = {
          relation_key: relation_key,
          relation_index: relation_index,
          retry_count: retry_count,
          importable_column_name => importable.id
        }

        Gitlab::ErrorTracking.track_exception(exception, extra)

        failure_attributes = extra.merge(
          exception_class: exception.class.to_s,
          exception_message: exception.message.truncate(255),
          correlation_id_value: Labkit::Correlation::CorrelationId.current_or_new_id
        )

        ImportFailure.create(failure_attributes)
      end

      private

      # Foreign-key column symbol for the import_failures association
      # (e.g. :project_id or :group_id), memoized.
      def importable_column_name
        @importable_column_name ||= @association.reflection.foreign_key.to_sym
      end
    end
  end
end
...@@ -72,25 +72,18 @@ module Gitlab ...@@ -72,25 +72,18 @@ module Gitlab
return if importable_class == Project && group_model?(relation_object) return if importable_class == Project && group_model?(relation_object)
relation_object.assign_attributes(importable_class_sym => @importable) relation_object.assign_attributes(importable_class_sym => @importable)
relation_object.save!
import_failure_service.with_retry(relation_key, relation_index) do
relation_object.save!
end
save_id_mapping(relation_key, data_hash, relation_object) save_id_mapping(relation_key, data_hash, relation_object)
rescue => e rescue => e
log_import_failure(relation_key, relation_index, e) import_failure_service.log_import_failure(relation_key, relation_index, e)
end end
def log_import_failure(relation_key, relation_index, exception) def import_failure_service
Gitlab::ErrorTracking.track_exception(exception, @import_failure_service ||= ImportFailureService.new(@importable)
project_id: @importable.id, relation_key: relation_key, relation_index: relation_index)
ImportFailure.create(
project: @importable,
relation_key: relation_key,
relation_index: relation_index,
exception_class: exception.class.to_s,
exception_message: exception.message.truncate(255),
correlation_id_value: Labkit::Correlation::CorrelationId.current_or_new_id
)
end end
# Older, serialized CI pipeline exports may only have a # Older, serialized CI pipeline exports may only have a
......
...@@ -25,6 +25,42 @@ describe DeploymentsFinder do ...@@ -25,6 +25,42 @@ describe DeploymentsFinder do
is_expected.to match_array([deployment_1]) is_expected.to match_array([deployment_1])
end end
end end
context 'when the environment name is specified' do
let!(:environment1) { create(:environment, project: project) }
let!(:environment2) { create(:environment, project: project) }
let!(:deployment1) do
create(:deployment, project: project, environment: environment1)
end
let!(:deployment2) do
create(:deployment, project: project, environment: environment2)
end
let(:params) { { environment: environment1.name } }
it 'returns deployments for the given environment' do
is_expected.to match_array([deployment1])
end
end
context 'when the deployment status is specified' do
let!(:deployment1) { create(:deployment, :success, project: project) }
let!(:deployment2) { create(:deployment, :failed, project: project) }
let(:params) { { status: 'success' } }
it 'returns deployments for the given environment' do
is_expected.to match_array([deployment1])
end
end
context 'when using an invalid deployment status' do
let(:params) { { status: 'kittens' } }
it 'raises ArgumentError' do
expect { subject }.to raise_error(ArgumentError)
end
end
end end
describe 'ordering' do describe 'ordering' do
......
...@@ -143,10 +143,14 @@ describe('ErrorTrackingList', () => { ...@@ -143,10 +143,14 @@ describe('ErrorTrackingList', () => {
}); });
it('each error in the list should have an ignore button', () => { it('each error in the list should have an ignore button', () => {
const error = wrapper.findAll('tbody tr'); findErrorListRows().wrappers.forEach(row => {
expect(row.contains('glicon-stub[name="eye-slash"]')).toBe(true);
});
});
error.wrappers.forEach((_, index) => { it('each error in the list should have a resolve button', () => {
expect(error.at(index).exists('glicon-stub[name="eye-slash"]')).toBe(true); findErrorListRows().wrappers.forEach(row => {
expect(row.contains('glicon-stub[name="check-circle"]')).toBe(true);
}); });
}); });
...@@ -231,8 +235,7 @@ describe('ErrorTrackingList', () => { ...@@ -231,8 +235,7 @@ describe('ErrorTrackingList', () => {
}); });
it('sends the "ignored" status and error ID', () => { it('sends the "ignored" status and error ID', () => {
const ignoreButton = wrapper.find({ ref: 'ignoreError' }); wrapper.find({ ref: 'ignoreError' }).trigger('click');
ignoreButton.trigger('click');
expect(actions.updateStatus).toHaveBeenCalledWith( expect(actions.updateStatus).toHaveBeenCalledWith(
expect.anything(), expect.anything(),
{ {
...@@ -245,6 +248,34 @@ describe('ErrorTrackingList', () => { ...@@ -245,6 +248,34 @@ describe('ErrorTrackingList', () => {
}); });
}); });
describe('When the resolve button on an error is clicked', () => {
beforeEach(() => {
store.state.list.loading = false;
store.state.list.errors = errorsList;
mountComponent({
stubs: {
GlTable: false,
GlLink: false,
GlButton: false,
},
});
});
it('sends "resolved" status and error ID', () => {
wrapper.find({ ref: 'resolveError' }).trigger('click');
expect(actions.updateStatus).toHaveBeenCalledWith(
expect.anything(),
{
endpoint: '/project/test/-/error_tracking/3.json',
redirectUrl: '/error_tracking',
status: 'resolved',
},
undefined,
);
});
});
describe('When error tracking is disabled and user is not allowed to enable it', () => { describe('When error tracking is disabled and user is not allowed to enable it', () => {
beforeEach(() => { beforeEach(() => {
mountComponent({ mountComponent({
......
...@@ -27,6 +27,9 @@ describe('text_utility', () => { ...@@ -27,6 +27,9 @@ describe('text_utility', () => {
it('should remove underscores and uppercase the first letter', () => { it('should remove underscores and uppercase the first letter', () => {
expect(textUtils.humanize('foo_bar')).toEqual('Foo bar'); expect(textUtils.humanize('foo_bar')).toEqual('Foo bar');
}); });
it('should remove underscores and dashes and uppercase the first letter', () => {
expect(textUtils.humanize('foo_bar-foo', '[_-]')).toEqual('Foo bar foo');
});
}); });
describe('dasherize', () => { describe('dasherize', () => {
...@@ -52,14 +55,20 @@ describe('text_utility', () => { ...@@ -52,14 +55,20 @@ describe('text_utility', () => {
expect(textUtils.slugify(' a new project ')).toEqual('a-new-project'); expect(textUtils.slugify(' a new project ')).toEqual('a-new-project');
}); });
it('should only remove non-allowed special characters', () => { it('should only remove non-allowed special characters', () => {
expect(textUtils.slugify('test!_pro-ject~')).toEqual('test-_pro-ject-'); expect(textUtils.slugify('test!_pro-ject~')).toEqual('test-_pro-ject');
}); });
it('should squash multiple hypens', () => { it('should squash multiple hypens', () => {
expect(textUtils.slugify('test!!!!_pro-ject~')).toEqual('test-_pro-ject-'); expect(textUtils.slugify('test!!!!_pro-ject~')).toEqual('test-_pro-ject');
}); });
it('should return empty string if only non-allowed characters', () => { it('should return empty string if only non-allowed characters', () => {
expect(textUtils.slugify('здрасти')).toEqual(''); expect(textUtils.slugify('здрасти')).toEqual('');
}); });
it('should squash multiple separators', () => {
expect(textUtils.slugify('Test:-)')).toEqual('test');
});
it('should trim any separators from the beginning and end of the slug', () => {
expect(textUtils.slugify('-Test:-)-')).toEqual('test');
});
}); });
describe('stripHtml', () => { describe('stripHtml', () => {
...@@ -109,6 +118,12 @@ describe('text_utility', () => { ...@@ -109,6 +118,12 @@ describe('text_utility', () => {
}); });
}); });
describe('convertToTitleCase', () => {
it('converts sentence case to Sentence Case', () => {
expect(textUtils.convertToTitleCase('hello world')).toBe('Hello World');
});
});
describe('truncateSha', () => { describe('truncateSha', () => {
it('shortens SHAs to 8 characters', () => { it('shortens SHAs to 8 characters', () => {
expect(textUtils.truncateSha('verylongsha')).toBe('verylong'); expect(textUtils.truncateSha('verylongsha')).toBe('verylong');
......
...@@ -29,8 +29,8 @@ describe('Monitoring mutations', () => { ...@@ -29,8 +29,8 @@ describe('Monitoring mutations', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload); mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
const groups = getGroups(); const groups = getGroups();
expect(groups[0].key).toBe('response-metrics-nginx-ingress-vts--0'); expect(groups[0].key).toBe('response-metrics-nginx-ingress-vts-0');
expect(groups[1].key).toBe('system-metrics-kubernetes--1'); expect(groups[1].key).toBe('system-metrics-kubernetes-1');
}); });
it('normalizes values', () => { it('normalizes values', () => {
mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload); mutations[types.RECEIVE_METRICS_DATA_SUCCESS](stateCopy, payload);
......
...@@ -169,7 +169,7 @@ describe('Release block', () => { ...@@ -169,7 +169,7 @@ describe('Release block', () => {
releaseClone.tag_name = 'a dangerous tag name <script>alert("hello")</script>'; releaseClone.tag_name = 'a dangerous tag name <script>alert("hello")</script>';
return factory(releaseClone).then(() => { return factory(releaseClone).then(() => {
expect(wrapper.attributes().id).toBe('a-dangerous-tag-name-script-alert-hello-script-'); expect(wrapper.attributes().id).toBe('a-dangerous-tag-name-script-alert-hello-script');
}); });
}); });
......
import projectImportGitlab from '~/projects/project_import_gitlab_project'; import projectImportGitlab from '~/projects/project_import_gitlab_project';
describe('Import Gitlab project', () => { describe('Import Gitlab project', () => {
let projectName; const pathName = 'my-project';
beforeEach(() => { const projectName = 'My Project';
projectName = 'project';
window.history.pushState({}, null, `?path=${projectName}`); const setTestFixtures = url => {
window.history.pushState({}, null, url);
setFixtures(` setFixtures(`
<input class="js-path-name" /> <input class="js-path-name" />
<input class="js-project-name" />
`); `);
projectImportGitlab(); projectImportGitlab();
};
beforeEach(() => {
setTestFixtures(`?name=${projectName}&path=${pathName}`);
}); });
afterEach(() => { afterEach(() => {
window.history.pushState({}, null, ''); window.history.pushState({}, null, '');
}); });
describe('path name', () => { describe('project name', () => {
it('should fill in the project name derived from the previously filled project name', () => { it('should fill in the project name derived from the previously filled project name', () => {
expect(document.querySelector('.js-path-name').value).toEqual(projectName); expect(document.querySelector('.js-project-name').value).toEqual(projectName);
});
describe('empty path name', () => {
it('derives the path name from the previously filled project name', () => {
const alternateProjectName = 'My Alt Project';
const alternatePathName = 'my-alt-project';
setTestFixtures(`?name=${alternateProjectName}`);
expect(document.querySelector('.js-path-name').value).toEqual(alternatePathName);
});
});
});
describe('path name', () => {
it('should fill in the path name derived from the previously filled path name', () => {
expect(document.querySelector('.js-path-name').value).toEqual(pathName);
});
describe('empty project name', () => {
it('derives the project name from the previously filled path name', () => {
const alternateProjectName = 'My Alt Project';
const alternatePathName = 'my-alt-project';
setTestFixtures(`?path=${alternatePathName}`);
expect(document.querySelector('.js-project-name').value).toEqual(alternateProjectName);
});
}); });
}); });
}); });
...@@ -172,4 +172,34 @@ describe('New Project', () => { ...@@ -172,4 +172,34 @@ describe('New Project', () => {
expect($projectPath.val()).toEqual('my-dash-delimited-awesome-project'); expect($projectPath.val()).toEqual('my-dash-delimited-awesome-project');
}); });
}); });
describe('derivesProjectNameFromSlug', () => {
const dummyProjectPath = 'my-awesome-project';
const dummyProjectName = 'Original Awesome Project';
beforeEach(() => {
projectNew.bindEvents();
$projectPath.val('').change();
});
it('converts slug to humanized project name', () => {
$projectPath.val(dummyProjectPath);
projectNew.onProjectPathChange($projectName, $projectPath);
expect($projectName.val()).toEqual('My Awesome Project');
});
it('does not convert slug to humanized project name if a project name already exists', () => {
$projectName.val(dummyProjectName);
$projectPath.val(dummyProjectPath);
projectNew.onProjectPathChange(
$projectName,
$projectPath,
$projectName.val().trim().length > 0,
);
expect($projectName.val()).toEqual(dummyProjectName);
});
});
}); });
...@@ -3,30 +3,27 @@ ...@@ -3,30 +3,27 @@
require 'spec_helper' require 'spec_helper'
describe Banzai::Filter::AbstractReferenceFilter do describe Banzai::Filter::AbstractReferenceFilter do
let(:project) { create(:project) } let_it_be(:project) { create(:project) }
let(:doc) { Nokogiri::HTML.fragment('') }
let(:filter) { described_class.new(doc, project: project) }
describe '#references_per_parent' do describe '#references_per_parent' do
it 'returns a Hash containing references grouped per parent paths' do let(:doc) { Nokogiri::HTML.fragment("#1 #{project.full_path}#2 #2") }
doc = Nokogiri::HTML.fragment("#1 #{project.full_path}#2")
filter = described_class.new(doc, project: project)
expect(filter).to receive(:object_class).exactly(4).times.and_return(Issue) it 'returns a Hash containing references grouped per parent paths' do
expect(filter).to receive(:object_sym).twice.and_return(:issue) expect(described_class).to receive(:object_class).exactly(6).times.and_return(Issue)
refs = filter.references_per_parent refs = filter.references_per_parent
expect(refs).to be_an_instance_of(Hash) expect(refs).to match(a_hash_including(project.full_path => contain_exactly(1, 2)))
expect(refs[project.full_path]).to eq(Set.new(%w[1 2]))
end end
end end
describe '#parent_per_reference' do describe '#parent_per_reference' do
it 'returns a Hash containing projects grouped per parent paths' do it 'returns a Hash containing projects grouped per parent paths' do
doc = Nokogiri::HTML.fragment('')
filter = described_class.new(doc, project: project)
expect(filter).to receive(:references_per_parent) expect(filter).to receive(:references_per_parent)
.and_return({ project.full_path => Set.new(%w[1]) }) .and_return({ project.full_path => Set.new([1]) })
expect(filter.parent_per_reference) expect(filter.parent_per_reference)
.to eq({ project.full_path => project }) .to eq({ project.full_path => project })
...@@ -34,9 +31,6 @@ describe Banzai::Filter::AbstractReferenceFilter do ...@@ -34,9 +31,6 @@ describe Banzai::Filter::AbstractReferenceFilter do
end end
describe '#find_for_paths' do describe '#find_for_paths' do
let(:doc) { Nokogiri::HTML.fragment('') }
let(:filter) { described_class.new(doc, project: project) }
context 'with RequestStore disabled' do context 'with RequestStore disabled' do
it 'returns a list of Projects for a list of paths' do it 'returns a list of Projects for a list of paths' do
expect(filter.find_for_paths([project.full_path])) expect(filter.find_for_paths([project.full_path]))
......
...@@ -28,7 +28,7 @@ describe Gitlab::ApplicationContext do ...@@ -28,7 +28,7 @@ describe Gitlab::ApplicationContext do
describe '.push' do describe '.push' do
it 'passes the expected context on to labkit' do it 'passes the expected context on to labkit' do
fake_proc = duck_type(:call) fake_proc = duck_type(:call)
expected_context = hash_including(user: fake_proc, project: fake_proc, root_namespace: fake_proc) expected_context = { user: fake_proc }
expect(Labkit::Context).to receive(:push).with(expected_context) expect(Labkit::Context).to receive(:push).with(expected_context)
...@@ -78,5 +78,27 @@ describe Gitlab::ApplicationContext do ...@@ -78,5 +78,27 @@ describe Gitlab::ApplicationContext do
expect(result(context)) expect(result(context))
.to include(project: project.full_path, root_namespace: project.full_path_components.first) .to include(project: project.full_path, root_namespace: project.full_path_components.first)
end end
context 'only include values for which an option was specified' do
using RSpec::Parameterized::TableSyntax
where(:provided_options, :expected_context_keys) do
[:user, :namespace, :project] | [:user, :project, :root_namespace]
[:user, :project] | [:user, :project, :root_namespace]
[:user, :namespace] | [:user, :root_namespace]
[:user] | [:user]
[] | []
end
with_them do
it do
# Build a hash that has all `provided_options` as keys, and `nil` as value
provided_values = provided_options.map { |key| [key, nil] }.to_h
context = described_class.new(provided_values)
expect(context.to_lazy_hash.keys).to contain_exactly(*expected_context_keys)
end
end
end
end end
end end
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::ImportExport::ImportFailureService do
  let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') }
  let(:label) { create(:label) }
  let(:relation_key) { "labels" }
  let(:relation_index) { 0 }

  # Use RSpec's dedicated `subject` helper instead of `let(:subject)`,
  # which shadows the built-in and is discouraged by RSpec.
  subject { described_class.new(importable) }

  describe '#log_import_failure' do
    let(:standard_error_message) { "StandardError message" }
    let(:exception) { StandardError.new(standard_error_message) }
    let(:correlation_id) { 'my-correlation-id' }
    let(:retry_count) { 2 }
    let(:log_import_failure) do
      subject.log_import_failure(relation_key, relation_index, exception, retry_count)
    end

    before do
      # Import is running from the rake task, `correlation_id` is not assigned
      allow(Labkit::Correlation::CorrelationId).to receive(:current_or_new_id).and_return(correlation_id)
    end

    context 'when importable is a group' do
      let(:importable) { create(:group) }

      it_behaves_like 'log import failure', :group_id
    end

    context 'when importable is a project' do
      it_behaves_like 'log import failure', :project_id
    end

    context 'when ImportFailure does not support importable class' do
      let(:importable) { create(:merge_request) }

      it 'raise exception' do
        expect { subject }.to raise_exception(ActiveRecord::AssociationNotFoundError, "Association named 'import_failures' was not found on MergeRequest; perhaps you misspelled it?")
      end
    end
  end

  describe '#with_retry' do
    let(:perform_retry) do
      subject.with_retry(relation_key, relation_index) do
        label.save!
      end
    end

    context 'when exceptions are retriable' do
      where(:exception) { Gitlab::ImportExport::ImportFailureService::RETRIABLE_EXCEPTIONS }

      with_them do
        context 'when retry succeeds' do
          before do
            # First save fails with the retriable error, second succeeds.
            expect(label).to receive(:save!).and_raise(exception.new)
            expect(label).to receive(:save!).and_return(true)
          end

          it 'retries and logs import failure once with correct params' do
            expect(subject).to receive(:log_import_failure).with(relation_key, relation_index, instance_of(exception), 1).once

            perform_retry
          end
        end

        context 'when retry continues to fail with intermittent errors' do
          let(:maximum_retry_count) do
            Retriable.config.tries
          end

          before do
            expect(label).to receive(:save!)
              .exactly(maximum_retry_count).times
              .and_raise(exception.new)
          end

          it 'retries the number of times allowed and raise exception', :aggregate_failures do
            expect { perform_retry }.to raise_exception(exception)
          end

          it 'logs import failure each time and raise exception', :aggregate_failures do
            maximum_retry_count.times do |index|
              retry_count = index + 1
              expect(subject).to receive(:log_import_failure).with(relation_key, relation_index, instance_of(exception), retry_count)
            end

            expect { perform_retry }.to raise_exception(exception)
          end
        end
      end
    end

    context 'when exception is not retriable' do
      let(:exception) { StandardError.new }

      it 'raise the exception', :aggregate_failures do
        expect(label).to receive(:save!).once.and_raise(exception)
        expect(subject).not_to receive(:log_import_failure)

        expect { perform_retry }.to raise_exception(exception)
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'
require Rails.root.join('db', 'post_migrate', '20190924152703_migrate_issue_trackers_data.rb')

describe MigrateIssueTrackersData, :migration do
  let(:services) { table(:services) }

  let(:migration_class) { Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData }
  let(:migration_name) { migration_class.to_s.demodulize }

  let(:properties) do
    {
      'url' => 'http://example.com'
    }
  end

  # Eligible service: issue_tracker category with non-empty properties.
  let!(:jira_service) do
    services.create(id: 10, type: 'JiraService', properties: properties, category: 'issue_tracker')
  end

  # Not expected to be scheduled: properties is NULL (see expectations below).
  let!(:jira_service_nil) do
    services.create(id: 11, type: 'JiraService', properties: nil, category: 'issue_tracker')
  end

  let!(:bugzilla_service) do
    services.create(id: 12, type: 'BugzillaService', properties: properties, category: 'issue_tracker')
  end

  let!(:youtrack_service) do
    services.create(id: 13, type: 'YoutrackService', properties: properties, category: 'issue_tracker')
  end

  # Not expected to be scheduled: properties is an empty string.
  let!(:youtrack_service_empty) do
    services.create(id: 14, type: 'YoutrackService', properties: '', category: 'issue_tracker')
  end

  let!(:gitlab_service) do
    services.create(id: 15, type: 'GitlabIssueTrackerService', properties: properties, category: 'issue_tracker')
  end

  # Not expected to be scheduled: properties is an empty hash.
  let!(:gitlab_service_empty) do
    services.create(id: 16, type: 'GitlabIssueTrackerService', properties: {}, category: 'issue_tracker')
  end

  # Not expected to be scheduled: category is not 'issue_tracker'.
  let!(:other_service) do
    services.create(id: 17, type: 'OtherService', properties: properties, category: 'other_category')
  end

  before do
    # Two services per job so the batching/scheduling logic is exercised.
    stub_const("#{described_class}::BATCH_SIZE", 2)
  end

  it 'schedules background migrations at correct time' do
    Sidekiq::Testing.fake! do
      Timecop.freeze do
        migrate!

        # The four eligible services are split into two delayed jobs,
        # scheduled 3 minutes apart.
        expect(migration_name).to be_scheduled_delayed_migration(3.minutes, jira_service.id, bugzilla_service.id)
        expect(migration_name).to be_scheduled_delayed_migration(6.minutes, youtrack_service.id, gitlab_service.id)
        expect(BackgroundMigrationWorker.jobs.size).to eq(2)
      end
    end
  end
end
# frozen_string_literal: true

require 'spec_helper'

describe ImportFailure do
  describe "Associations" do
    # An ImportFailure belongs to either a project or a group.
    %i[project group].each do |owner|
      it { is_expected.to belong_to(owner) }
    end
  end

  describe 'Validations' do
    # Exactly one owner must be present: when the group is absent the
    # project is required, and vice versa.
    context 'has no group' do
      before { allow(subject).to receive(:group).and_return(nil) }

      it { is_expected.to validate_presence_of(:project) }
    end

    context 'has no project' do
      before { allow(subject).to receive(:project).and_return(nil) }

      it { is_expected.to validate_presence_of(:group) }
    end
  end
end
...@@ -40,6 +40,18 @@ describe API::Deployments do ...@@ -40,6 +40,18 @@ describe API::Deployments do
end end
end end
context 'with the environment filter specifed' do
it 'returns deployments for the environment' do
get(
api("/projects/#{project.id}/deployments", user),
params: { environment: deployment_1.environment.name }
)
expect(json_response.size).to eq(1)
expect(json_response.first['iid']).to eq(deployment_1.iid)
end
end
describe 'ordering' do describe 'ordering' do
let(:order_by) { 'iid' } let(:order_by) { 'iid' }
let(:sort) { 'desc' } let(:sort) { 'desc' }
......
...@@ -74,7 +74,9 @@ describe MergeRequests::Conflicts::ListService do ...@@ -74,7 +74,9 @@ describe MergeRequests::Conflicts::ListService do
it 'returns a falsey value when the MR has a missing ref after a force push' do it 'returns a falsey value when the MR has a missing ref after a force push' do
merge_request = create_merge_request('conflict-resolvable') merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request) service = conflicts_service(merge_request)
allow_any_instance_of(Gitlab::GitalyClient::ConflictsService).to receive(:list_conflict_files).and_raise(GRPC::Unknown) allow_next_instance_of(Gitlab::GitalyClient::ConflictsService) do |instance|
allow(instance).to receive(:list_conflict_files).and_raise(GRPC::Unknown)
end
expect(service.can_be_resolved_in_ui?).to be_falsey expect(service.can_be_resolved_in_ui?).to be_falsey
end end
......
...@@ -55,7 +55,9 @@ describe MergeRequests::CreateFromIssueService do ...@@ -55,7 +55,9 @@ describe MergeRequests::CreateFromIssueService do
end end
it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created', :sidekiq_might_not_need_inline do it 'creates the new_issue_branch system note when the branch could be created but the merge_request cannot be created', :sidekiq_might_not_need_inline do
expect_any_instance_of(MergeRequest).to receive(:valid?).at_least(:once).and_return(false) expect_next_instance_of(MergeRequest) do |instance|
expect(instance).to receive(:valid?).at_least(:once).and_return(false)
end
expect(SystemNoteService).to receive(:new_issue_branch).with(issue, project, user, issue.to_branch_name, branch_project: target_project) expect(SystemNoteService).to receive(:new_issue_branch).with(issue, project, user, issue.to_branch_name, branch_project: target_project)
......
...@@ -714,9 +714,9 @@ describe MergeRequests::PushOptionsHandlerService do ...@@ -714,9 +714,9 @@ describe MergeRequests::PushOptionsHandlerService do
let(:exception) { StandardError.new('My standard error') } let(:exception) { StandardError.new('My standard error') }
def run_service_with_exception def run_service_with_exception
allow_any_instance_of( allow_next_instance_of(MergeRequests::BuildService) do |instance|
MergeRequests::BuildService allow(instance).to receive(:execute).and_raise(exception)
).to receive(:execute).and_raise(exception) end
service.execute service.execute
end end
...@@ -766,9 +766,9 @@ describe MergeRequests::PushOptionsHandlerService do ...@@ -766,9 +766,9 @@ describe MergeRequests::PushOptionsHandlerService do
invalid_merge_request = MergeRequest.new invalid_merge_request = MergeRequest.new
invalid_merge_request.errors.add(:base, 'my error') invalid_merge_request.errors.add(:base, 'my error')
expect_any_instance_of( expect_next_instance_of(MergeRequests::CreateService) do |instance|
MergeRequests::CreateService expect(instance).to receive(:execute).and_return(invalid_merge_request)
).to receive(:execute).and_return(invalid_merge_request) end
service.execute service.execute
......
...@@ -31,7 +31,9 @@ describe Milestones::PromoteService do ...@@ -31,7 +31,9 @@ describe Milestones::PromoteService do
it 'does not promote milestone and update issuables if promoted milestone is not valid' do it 'does not promote milestone and update issuables if promoted milestone is not valid' do
issue = create(:issue, milestone: milestone, project: project) issue = create(:issue, milestone: milestone, project: project)
merge_request = create(:merge_request, milestone: milestone, source_project: project) merge_request = create(:merge_request, milestone: milestone, source_project: project)
allow_any_instance_of(Milestone).to receive(:valid?).and_return(false) allow_next_instance_of(Milestone) do |instance|
allow(instance).to receive(:valid?).and_return(false)
end
expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError) expect { service.execute(milestone) }.to raise_error(described_class::PromoteMilestoneError)
......
...@@ -71,7 +71,9 @@ describe Milestones::TransferService do ...@@ -71,7 +71,9 @@ describe Milestones::TransferService do
context 'when find_or_create_milestone returns nil' do context 'when find_or_create_milestone returns nil' do
before do before do
allow_any_instance_of(Milestones::FindOrCreateService).to receive(:execute).and_return(nil) allow_next_instance_of(Milestones::FindOrCreateService) do |instance|
allow(instance).to receive(:execute).and_return(nil)
end
end end
it 'removes issues group milestone' do it 'removes issues group milestone' do
......
...@@ -17,7 +17,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do ...@@ -17,7 +17,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do
end end
it 'recalculate the namespace statistics' do it 'recalculate the namespace statistics' do
expect_any_instance_of(Namespace::RootStorageStatistics).to receive(:recalculate!).once expect_next_instance_of(Namespace::RootStorageStatistics) do |instance|
expect(instance).to receive(:recalculate!).once
end
service.execute(group) service.execute(group)
end end
...@@ -45,8 +47,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do ...@@ -45,8 +47,9 @@ describe Namespaces::StatisticsRefresherService, '#execute' do
context 'when something goes wrong' do context 'when something goes wrong' do
before do before do
allow_any_instance_of(Namespace::RootStorageStatistics) allow_next_instance_of(Namespace::RootStorageStatistics) do |instance|
.to receive(:recalculate!).and_raise(ActiveRecord::ActiveRecordError) allow(instance).to receive(:recalculate!).and_raise(ActiveRecord::ActiveRecordError)
end
end end
it 'raises RefreshError' do it 'raises RefreshError' do
......
...@@ -17,7 +17,9 @@ describe Notes::ResolveService do ...@@ -17,7 +17,9 @@ describe Notes::ResolveService do
end end
it "sends notifications if all discussions are resolved" do it "sends notifications if all discussions are resolved" do
expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService).to receive(:execute).with(merge_request) expect_next_instance_of(MergeRequests::ResolvedDiscussionNotificationService) do |instance|
expect(instance).to receive(:execute).with(merge_request)
end
described_class.new(merge_request.project, user).execute(note) described_class.new(merge_request.project, user).execute(note)
end end
......
...@@ -32,9 +32,9 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do ...@@ -32,9 +32,9 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
def stub_lets_encrypt_order(url, status) def stub_lets_encrypt_order(url, status)
order = ::Gitlab::LetsEncrypt::Order.new(acme_order_double(status: status)) order = ::Gitlab::LetsEncrypt::Order.new(acme_order_double(status: status))
allow_any_instance_of(::Gitlab::LetsEncrypt::Client).to( allow_next_instance_of(::Gitlab::LetsEncrypt::Client) do |instance|
receive(:load_order).with(url).and_return(order) allow(instance).to receive(:load_order).with(url).and_return(order)
) end
order order
end end
......
...@@ -247,7 +247,9 @@ describe Projects::CreateService, '#execute' do ...@@ -247,7 +247,9 @@ describe Projects::CreateService, '#execute' do
context 'repository creation' do context 'repository creation' do
it 'synchronously creates the repository' do it 'synchronously creates the repository' do
expect_any_instance_of(Project).to receive(:create_repository) expect_next_instance_of(Project) do |instance|
expect(instance).to receive(:create_repository)
end
project = create_project(user, opts) project = create_project(user, opts)
expect(project).to be_valid expect(project).to be_valid
......
...@@ -94,7 +94,9 @@ describe Projects::ImportExport::ExportService do ...@@ -94,7 +94,9 @@ describe Projects::ImportExport::ExportService do
end end
it 'notifies the user' do it 'notifies the user' do
expect_any_instance_of(NotificationService).to receive(:project_not_exported) expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:project_not_exported)
end
end end
it 'notifies logger' do it 'notifies logger' do
...@@ -122,7 +124,9 @@ describe Projects::ImportExport::ExportService do ...@@ -122,7 +124,9 @@ describe Projects::ImportExport::ExportService do
end end
it 'notifies the user' do it 'notifies the user' do
expect_any_instance_of(NotificationService).to receive(:project_not_exported) expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:project_not_exported)
end
end end
it 'notifies logger' do it 'notifies logger' do
......
...@@ -16,7 +16,9 @@ describe Projects::LfsPointers::LfsImportService do ...@@ -16,7 +16,9 @@ describe Projects::LfsPointers::LfsImportService do
it 'downloads lfs objects' do it 'downloads lfs objects' do
service = double service = double
expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_return(oid_download_links) expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
expect(instance).to receive(:execute).and_return(oid_download_links)
end
expect(Projects::LfsPointers::LfsDownloadService).to receive(:new).and_return(service).twice expect(Projects::LfsPointers::LfsDownloadService).to receive(:new).and_return(service).twice
expect(service).to receive(:execute).twice expect(service).to receive(:execute).twice
...@@ -27,7 +29,9 @@ describe Projects::LfsPointers::LfsImportService do ...@@ -27,7 +29,9 @@ describe Projects::LfsPointers::LfsImportService do
context 'when no downloadable lfs object links' do context 'when no downloadable lfs object links' do
it 'does not call LfsDownloadService' do it 'does not call LfsDownloadService' do
expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_return({}) expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
expect(instance).to receive(:execute).and_return({})
end
expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new) expect(Projects::LfsPointers::LfsDownloadService).not_to receive(:new)
result = subject.execute result = subject.execute
...@@ -39,7 +43,9 @@ describe Projects::LfsPointers::LfsImportService do ...@@ -39,7 +43,9 @@ describe Projects::LfsPointers::LfsImportService do
context 'when an exception is raised' do context 'when an exception is raised' do
it 'returns error' do it 'returns error' do
error_message = "error message" error_message = "error message"
expect_any_instance_of(Projects::LfsPointers::LfsObjectDownloadListService).to receive(:execute).and_raise(StandardError, error_message) expect_next_instance_of(Projects::LfsPointers::LfsObjectDownloadListService) do |instance|
expect(instance).to receive(:execute).and_raise(StandardError, error_message)
end
result = subject.execute result = subject.execute
......
...@@ -110,8 +110,9 @@ describe Projects::UpdatePagesService do ...@@ -110,8 +110,9 @@ describe Projects::UpdatePagesService do
context 'when timeout happens by DNS error' do context 'when timeout happens by DNS error' do
before do before do
allow_any_instance_of(described_class) allow_next_instance_of(described_class) do |instance|
.to receive(:extract_zip_archive!).and_raise(SocketError) allow(instance).to receive(:extract_zip_archive!).and_raise(SocketError)
end
end end
it 'raises an error' do it 'raises an error' do
...@@ -125,9 +126,10 @@ describe Projects::UpdatePagesService do ...@@ -125,9 +126,10 @@ describe Projects::UpdatePagesService do
context 'when failed to extract zip artifacts' do context 'when failed to extract zip artifacts' do
before do before do
expect_any_instance_of(described_class) expect_next_instance_of(described_class) do |instance|
.to receive(:extract_zip_archive!) expect(instance).to receive(:extract_zip_archive!)
.and_raise(Projects::UpdatePagesService::FailedToExtractError) .and_raise(Projects::UpdatePagesService::FailedToExtractError)
end
end end
it 'raises an error' do it 'raises an error' do
......
...@@ -265,7 +265,9 @@ describe ::SystemNotes::IssuablesService do ...@@ -265,7 +265,9 @@ describe ::SystemNotes::IssuablesService do
context 'when cross-reference disallowed' do context 'when cross-reference disallowed' do
before do before do
expect_any_instance_of(described_class).to receive(:cross_reference_disallowed?).and_return(true) expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:cross_reference_disallowed?).and_return(true)
end
end end
it 'returns nil' do it 'returns nil' do
...@@ -279,7 +281,9 @@ describe ::SystemNotes::IssuablesService do ...@@ -279,7 +281,9 @@ describe ::SystemNotes::IssuablesService do
context 'when cross-reference allowed' do context 'when cross-reference allowed' do
before do before do
expect_any_instance_of(described_class).to receive(:cross_reference_disallowed?).and_return(false) expect_next_instance_of(described_class) do |instance|
expect(instance).to receive(:cross_reference_disallowed?).and_return(false)
end
end end
it_behaves_like 'a system note' do it_behaves_like 'a system note' do
......
...@@ -28,6 +28,17 @@ module FilterSpecHelper ...@@ -28,6 +28,17 @@ module FilterSpecHelper
described_class.call(html, context) described_class.call(html, context)
end end
# Build a single instance of the filter under test, without running it.
#
# Useful for exercising instance methods in isolation; the end-to-end
# result should still be verified through the pipeline-invoking helpers
# (#call and friends) elsewhere in this module.
def filter_instance
  context = {
    project: project,
    current_user: current_user,
    render_context: Banzai::RenderContext.new(project, current_user)
  }

  described_class.new(input_text, context)
end
# Run text through HTML::Pipeline with the current filter and return the # Run text through HTML::Pipeline with the current filter and return the
# result Hash # result Hash
# #
......
...@@ -3,7 +3,9 @@ ...@@ -3,7 +3,9 @@
module ImportExport module ImportExport
module CommonUtil module CommonUtil
def setup_symlink(tmpdir, symlink_name) def setup_symlink(tmpdir, symlink_name)
allow_any_instance_of(Gitlab::ImportExport).to receive(:storage_path).and_return(tmpdir) allow_next_instance_of(Gitlab::ImportExport) do |instance|
allow(instance).to receive(:storage_path).and_return(tmpdir)
end
File.open("#{tmpdir}/test", 'w') { |file| file.write("test") } File.open("#{tmpdir}/test", 'w') { |file| file.write("test") }
FileUtils.ln_s("#{tmpdir}/test", "#{tmpdir}/#{symlink_name}") FileUtils.ln_s("#{tmpdir}/test", "#{tmpdir}/#{symlink_name}")
......
# frozen_string_literal: true

# Shared examples verifying that #log_import_failure both reports the
# exception to error tracking and persists an ImportFailure record.
#
# The including spec must provide (via `let`/`subject`): `subject`
# (responds to #log_import_failure), `importable`, `relation_key`,
# `relation_index`, `exception`, `retry_count`, `standard_error_message`,
# `correlation_id`, and a `log_import_failure` call helper.
# `importable_column` is the ImportFailure column that references the
# importable record — presumably e.g. :project_id or :group_id; confirm
# against the including specs.
shared_examples 'log import failure' do |importable_column|
  it 'tracks error' do
    extra = {
      relation_key: relation_key,
      relation_index: relation_index,
      retry_count: retry_count
    }
    # The importable's id is reported under the column-specific key.
    extra[importable_column] = importable.id
    expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception, extra)
    subject.log_import_failure(relation_key, relation_index, exception, retry_count)
  end
  it 'saves data to ImportFailure' do
    log_import_failure
    import_failure = ImportFailure.last
    # aggregate_failures reports every mismatched attribute, not just the first.
    aggregate_failures do
      expect(import_failure[importable_column]).to eq(importable.id)
      expect(import_failure.relation_key).to eq(relation_key)
      expect(import_failure.relation_index).to eq(relation_index)
      expect(import_failure.exception_class).to eq('StandardError')
      expect(import_failure.exception_message).to eq(standard_error_message)
      expect(import_failure.correlation_id_value).to eq(correlation_id)
      expect(import_failure.retry_count).to eq(retry_count)
    end
  end
end
...@@ -35,8 +35,9 @@ describe Ci::ArchiveTracesCronWorker do ...@@ -35,8 +35,9 @@ describe Ci::ArchiveTracesCronWorker do
it_behaves_like 'archives trace' it_behaves_like 'archives trace'
it 'executes service' do it 'executes service' do
expect_any_instance_of(Ci::ArchiveTraceService) expect_next_instance_of(Ci::ArchiveTraceService) do |instance|
.to receive(:execute).with(build, anything) expect(instance).to receive(:execute).with(build, anything)
end
subject subject
end end
...@@ -64,7 +65,9 @@ describe Ci::ArchiveTracesCronWorker do ...@@ -64,7 +65,9 @@ describe Ci::ArchiveTracesCronWorker do
before do before do
allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
allow_any_instance_of(Gitlab::Ci::Trace).to receive(:archive!).and_raise('Unexpected error') allow_next_instance_of(Gitlab::Ci::Trace) do |instance|
allow(instance).to receive(:archive!).and_raise('Unexpected error')
end
end end
it 'puts a log' do it 'puts a log' do
......
...@@ -57,9 +57,9 @@ describe Gitlab::GithubImport::ReschedulingMethods do ...@@ -57,9 +57,9 @@ describe Gitlab::GithubImport::ReschedulingMethods do
expect(worker) expect(worker)
.not_to receive(:notify_waiter) .not_to receive(:notify_waiter)
expect_any_instance_of(Gitlab::GithubImport::Client) expect_next_instance_of(Gitlab::GithubImport::Client) do |instance|
.to receive(:rate_limit_resets_in) expect(instance).to receive(:rate_limit_resets_in).and_return(14)
.and_return(14) end
expect(worker.class) expect(worker.class)
.to receive(:perform_in) .to receive(:perform_in)
......
...@@ -9,7 +9,9 @@ describe DeleteMergedBranchesWorker do ...@@ -9,7 +9,9 @@ describe DeleteMergedBranchesWorker do
describe "#perform" do describe "#perform" do
it "delegates to Branches::DeleteMergedService" do it "delegates to Branches::DeleteMergedService" do
expect_any_instance_of(::Branches::DeleteMergedService).to receive(:execute).and_return(true) expect_next_instance_of(::Branches::DeleteMergedService) do |instance|
expect(instance).to receive(:execute).and_return(true)
end
worker.perform(project.id, project.owner.id) worker.perform(project.id, project.owner.id)
end end
......
...@@ -7,7 +7,9 @@ describe ExpireBuildArtifactsWorker do ...@@ -7,7 +7,9 @@ describe ExpireBuildArtifactsWorker do
describe '#perform' do describe '#perform' do
it 'executes a service' do it 'executes a service' do
expect_any_instance_of(Ci::DestroyExpiredJobArtifactsService).to receive(:execute) expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
expect(instance).to receive(:execute)
end
worker.perform worker.perform
end end
......
...@@ -21,9 +21,9 @@ describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do ...@@ -21,9 +21,9 @@ describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
it 'schedules the importing of the base data' do it 'schedules the importing of the base data' do
client = double(:client) client = double(:client)
expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
.to receive(:execute) expect(instance).to receive(:execute).and_return(true)
.and_return(true) end
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker) expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.to receive(:perform_async) .to receive(:perform_async)
...@@ -37,9 +37,9 @@ describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do ...@@ -37,9 +37,9 @@ describe Gitlab::GithubImport::Stage::ImportRepositoryWorker do
it 'does not schedule the importing of the base data' do it 'does not schedule the importing of the base data' do
client = double(:client) client = double(:client)
expect_any_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) expect_next_instance_of(Gitlab::GithubImport::Importer::RepositoryImporter) do |instance|
.to receive(:execute) expect(instance).to receive(:execute).and_return(false)
.and_return(false) end
expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker) expect(Gitlab::GithubImport::Stage::ImportBaseDataWorker)
.not_to receive(:perform_async) .not_to receive(:perform_async)
......
...@@ -7,7 +7,9 @@ describe GitlabShellWorker do ...@@ -7,7 +7,9 @@ describe GitlabShellWorker do
describe '#perform with add_key' do describe '#perform with add_key' do
it 'calls add_key on Gitlab::Shell' do it 'calls add_key on Gitlab::Shell' do
expect_any_instance_of(Gitlab::Shell).to receive(:add_key).with('foo', 'bar') expect_next_instance_of(Gitlab::Shell) do |instance|
expect(instance).to receive(:add_key).with('foo', 'bar')
end
worker.perform(:add_key, 'foo', 'bar') worker.perform(:add_key, 'foo', 'bar')
end end
end end
......
...@@ -8,7 +8,9 @@ describe GitlabUsagePingWorker do ...@@ -8,7 +8,9 @@ describe GitlabUsagePingWorker do
it 'delegates to SubmitUsagePingService' do it 'delegates to SubmitUsagePingService' do
allow(subject).to receive(:try_obtain_lease).and_return(true) allow(subject).to receive(:try_obtain_lease).and_return(true)
expect_any_instance_of(SubmitUsagePingService).to receive(:execute) expect_next_instance_of(SubmitUsagePingService) do |instance|
expect(instance).to receive(:execute)
end
subject.perform subject.perform
end end
......
...@@ -10,7 +10,9 @@ describe HashedStorage::MigratorWorker do ...@@ -10,7 +10,9 @@ describe HashedStorage::MigratorWorker do
describe '#perform' do describe '#perform' do
it 'delegates to MigratorService' do it 'delegates to MigratorService' do
expect_any_instance_of(Gitlab::HashedStorage::Migrator).to receive(:bulk_migrate).with(start: 5, finish: 10) expect_next_instance_of(Gitlab::HashedStorage::Migrator) do |instance|
expect(instance).to receive(:bulk_migrate).with(start: 5, finish: 10)
end
worker.perform(5, 10) worker.perform(5, 10)
end end
......
...@@ -10,7 +10,9 @@ describe HashedStorage::RollbackerWorker do ...@@ -10,7 +10,9 @@ describe HashedStorage::RollbackerWorker do
describe '#perform' do describe '#perform' do
it 'delegates to MigratorService' do it 'delegates to MigratorService' do
expect_any_instance_of(Gitlab::HashedStorage::Migrator).to receive(:bulk_rollback).with(start: 5, finish: 10) expect_next_instance_of(Gitlab::HashedStorage::Migrator) do |instance|
expect(instance).to receive(:bulk_rollback).with(start: 5, finish: 10)
end
worker.perform(5, 10) worker.perform(5, 10)
end end
......
...@@ -11,7 +11,9 @@ describe ImportIssuesCsvWorker do ...@@ -11,7 +11,9 @@ describe ImportIssuesCsvWorker do
describe '#perform' do describe '#perform' do
it 'calls #execute on Issues::ImportCsvService and destroys upload' do it 'calls #execute on Issues::ImportCsvService and destroys upload' do
expect_any_instance_of(Issues::ImportCsvService).to receive(:execute).and_return({ success: 5, errors: [], valid_file: true }) expect_next_instance_of(Issues::ImportCsvService) do |instance|
expect(instance).to receive(:execute).and_return({ success: 5, errors: [], valid_file: true })
end
worker.perform(user.id, project.id, upload.id) worker.perform(user.id, project.id, upload.id)
......
...@@ -6,7 +6,9 @@ describe NewReleaseWorker do ...@@ -6,7 +6,9 @@ describe NewReleaseWorker do
let(:release) { create(:release) } let(:release) { create(:release) }
it 'sends a new release notification' do it 'sends a new release notification' do
expect_any_instance_of(NotificationService).to receive(:send_new_release_notifications).with(release) expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:send_new_release_notifications).with(release)
end
described_class.new.perform(release.id) described_class.new.perform(release.id)
end end
......
...@@ -21,8 +21,9 @@ describe RepositoryImportWorker do ...@@ -21,8 +21,9 @@ describe RepositoryImportWorker do
allow(subject).to receive(:jid).and_return(jid) allow(subject).to receive(:jid).and_return(jid)
expect_any_instance_of(Projects::ImportService).to receive(:execute) expect_next_instance_of(Projects::ImportService) do |instance|
.and_return({ status: :ok }) expect(instance).to receive(:execute).and_return({ status: :ok })
end
# Works around https://github.com/rspec/rspec-mocks/issues/910 # Works around https://github.com/rspec/rspec-mocks/issues/910
expect(Project).to receive(:find).with(started_project.id).and_return(started_project) expect(Project).to receive(:find).with(started_project.id).and_return(started_project)
...@@ -36,8 +37,9 @@ describe RepositoryImportWorker do ...@@ -36,8 +37,9 @@ describe RepositoryImportWorker do
context 'when the import was successful' do context 'when the import was successful' do
it 'imports a project' do it 'imports a project' do
expect_any_instance_of(Projects::ImportService).to receive(:execute) expect_next_instance_of(Projects::ImportService) do |instance|
.and_return({ status: :ok }) expect(instance).to receive(:execute).and_return({ status: :ok })
end
# Works around https://github.com/rspec/rspec-mocks/issues/910 # Works around https://github.com/rspec/rspec-mocks/issues/910
expect(Project).to receive(:find).with(project.id).and_return(project) expect(Project).to receive(:find).with(project.id).and_return(project)
...@@ -54,7 +56,9 @@ describe RepositoryImportWorker do ...@@ -54,7 +56,9 @@ describe RepositoryImportWorker do
error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found } error = %q{remote: Not Found fatal: repository 'https://user:pass@test.com/root/repoC.git/' not found }
import_state.update(jid: '123') import_state.update(jid: '123')
expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error }) expect_next_instance_of(Projects::ImportService) do |instance|
expect(instance).to receive(:execute).and_return({ status: :error, message: error })
end
expect do expect do
subject.perform(project.id) subject.perform(project.id)
...@@ -67,7 +71,9 @@ describe RepositoryImportWorker do ...@@ -67,7 +71,9 @@ describe RepositoryImportWorker do
project.update(import_type: 'gitlab_project') project.update(import_type: 'gitlab_project')
import_state.update(jid: '123') import_state.update(jid: '123')
expect_any_instance_of(Projects::ImportService).to receive(:execute).and_return({ status: :error, message: error }) expect_next_instance_of(Projects::ImportService) do |instance|
expect(instance).to receive(:execute).and_return({ status: :error, message: error })
end
expect do expect do
subject.perform(project.id) subject.perform(project.id)
...@@ -93,8 +99,9 @@ describe RepositoryImportWorker do ...@@ -93,8 +99,9 @@ describe RepositoryImportWorker do
.to receive(:async?) .to receive(:async?)
.and_return(true) .and_return(true)
expect_any_instance_of(ProjectImportState) expect_next_instance_of(ProjectImportState) do |instance|
.not_to receive(:finish) expect(instance).not_to receive(:finish)
end
subject.perform(project.id) subject.perform(project.id)
end end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment