Commit 5426ca99 authored by GitLab Bot's avatar GitLab Bot

Add latest changes from gitlab-org/gitlab@master

parent 67cdfd26
......@@ -58,7 +58,7 @@ export default {
<template>
<div class="frequent-items-list-container">
<ul class="list-unstyled">
<ul ref="frequentItemsList" class="list-unstyled">
<li v-if="isListEmpty" :class="{ 'section-failure': isFetchFailed }" class="section-empty">
{{ listEmptyMessage }}
</li>
......
......@@ -117,3 +117,23 @@ export const scaledSIFormatter = (unit = '', prefixOffset = 0) => {
return scaledFormatter(units);
};
/**
* Returns a function that formats a number scaled using SI units notation.
*/
export const scaledBinaryFormatter = (unit = '', prefixOffset = 0) => {
// eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
const multiplicative = ['Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi'];
const symbols = ['', ...multiplicative];
const units = symbols.slice(prefixOffset).map(prefix => {
return `${prefix}${unit}`;
});
if (!units.length) {
// eslint-disable-next-line @gitlab/i18n/no-non-i18n-strings
throw new RangeError('The unit cannot be converted, please try a different scale');
}
return scaledFormatter(units, 1024);
};
import { s__ } from '~/locale';
import { suffixFormatter, scaledSIFormatter, numberFormatter } from './formatter_factory';
import {
suffixFormatter,
scaledSIFormatter,
scaledBinaryFormatter,
numberFormatter,
} from './formatter_factory';
/**
* Supported formats
*
* Based on:
*
* https://tc39.es/proposal-unified-intl-numberformat/section6/locales-currencies-tz_proposed_out.html#sec-issanctionedsimpleunitidentifier
*/
export const SUPPORTED_FORMATS = {
// Number
......@@ -13,15 +22,23 @@ export const SUPPORTED_FORMATS = {
// Duration
seconds: 'seconds',
miliseconds: 'miliseconds',
milliseconds: 'milliseconds',
// Digital
bytes: 'bytes',
// Digital (Metric)
decimalBytes: 'decimalBytes',
kilobytes: 'kilobytes',
megabytes: 'megabytes',
gigabytes: 'gigabytes',
terabytes: 'terabytes',
petabytes: 'petabytes',
// Digital (IEC)
bytes: 'bytes',
kibibytes: 'kibibytes',
mebibytes: 'mebibytes',
gibibytes: 'gibibytes',
tebibytes: 'tebibytes',
pebibytes: 'pebibytes',
};
/**
......@@ -32,6 +49,7 @@ export const SUPPORTED_FORMATS = {
*/
export const getFormatter = (format = SUPPORTED_FORMATS.number) => {
// Number
if (format === SUPPORTED_FORMATS.number) {
/**
* Formats a number
......@@ -70,6 +88,7 @@ export const getFormatter = (format = SUPPORTED_FORMATS.number) => {
}
// Durations
if (format === SUPPORTED_FORMATS.seconds) {
/**
* Formats a number of seconds
......@@ -82,9 +101,9 @@ export const getFormatter = (format = SUPPORTED_FORMATS.number) => {
*/
return suffixFormatter(s__('Units|s'));
}
if (format === SUPPORTED_FORMATS.miliseconds) {
if (format === SUPPORTED_FORMATS.milliseconds) {
/**
* Formats a number of miliseconds with ms as units
* Formats a number of milliseconds with ms as units
*
* @function
* @param {Number} value - Number to format, `1` is formatted as `1ms`
......@@ -95,8 +114,9 @@ export const getFormatter = (format = SUPPORTED_FORMATS.number) => {
return suffixFormatter(s__('Units|ms'));
}
// Digital
if (format === SUPPORTED_FORMATS.bytes) {
// Digital (Metric)
if (format === SUPPORTED_FORMATS.decimalBytes) {
/**
* Formats a number of bytes scaled up to larger digital
* units for larger numbers.
......@@ -162,6 +182,76 @@ export const getFormatter = (format = SUPPORTED_FORMATS.number) => {
*/
return scaledSIFormatter('B', 5);
}
// Digital (IEC)
if (format === SUPPORTED_FORMATS.bytes) {
/**
* Formats a number of bytes scaled up to larger digital
* units for larger numbers.
*
* @function
* @param {Number} value - Number to format, `1` is formatted as `1B`
* @param {Number} fractionDigits - number of precision decimals
*/
return scaledBinaryFormatter('B');
}
if (format === SUPPORTED_FORMATS.kibibytes) {
/**
* Formats a number of kilobytes scaled up to larger digital
* units for larger numbers.
*
* @function
* @param {Number} value - Number to format, `1` is formatted as `1kB`
* @param {Number} fractionDigits - number of precision decimals
*/
return scaledBinaryFormatter('B', 1);
}
if (format === SUPPORTED_FORMATS.mebibytes) {
/**
* Formats a number of megabytes scaled up to larger digital
* units for larger numbers.
*
* @function
* @param {Number} value - Number to format, `1` is formatted as `1MB`
* @param {Number} fractionDigits - number of precision decimals
*/
return scaledBinaryFormatter('B', 2);
}
if (format === SUPPORTED_FORMATS.gibibytes) {
/**
* Formats a number of gigabytes scaled up to larger digital
* units for larger numbers.
*
* @function
* @param {Number} value - Number to format, `1` is formatted as `1GB`
* @param {Number} fractionDigits - number of precision decimals
*/
return scaledBinaryFormatter('B', 3);
}
if (format === SUPPORTED_FORMATS.tebibytes) {
/**
* Formats a number of terabytes scaled up to larger digital
* units for larger numbers.
*
* @function
* @param {Number} value - Number to format, `1` is formatted as `1GB`
* @param {Number} fractionDigits - number of precision decimals
*/
return scaledBinaryFormatter('B', 4);
}
if (format === SUPPORTED_FORMATS.pebibytes) {
/**
* Formats a number of petabytes scaled up to larger digital
* units for larger numbers.
*
* @function
* @param {Number} value - Number to format, `1` is formatted as `1PB`
* @param {Number} fractionDigits - number of precision decimals
*/
return scaledBinaryFormatter('B', 5);
}
// Fail so client library addresses issue
throw TypeError(`${format} is not a valid number format`);
};
import axios from '~/lib/utils/axios_utils';
import * as constants from '../constants';
export default {
fetchDiscussions(endpoint, filter, persistFilter = true) {
const config =
filter !== undefined
? { params: { notes_filter: filter, persist_filter: persistFilter } }
: null;
return axios.get(endpoint, config);
},
replyToDiscussion(endpoint, data) {
return axios.post(endpoint, data);
},
updateNote(endpoint, data) {
return axios.put(endpoint, data);
},
createNewNote(endpoint, data) {
return axios.post(endpoint, data);
},
toggleResolveNote(endpoint, isResolved) {
const { RESOLVE_NOTE_METHOD_NAME, UNRESOLVE_NOTE_METHOD_NAME } = constants;
const method = isResolved ? UNRESOLVE_NOTE_METHOD_NAME : RESOLVE_NOTE_METHOD_NAME;
return axios[method](endpoint);
},
poll(data = {}) {
const endpoint = data.notesData.notesPath;
const { lastFetchedAt } = data;
const options = {
headers: {
'X-Last-Fetched-At': lastFetchedAt ? `${lastFetchedAt}` : undefined,
},
};
return axios.get(endpoint, options);
},
toggleIssueState(endpoint, data) {
return axios.put(endpoint, data);
},
};
......@@ -8,7 +8,6 @@ import Poll from '../../lib/utils/poll';
import * as types from './mutation_types';
import * as utils from './utils';
import * as constants from '../constants';
import service from '../services/notes_service';
import loadAwardsHandler from '../../awards_handler';
import sidebarTimeTrackingEventHub from '../../sidebar/event_hub';
import { isInViewport, scrollToElement, isInMRPage } from '../../lib/utils/common_utils';
......@@ -47,11 +46,17 @@ export const setNotesFetchedState = ({ commit }, state) =>
export const toggleDiscussion = ({ commit }, data) => commit(types.TOGGLE_DISCUSSION, data);
export const fetchDiscussions = ({ commit, dispatch }, { path, filter, persistFilter }) =>
service.fetchDiscussions(path, filter, persistFilter).then(({ data }) => {
export const fetchDiscussions = ({ commit, dispatch }, { path, filter, persistFilter }) => {
const config =
filter !== undefined
? { params: { notes_filter: filter, persist_filter: persistFilter } }
: null;
return axios.get(path, config).then(({ data }) => {
commit(types.SET_INITIAL_DISCUSSIONS, data);
dispatch('updateResolvableDiscussionsCounts');
});
};
export const updateDiscussion = ({ commit, state }, discussion) => {
commit(types.UPDATE_DISCUSSION, discussion);
......@@ -78,7 +83,7 @@ export const deleteNote = ({ dispatch }, note) =>
});
export const updateNote = ({ commit, dispatch }, { endpoint, note }) =>
service.updateNote(endpoint, note).then(({ data }) => {
axios.put(endpoint, note).then(({ data }) => {
commit(types.UPDATE_NOTE, data);
dispatch('startTaskList');
});
......@@ -109,7 +114,7 @@ export const replyToDiscussion = (
{ commit, state, getters, dispatch },
{ endpoint, data: reply },
) =>
service.replyToDiscussion(endpoint, reply).then(({ data }) => {
axios.post(endpoint, reply).then(({ data }) => {
if (data.discussion) {
commit(types.UPDATE_DISCUSSION, data.discussion);
......@@ -126,7 +131,7 @@ export const replyToDiscussion = (
});
export const createNewNote = ({ commit, dispatch }, { endpoint, data: reply }) =>
service.createNewNote(endpoint, reply).then(({ data }) => {
axios.post(endpoint, reply).then(({ data }) => {
if (!data.errors) {
commit(types.ADD_NEW_NOTE, data);
......@@ -156,20 +161,24 @@ export const resolveDiscussion = ({ state, dispatch, getters }, { discussionId }
});
};
export const toggleResolveNote = ({ commit, dispatch }, { endpoint, isResolved, discussion }) =>
service.toggleResolveNote(endpoint, isResolved).then(({ data }) => {
const mutationType = discussion ? types.UPDATE_DISCUSSION : types.UPDATE_NOTE;
export const toggleResolveNote = ({ commit, dispatch }, { endpoint, isResolved, discussion }) => {
const method = isResolved
? constants.UNRESOLVE_NOTE_METHOD_NAME
: constants.RESOLVE_NOTE_METHOD_NAME;
const mutationType = discussion ? types.UPDATE_DISCUSSION : types.UPDATE_NOTE;
return axios[method](endpoint).then(({ data }) => {
commit(mutationType, data);
dispatch('updateResolvableDiscussionsCounts');
dispatch('updateMergeRequestWidget');
});
};
export const closeIssue = ({ commit, dispatch, state }) => {
dispatch('toggleStateButtonLoading', true);
return service.toggleIssueState(state.notesData.closePath).then(({ data }) => {
return axios.put(state.notesData.closePath).then(({ data }) => {
commit(types.CLOSE_ISSUE);
dispatch('emitStateChangedEvent', data);
dispatch('toggleStateButtonLoading', false);
......@@ -178,7 +187,7 @@ export const closeIssue = ({ commit, dispatch, state }) => {
export const reopenIssue = ({ commit, dispatch, state }) => {
dispatch('toggleStateButtonLoading', true);
return service.toggleIssueState(state.notesData.reopenPath).then(({ data }) => {
return axios.put(state.notesData.reopenPath).then(({ data }) => {
commit(types.REOPEN_ISSUE);
dispatch('emitStateChangedEvent', data);
dispatch('toggleStateButtonLoading', false);
......@@ -355,11 +364,35 @@ const pollSuccessCallBack = (resp, commit, state, getters, dispatch) => {
return resp;
};
const getFetchDataParams = state => {
const endpoint = state.notesData.notesPath;
const options = {
headers: {
'X-Last-Fetched-At': state.lastFetchedAt ? `${state.lastFetchedAt}` : undefined,
},
};
return { endpoint, options };
};
export const fetchData = ({ commit, state, getters }) => {
const { endpoint, options } = getFetchDataParams(state);
axios
.get(endpoint, options)
.then(({ data }) => pollSuccessCallBack(data, commit, state, getters))
.catch(() => Flash(__('Something went wrong while fetching latest comments.')));
};
export const poll = ({ commit, state, getters, dispatch }) => {
eTagPoll = new Poll({
resource: service,
resource: {
poll: () => {
const { endpoint, options } = getFetchDataParams(state);
return axios.get(endpoint, options);
},
},
method: 'poll',
data: state,
successCallback: ({ data }) => pollSuccessCallBack(data, commit, state, getters, dispatch),
errorCallback: () => Flash(__('Something went wrong while fetching latest comments.')),
});
......@@ -367,7 +400,7 @@ export const poll = ({ commit, state, getters, dispatch }) => {
if (!Visibility.hidden()) {
eTagPoll.makeRequest();
} else {
service.poll(state);
fetchData({ commit, state, getters });
}
Visibility.change(() => {
......@@ -387,18 +420,6 @@ export const restartPolling = () => {
if (eTagPoll) eTagPoll.restart();
};
export const fetchData = ({ commit, state, getters }) => {
const requestData = {
endpoint: state.notesData.notesPath,
lastFetchedAt: state.lastFetchedAt,
};
service
.poll(requestData)
.then(({ data }) => pollSuccessCallBack(data, commit, state, getters))
.catch(() => Flash(__('Something went wrong while fetching latest comments.')));
};
export const toggleAward = ({ commit, getters }, { awardName, noteId }) => {
commit(types.TOGGLE_AWARD, { awardName, note: getters.notesById[noteId] });
};
......
import $ from 'jquery';
import _ from 'underscore';
import { escape } from 'lodash';
import { s__, n__, sprintf } from '~/locale';
import axios from '../lib/utils/axios_utils';
import PANEL_STATE from './constants';
......@@ -69,13 +69,13 @@ export default class PrometheusMetrics {
if (metric.active_metrics > 0) {
totalExporters += 1;
this.$monitoredMetricsList.append(
`<li>${_.escape(metric.group)}<span class="badge">${_.escape(
`<li>${escape(metric.group)}<span class="badge">${escape(
metric.active_metrics,
)}</span></li>`,
);
totalMonitoredMetrics += metric.active_metrics;
if (metric.metrics_missing_requirements > 0) {
this.$missingEnvVarMetricsList.append(`<li>${_.escape(metric.group)}</li>`);
this.$missingEnvVarMetricsList.append(`<li>${escape(metric.group)}</li>`);
totalMissingEnvVarMetrics += 1;
}
}
......
......@@ -59,7 +59,7 @@
max-width: 100%;
}
&:not(.md-file) img:not(.emoji) {
&:not(.md) img:not(.emoji) {
border: 1px solid $white-normal;
padding: 5px;
margin: 5px 0;
......
......@@ -312,6 +312,7 @@ class ProjectPolicy < BasePolicy
enable :destroy_artifacts
enable :daily_statistics
enable :admin_operations
enable :read_deploy_token
end
rule { (mirror_available & can?(:admin_project)) | admin }.enable :admin_remote_mirror
......
- if @wiki_home.present?
%div{ class: container_class }
.md.md-file.prepend-top-default.append-bottom-default
.md.prepend-top-default.append-bottom-default
= render_wiki_content(@wiki_home)
- else
- can_create_wiki = can?(current_user, :create_wiki, @project)
......
- if markup?(@blob.name)
.file-content.md.md-file
.file-content.md
= markup(@blob.name, @content)
- else
.diff-file
......
- blob = viewer.blob
- context = blob.respond_to?(:rendered_markup) ? { rendered: blob.rendered_markup } : {}
.file-content.md.md-file
.file-content.md
= markup(blob.name, blob.data, context)
......@@ -26,7 +26,7 @@
= (s_("WikiHistoricalPage|You can view the %{most_recent_link} or browse the %{history_link}.") % { most_recent_link: most_recent_link, history_link: history_link }).html_safe
.prepend-top-default.append-bottom-default
.md.md-file{ data: { qa_selector: 'wiki_page_content' } }
.md{ data: { qa_selector: 'wiki_page_content' } }
= render_wiki_content(@page)
= render 'sidebar'
......@@ -23,7 +23,7 @@
%i.fa.fa-file
%strong= snippet.file_name
- if markup?(snippet.file_name)
.file-content.md.md-file
.file-content.md
- snippet_chunks.each do |chunk|
- unless chunk[:data].empty?
= markup(snippet.file_name, chunk[:data])
......
---
title: Adds new activity panel to package details page
merge_request: 25534
author:
type: added
---
title: Empty state for Code Review Analytics
merge_request: 25793
author:
type: added
---
title: Add endpoint for listing all deploy tokens for a project
merge_request: 25186
author:
type: added
---
title: Update DAST auto-deploy-image to v0.10.0
merge_request: 25922
author:
type: other
......@@ -2,15 +2,14 @@
## List all deploy tokens
Get a list of all deploy tokens across all projects of the GitLab instance.
Get a list of all deploy tokens across the GitLab instance. This endpoint requires admin access.
>**Note:**
> This endpoint requires admin access.
```
```plaintext
GET /deploy_tokens
```
Example request:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/deploy_tokens"
```
......@@ -24,7 +23,47 @@ Example response:
"name": "MyToken",
"username": "gitlab+deploy-token-1",
"expires_at": "2020-02-14T00:00:00.000Z",
"token": "jMRvtPNxrn3crTAGukpZ",
"scopes": [
"read_repository",
"read_registry"
]
}
]
```
## Project deploy tokens
Project deploy token API endpoints require project maintainer access or higher.
### List project deploy tokens
Get a list of a project's deploy tokens.
```plaintext
GET /projects/:id/deploy_tokens
```
Parameters:
| Attribute | Type | Required | Description |
|:---------------|:---------------|:---------|:-----------------------------------------------------------------------------|
| `id` | integer/string | yes | ID or [URL-encoded path of the project](README.md#namespaced-path-encoding). |
Example request:
```shell
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/1/deploy_tokens"
```
Example response:
```json
[
{
"id": 1,
"name": "MyToken",
"username": "gitlab+deploy-token-1",
"expires_at": "2020-02-14T00:00:00.000Z",
"scopes": [
"read_repository",
"read_registry"
......
......@@ -2609,12 +2609,12 @@ input EpicSetSubscriptionInput {
clientMutationId: String
"""
The group the epic to (un)subscribe is in
The group the epic to mutate is in
"""
groupPath: ID!
"""
The iid of the epic to (un)subscribe
The iid of the epic to mutate
"""
iid: ID!
......@@ -7820,7 +7820,7 @@ input UpdateEpicInput {
"""
The iid of the epic to mutate
"""
iid: String!
iid: ID!
"""
The IDs of labels to be removed from the epic.
......
......@@ -24409,6 +24409,20 @@
"description": "Autogenerated input type of UpdateEpic",
"fields": null,
"inputFields": [
{
"name": "iid",
"description": "The iid of the epic to mutate",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "ID",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "groupPath",
"description": "The group the epic to mutate is in",
......@@ -24519,20 +24533,6 @@
},
"defaultValue": null
},
{
"name": "iid",
"description": "The iid of the epic to mutate",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "SCALAR",
"name": "String",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "stateEvent",
"description": "State event for the epic",
......@@ -24863,8 +24863,8 @@
"fields": null,
"inputFields": [
{
"name": "groupPath",
"description": "The group the epic to (un)subscribe is in",
"name": "iid",
"description": "The iid of the epic to mutate",
"type": {
"kind": "NON_NULL",
"name": null,
......@@ -24877,8 +24877,8 @@
"defaultValue": null
},
{
"name": "iid",
"description": "The iid of the epic to (un)subscribe",
"name": "groupPath",
"description": "The group the epic to mutate is in",
"type": {
"kind": "NON_NULL",
"name": null,
......
......@@ -4,11 +4,14 @@ disqus_identifier: 'https://docs.gitlab.com/ee/workflow/forking_workflow.html'
# Project forking workflow
Forking a project to your own namespace is useful if you have no write
access to the project you want to contribute to. Even if you do have write
access or can request it, we recommend working together in the same
repository since it is simpler. See our [GitLab Flow](../../../topics/gitlab_flow.md)
document more information about using branches to work together.
Whenever possible, it's recommended to work in a common Git repository and use
[branching strategies](../../../topics/gitlab_flow.md) to manage your work. However,
if you do not have write access for the repository you want to contribute to, you
can create a fork.
A fork is a personal copy of the repository and all its branches, which you create
in a namespace of your choice. This way you can make changes in your own fork and
submit them through a merge request to the repo you don't have access to.
## Creating a fork
......@@ -27,7 +30,7 @@ Forking a project is, in most cases, a two-step process.
The fork is created. The permissions you have in the namespace are the permissions you will have in the fork.
CAUTION: **CAUTION:**
CAUTION: **Caution:**
In GitLab 12.6 and later, when project owners [reduce a project's visibility](../../../public_access/public_access.md#reducing-visibility),
it **removes the relationship** between a project and all its forks.
......@@ -37,10 +40,11 @@ You can use [repository mirroring](repository_mirroring.md) to keep your fork sy
The main difference is that with repository mirroring your remote fork will be automatically kept up-to-date.
Without mirroring, to work locally you'll have to user `git pull` to update your local repo with the fork on GitLab. You'll have to fetch locally and push it back to the remote repo to update it.
Without mirroring, to work locally you'll have to use `git pull` to update your local repo
with the upstream project, then push the changes back to your fork to update it.
CAUTION: **Caution:**
With mirroring, before approving a merge request you'll likely to be asked to sync, hence automating it is recommend.
With mirroring, before approving a merge request, you'll likely be asked to sync; hence automating it is recommend.
Read more about [How to keep your fork up to date with its origin](https://about.gitlab.com/blog/2016/12/01/how-to-keep-your-fork-up-to-date-with-its-origin/).
......
......@@ -4,8 +4,6 @@ module API
class DeployTokens < Grape::API
include PaginationParams
before { authenticated_as_admin! }
desc 'Return all deploy tokens' do
detail 'This feature was introduced in GitLab 12.9.'
success Entities::DeployToken
......@@ -14,7 +12,27 @@ module API
use :pagination
end
get 'deploy_tokens' do
authenticated_as_admin!
present paginate(DeployToken.all), with: Entities::DeployToken
end
params do
requires :id, type: Integer, desc: 'The ID of a project'
end
resource :projects, requirements: API::NAMESPACE_OR_PROJECT_REQUIREMENTS do
params do
use :pagination
end
desc 'List deploy tokens for a project' do
detail 'This feature was introduced in GitLab 12.9'
success Entities::DeployToken
end
get ':id/deploy_tokens' do
authorize!(:read_deploy_token, user_project)
present paginate(user_project.deploy_tokens), with: Entities::DeployToken
end
end
end
end
......@@ -3,7 +3,8 @@
module API
module Entities
class DeployToken < Grape::Entity
expose :id, :name, :username, :expires_at, :token, :scopes
# exposing :token is a security risk and should be avoided
expose :id, :name, :username, :expires_at, :scopes
end
end
end
.dast-auto-deploy:
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.9.1"
image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:v0.10.0"
dast_environment_deploy:
extends: .dast-auto-deploy
......
......@@ -4820,6 +4820,9 @@ msgstr ""
msgid "Code Review"
msgstr ""
msgid "Code Review Analytics displays a table of open merge requests considered to be in code review. There are currently no merge requests in review for this project and/or filters."
msgstr ""
msgid "Code owner approval is required"
msgstr ""
......@@ -13602,22 +13605,22 @@ msgstr ""
msgid "PackageRegistry|NuGet Command"
msgstr ""
msgid "PackageRegistry|Registry Setup"
msgid "PackageRegistry|Pipeline %{linkStart}%{linkEnd} triggered %{timestamp} by %{author}"
msgstr ""
msgid "PackageRegistry|Remove package"
msgid "PackageRegistry|Published to the repository at %{timestamp}"
msgstr ""
msgid "PackageRegistry|There are no packages yet"
msgid "PackageRegistry|Registry Setup"
msgstr ""
msgid "PackageRegistry|There was a problem fetching the details for this package."
msgid "PackageRegistry|Remove package"
msgstr ""
msgid "PackageRegistry|There was an error fetching the pipeline information."
msgid "PackageRegistry|There are no packages yet"
msgstr ""
msgid "PackageRegistry|Unable to fetch pipeline information"
msgid "PackageRegistry|There was a problem fetching the details for this package."
msgstr ""
msgid "PackageRegistry|Unable to load package"
......@@ -22468,6 +22471,9 @@ msgstr ""
msgid "You don't have any deployments right now."
msgstr ""
msgid "You don't have any open merge requests"
msgstr ""
msgid "You don't have any projects available."
msgstr ""
......
{
"type": "object",
"required": [
"id",
"name",
"username",
"expires_at",
"scopes"
],
"properties": {
"id": {
"type": "integer"
},
"name": {
"type": "string"
},
"username": {
"type": "string"
},
"expires_at": {
"type": "date"
},
"scopes": {
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false
}
\ No newline at end of file
{
"type": "array",
"items": {
"$ref": "deploy_token.json"
}
}
\ No newline at end of file
import { shallowMount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
import mockData from '../mock_data'; // can also use 'mockGroup', but not useful to test here
const mockProject = mockData();
import { mockProject } from '../mock_data'; // can also use 'mockGroup', but not useful to test here
describe('FrequentItemsListItemComponent', () => {
let wrapper;
......
import Vue from 'vue';
import mountComponent from 'spec/helpers/vue_mount_component_helper';
import { mount } from '@vue/test-utils';
import frequentItemsListComponent from '~/frequent_items/components/frequent_items_list.vue';
import frequentItemsListItemComponent from '~/frequent_items/components/frequent_items_list_item.vue';
import { mockFrequentProjects } from '../mock_data';
const createComponent = (namespace = 'projects') => {
const Component = Vue.extend(frequentItemsListComponent);
return mountComponent(Component, {
namespace,
items: mockFrequentProjects,
isFetchFailed: false,
hasSearchQuery: false,
matcher: 'lab',
});
};
describe('FrequentItemsListComponent', () => {
let vm;
beforeEach(() => {
vm = createComponent();
});
let wrapper;
const createComponent = (props = {}) => {
wrapper = mount(frequentItemsListComponent, {
propsData: {
namespace: 'projects',
items: mockFrequentProjects,
isFetchFailed: false,
hasSearchQuery: false,
matcher: 'lab',
...props,
},
});
};
afterEach(() => {
vm.$destroy();
wrapper.destroy();
});
describe('computed', () => {
describe('isListEmpty', () => {
it('should return `true` or `false` representing whether if `items` is empty or not with projects', () => {
vm.items = [];
createComponent({
items: [],
});
expect(vm.isListEmpty).toBe(true);
expect(wrapper.vm.isListEmpty).toBe(true);
vm.items = mockFrequentProjects;
wrapper.setProps({
items: mockFrequentProjects,
});
expect(vm.isListEmpty).toBe(false);
expect(wrapper.vm.isListEmpty).toBe(false);
});
});
describe('fetched item messages', () => {
it('should return appropriate empty list message based on value of `localStorageFailed` prop with projects', () => {
vm.isFetchFailed = true;
createComponent({
isFetchFailed: true,
});
expect(vm.listEmptyMessage).toBe('This feature requires browser localStorage support');
expect(wrapper.vm.listEmptyMessage).toBe(
'This feature requires browser localStorage support',
);
vm.isFetchFailed = false;
wrapper.setProps({
isFetchFailed: false,
});
expect(vm.listEmptyMessage).toBe('Projects you visit often will appear here');
expect(wrapper.vm.listEmptyMessage).toBe('Projects you visit often will appear here');
});
});
describe('searched item messages', () => {
it('should return appropriate empty list message based on value of `searchFailed` prop with projects', () => {
vm.hasSearchQuery = true;
vm.isFetchFailed = true;
createComponent({
hasSearchQuery: true,
isFetchFailed: true,
});
expect(vm.listEmptyMessage).toBe('Something went wrong on our end.');
expect(wrapper.vm.listEmptyMessage).toBe('Something went wrong on our end.');
vm.isFetchFailed = false;
wrapper.setProps({
isFetchFailed: false,
});
expect(vm.listEmptyMessage).toBe('Sorry, no projects matched your search');
expect(wrapper.vm.listEmptyMessage).toBe('Sorry, no projects matched your search');
});
});
});
describe('template', () => {
it('should render component element with list of projects', done => {
vm.items = mockFrequentProjects;
Vue.nextTick(() => {
expect(vm.$el.classList.contains('frequent-items-list-container')).toBe(true);
expect(vm.$el.querySelectorAll('ul.list-unstyled').length).toBe(1);
expect(vm.$el.querySelectorAll('li.frequent-items-list-item-container').length).toBe(5);
done();
it('should render component element with list of projects', () => {
createComponent();
return wrapper.vm.$nextTick(() => {
expect(wrapper.classes('frequent-items-list-container')).toBe(true);
expect(wrapper.findAll({ ref: 'frequentItemsList' })).toHaveLength(1);
expect(wrapper.findAll(frequentItemsListItemComponent)).toHaveLength(5);
});
});
it('should render component element with empty message', done => {
vm.items = [];
it('should render component element with empty message', () => {
createComponent({
items: [],
});
Vue.nextTick(() => {
expect(vm.$el.querySelectorAll('li.section-empty').length).toBe(1);
expect(vm.$el.querySelectorAll('li.frequent-items-list-item-container').length).toBe(0);
done();
return wrapper.vm.$nextTick(() => {
expect(wrapper.vm.$el.querySelectorAll('li.section-empty')).toHaveLength(1);
expect(wrapper.findAll(frequentItemsListItemComponent)).toHaveLength(0);
});
});
});
......
import { TEST_HOST } from 'helpers/test_constants';
export default () => ({
export const mockFrequentProjects = [
{
id: 1,
name: 'GitLab Community Edition',
namespace: 'gitlab-org / gitlab-ce',
webUrl: `${TEST_HOST}/gitlab-org/gitlab-foss`,
avatarUrl: null,
frequency: 1,
lastAccessedOn: Date.now(),
},
{
id: 2,
name: 'GitLab CI',
namespace: 'gitlab-org / gitlab-ci',
webUrl: `${TEST_HOST}/gitlab-org/gitlab-ci`,
avatarUrl: null,
frequency: 9,
lastAccessedOn: Date.now(),
},
{
id: 3,
name: 'Typeahead.Js',
namespace: 'twitter / typeahead-js',
webUrl: `${TEST_HOST}/twitter/typeahead-js`,
avatarUrl: '/uploads/-/system/project/avatar/7/TWBS.png',
frequency: 2,
lastAccessedOn: Date.now(),
},
{
id: 4,
name: 'Intel',
namespace: 'platform / hardware / bsp / intel',
webUrl: `${TEST_HOST}/platform/hardware/bsp/intel`,
avatarUrl: null,
frequency: 3,
lastAccessedOn: Date.now(),
},
{
id: 5,
name: 'v4.4',
namespace: 'platform / hardware / bsp / kernel / common / v4.4',
webUrl: `${TEST_HOST}/platform/hardware/bsp/kernel/common/v4.4`,
avatarUrl: null,
frequency: 8,
lastAccessedOn: Date.now(),
},
];
export const mockProject = {
id: 1,
name: 'GitLab Community Edition',
namespace: 'gitlab-org / gitlab-ce',
webUrl: `${TEST_HOST}/gitlab-org/gitlab-foss`,
avatarUrl: null,
});
};
......@@ -3,109 +3,149 @@ import { getFormatter, SUPPORTED_FORMATS } from '~/lib/utils/unit_format';
describe('unit_format', () => {
// Exhaustive formatting checks for every entry in SUPPORTED_FORMATS:
// plain numbers, percentages, durations, metric (SI) byte units and
// binary (IEC) byte units.
describe('when a supported format is provided, the returned function formats', () => {
  it('numbers, by default', () => {
    expect(getFormatter()(1)).toBe('1');
  });

  it('numbers', () => {
    const formatNumber = getFormatter(SUPPORTED_FORMATS.number);

    expect(formatNumber(1)).toBe('1');
    expect(formatNumber(100)).toBe('100');
    expect(formatNumber(1000)).toBe('1,000');
    expect(formatNumber(10000)).toBe('10,000');
    expect(formatNumber(1000000)).toBe('1,000,000');
  });

  it('percent', () => {
    const formatPercent = getFormatter(SUPPORTED_FORMATS.percent);

    // `percent` treats 1 as 100%; the optional 2nd argument is the number
    // of decimal places.
    expect(formatPercent(1)).toBe('100%');
    expect(formatPercent(1, 2)).toBe('100.00%');

    expect(formatPercent(0.1)).toBe('10%');
    expect(formatPercent(0.5)).toBe('50%');

    expect(formatPercent(0.888888)).toBe('89%');
    expect(formatPercent(0.888888, 2)).toBe('88.89%');
    expect(formatPercent(0.888888, 5)).toBe('88.88880%');

    expect(formatPercent(2)).toBe('200%');
    expect(formatPercent(10)).toBe('1,000%');
  });

  it('percentunit', () => {
    const formatPercentHundred = getFormatter(SUPPORTED_FORMATS.percentHundred);

    // `percentHundred` treats 100 as 100% (values already scaled to 0-100).
    expect(formatPercentHundred(1)).toBe('1%');
    expect(formatPercentHundred(1, 2)).toBe('1.00%');

    expect(formatPercentHundred(88.8888)).toBe('89%');
    expect(formatPercentHundred(88.8888, 2)).toBe('88.89%');
    expect(formatPercentHundred(88.8888, 5)).toBe('88.88880%');

    expect(formatPercentHundred(100)).toBe('100%');
    expect(formatPercentHundred(100, 2)).toBe('100.00%');

    expect(formatPercentHundred(200)).toBe('200%');
    expect(formatPercentHundred(1000)).toBe('1,000%');
  });

  it('seconds', () => {
    expect(getFormatter(SUPPORTED_FORMATS.seconds)(1)).toBe('1s');
  });

  it('milliseconds', () => {
    const formatMilliseconds = getFormatter(SUPPORTED_FORMATS.milliseconds);

    expect(formatMilliseconds(1)).toBe('1ms');
    expect(formatMilliseconds(100)).toBe('100ms');
    expect(formatMilliseconds(1000)).toBe('1,000ms');
    expect(formatMilliseconds(10000)).toBe('10,000ms');
    expect(formatMilliseconds(1000000)).toBe('1,000,000ms');
  });

  it('decimalBytes', () => {
    const formatDecimalBytes = getFormatter(SUPPORTED_FORMATS.decimalBytes);

    // Metric (SI) scaling: powers of 10, units B / kB / MB / GB / …
    expect(formatDecimalBytes(1)).toBe('1B');
    expect(formatDecimalBytes(1, 1)).toBe('1.0B');

    expect(formatDecimalBytes(10)).toBe('10B');
    expect(formatDecimalBytes(10 ** 2)).toBe('100B');
    expect(formatDecimalBytes(10 ** 3)).toBe('1kB');
    expect(formatDecimalBytes(10 ** 4)).toBe('10kB');
    expect(formatDecimalBytes(10 ** 5)).toBe('100kB');
    expect(formatDecimalBytes(10 ** 6)).toBe('1MB');
    expect(formatDecimalBytes(10 ** 7)).toBe('10MB');
    expect(formatDecimalBytes(10 ** 8)).toBe('100MB');
    expect(formatDecimalBytes(10 ** 9)).toBe('1GB');
    expect(formatDecimalBytes(10 ** 10)).toBe('10GB');
    expect(formatDecimalBytes(10 ** 11)).toBe('100GB');
  });

  it('kilobytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.kilobytes)(1)).toBe('1kB');
    expect(getFormatter(SUPPORTED_FORMATS.kilobytes)(1, 1)).toBe('1.0kB');
  });

  it('megabytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.megabytes)(1)).toBe('1MB');
    expect(getFormatter(SUPPORTED_FORMATS.megabytes)(1, 1)).toBe('1.0MB');
  });

  it('gigabytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.gigabytes)(1)).toBe('1GB');
    expect(getFormatter(SUPPORTED_FORMATS.gigabytes)(1, 1)).toBe('1.0GB');
  });

  it('terabytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.terabytes)(1)).toBe('1TB');
    expect(getFormatter(SUPPORTED_FORMATS.terabytes)(1, 1)).toBe('1.0TB');
  });

  it('petabytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.petabytes)(1)).toBe('1PB');
    expect(getFormatter(SUPPORTED_FORMATS.petabytes)(1, 1)).toBe('1.0PB');
  });

  it('bytes', () => {
    const formatBytes = getFormatter(SUPPORTED_FORMATS.bytes);

    // Binary (IEC) scaling: powers of 1024, units B / KiB / MiB / GiB / …
    expect(formatBytes(1)).toBe('1B');
    expect(formatBytes(1, 1)).toBe('1.0B');
    expect(formatBytes(10)).toBe('10B');
    expect(formatBytes(100)).toBe('100B');
    expect(formatBytes(1000)).toBe('1,000B');

    expect(formatBytes(1 * 1024)).toBe('1KiB');
    expect(formatBytes(1 * 1024 ** 2)).toBe('1MiB');
    expect(formatBytes(1 * 1024 ** 3)).toBe('1GiB');
  });

  it('kibibytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.kibibytes)(1)).toBe('1KiB');
    expect(getFormatter(SUPPORTED_FORMATS.kibibytes)(1, 1)).toBe('1.0KiB');
  });

  it('mebibytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.mebibytes)(1)).toBe('1MiB');
    expect(getFormatter(SUPPORTED_FORMATS.mebibytes)(1, 1)).toBe('1.0MiB');
  });

  it('gibibytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.gibibytes)(1)).toBe('1GiB');
    expect(getFormatter(SUPPORTED_FORMATS.gibibytes)(1, 1)).toBe('1.0GiB');
  });

  it('tebibytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.tebibytes)(1)).toBe('1TiB');
    expect(getFormatter(SUPPORTED_FORMATS.tebibytes)(1, 1)).toBe('1.0TiB');
  });

  it('pebibytes', () => {
    expect(getFormatter(SUPPORTED_FORMATS.pebibytes)(1)).toBe('1PiB');
    expect(getFormatter(SUPPORTED_FORMATS.pebibytes)(1, 1)).toBe('1.0PiB');
  });
});
......
......@@ -5,7 +5,6 @@ import { mount } from '@vue/test-utils';
import { setTestTimeout } from 'helpers/timeout';
import axios from '~/lib/utils/axios_utils';
import NotesApp from '~/notes/components/notes_app.vue';
import service from '~/notes/services/notes_service';
import createStore from '~/notes/stores';
import '~/behaviors/markdown/render_gfm';
// TODO: use generated fixture (https://gitlab.com/gitlab-org/gitlab-foss/issues/62491)
......@@ -192,7 +191,6 @@ describe('note_app', () => {
describe('individual note', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getIndividualNoteResponse);
jest.spyOn(service, 'updateNote');
wrapper = mountComponent();
return waitForDiscussionsRequest().then(() => {
wrapper.find('.js-note-edit').trigger('click');
......@@ -203,18 +201,18 @@ describe('note_app', () => {
expect(wrapper.find('.js-vue-issue-note-form').exists()).toBe(true);
});
it('calls the service to update the note', () => {
it('calls the store action to update the note', () => {
jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} }));
wrapper.find('.js-vue-issue-note-form').value = 'this is a note';
wrapper.find('.js-vue-issue-save').trigger('click');
expect(service.updateNote).toHaveBeenCalled();
expect(axios.put).toHaveBeenCalled();
});
});
describe('discussion note', () => {
beforeEach(() => {
axiosMock.onAny().reply(mockData.getDiscussionNoteResponse);
jest.spyOn(service, 'updateNote');
wrapper = mountComponent();
return waitForDiscussionsRequest().then(() => {
wrapper.find('.js-note-edit').trigger('click');
......@@ -226,10 +224,11 @@ describe('note_app', () => {
});
it('updates the note and resets the edit form', () => {
jest.spyOn(axios, 'put').mockImplementation(() => Promise.resolve({ data: {} }));
wrapper.find('.js-vue-issue-note-form').value = 'this is a note';
wrapper.find('.js-vue-issue-save').trigger('click');
expect(service.updateNote).toHaveBeenCalled();
expect(axios.put).toHaveBeenCalled();
});
});
});
......
......@@ -178,67 +178,63 @@ describe Issue do
let(:namespace) { build(:namespace, path: 'sample-namespace') }
let(:project) { build(:project, name: 'sample-project', namespace: namespace) }
let(:issue) { build(:issue, iid: 1, project: project) }
let(:group) { create(:group, name: 'Group', path: 'sample-group') }
context 'when nil argument' do
  # With no reference context, only the short `#iid` form is produced.
  it 'returns issue id' do
    expect(issue.to_reference).to eq("#1")
  end
end
context 'when full is true' do
  # `full: true` always yields the namespace-qualified path, whatever
  # project or group is supplied as the reference context.
  it 'returns complete path to the issue' do
    expect(issue.to_reference(full: true)).to eq('sample-namespace/sample-project#1')
    expect(issue.to_reference(project, full: true)).to eq('sample-namespace/sample-project#1')
    expect(issue.to_reference(group, full: true)).to eq('sample-namespace/sample-project#1')
  end
end
context 'when same project argument' do
  # Referencing from the issue's own project keeps the short `#iid` form.
  it 'returns issue id' do
    expect(issue.to_reference(project)).to eq("#1")
  end

  it 'returns complete path to the issue with full: true' do
    expect(issue.to_reference(full: true)).to eq 'sample-namespace/sample-project#1'
  end
end
# Examples grouped by project argument: the reference is shortened,
# project-qualified, or fully namespace-qualified depending on how the
# given project relates to the issue's own project.
context 'when argument is a project' do
  context 'when same project' do
    it 'returns issue id' do
      expect(issue.to_reference(project)).to eq("#1")
    end

    it 'returns full reference with full: true' do
      expect(issue.to_reference(project, full: true)).to eq 'sample-namespace/sample-project#1'
    end
  end

  context 'when cross-project in same namespace' do
    let(:another_project) do
      build(:project, name: 'another-project', namespace: project.namespace)
    end

    # Same namespace: only the project name is needed to disambiguate.
    it 'returns a cross-project reference' do
      expect(issue.to_reference(another_project)).to eq "sample-project#1"
    end
  end

  context 'when cross-project in different namespace' do
    let(:another_namespace) { build(:namespace, path: 'another-namespace') }
    let(:another_namespace_project) { build(:project, path: 'another-project', namespace: another_namespace) }

    # Different namespace: the reference must carry the full path.
    it 'returns complete path to the issue' do
      expect(issue.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project#1'
    end
  end
end
context 'when argument is a namespace' do
context 'with same project path' do
context 'when same as issue' do
it 'returns path to the issue with the project name' do
expect(issue.to_reference(namespace)).to eq 'sample-project#1'
end
it 'returns full reference with full: true' do
expect(issue.to_reference(namespace, full: true)).to eq 'sample-namespace/sample-project#1'
end
end
context 'with different project path' do
it 'returns full path to the issue' do
context 'when different to issue namespace' do
let(:group) { build(:group, name: 'Group', path: 'sample-group') }
it 'returns full path to the issue with full: true' do
expect(issue.to_reference(group)).to eq 'sample-namespace/sample-project#1'
end
end
......
......@@ -52,7 +52,7 @@ describe ProjectPolicy do
admin_snippet admin_project_member admin_note admin_wiki admin_project
admin_commit_status admin_build admin_container_image
admin_pipeline admin_environment admin_deployment destroy_release add_cluster
daily_statistics
daily_statistics read_deploy_token
]
end
......
......@@ -3,43 +3,84 @@
require 'spec_helper'
describe API::DeployTokens do
let(:user) { create(:user) }
let(:creator) { create(:user) }
let(:project) { create(:project, creator_id: creator.id) }
let!(:deploy_token) { create(:deploy_token, projects: [project]) }
describe 'GET /deploy_tokens' do
  # Issue the request and return the response so the one-liner
  # `is_expected` examples can assert on the HTTP status directly.
  subject do
    get api('/deploy_tokens', user)
    response
  end

  context 'when unauthenticated' do
    let(:user) { nil }

    it { is_expected.to have_gitlab_http_status(:unauthorized) }
  end

  context 'when authenticated as non-admin user' do
    let(:user) { creator }

    it { is_expected.to have_gitlab_http_status(:forbidden) }
  end

  context 'when authenticated as admin' do
    let(:user) { create(:admin) }

    it { is_expected.to have_gitlab_http_status(:ok) }

    it 'returns all deploy tokens' do
      subject

      expect(response).to include_pagination_headers
      expect(json_response).to be_an Array
      expect(json_response.first['id']).to eq(deploy_token.id)
      expect(response).to match_response_schema('public_api/v4/deploy_tokens')
    end
  end
end
describe 'GET /projects/:id/deploy_tokens' do
  # Issue the request and return the response so the one-liner
  # `is_expected` examples can assert on the HTTP status directly.
  subject do
    get api("/projects/#{project.id}/deploy_tokens", user)
    response
  end

  context 'when unauthenticated' do
    let(:user) { nil }

    it { is_expected.to have_gitlab_http_status(:not_found) }
  end

  context 'when authenticated as non-admin user' do
    before do
      project.add_developer(user)
    end

    it { is_expected.to have_gitlab_http_status(:forbidden) }
  end

  context 'when authenticated as maintainer' do
    let!(:other_deploy_token) { create(:deploy_token) }

    before do
      project.add_maintainer(user)
    end

    it { is_expected.to have_gitlab_http_status(:ok) }

    it 'returns all deploy tokens for the project' do
      subject

      expect(response).to include_pagination_headers
      expect(response).to match_response_schema('public_api/v4/deploy_tokens')
    end

    it 'does not return deploy tokens for other projects' do
      subject

      returned_ids = json_response.map { |token| token['id'] }

      expect(returned_ids).not_to include(other_deploy_token.id)
    end
  end
end
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment