Commit 756bd842 authored by Phil Hughes

Merge branch 'ce-to-ee-2018-05-23' into 'master'

CE upstream - 2018-05-23 09:39 UTC

See merge request gitlab-org/gitlab-ee!5820
parents 8d8c89ff 51b94860
......@@ -24,6 +24,8 @@ const Api = {
commitPath: '/api/:version/projects/:id/repository/commits',
branchSinglePath: '/api/:version/projects/:id/repository/branches/:branch',
createBranchPath: '/api/:version/projects/:id/repository/branches',
pipelinesPath: '/api/:version/projects/:id/pipelines',
pipelineJobsPath: '/api/:version/projects/:id/pipelines/:pipeline_id/jobs',
geoNodesPath: '/api/:version/geo_nodes',
group(groupId, callback) {
......@@ -224,33 +226,54 @@ const Api = {
});
},
pipelines(projectPath, params = {}) {
const url = Api.buildUrl(this.pipelinesPath).replace(':id', encodeURIComponent(projectPath));
return axios.get(url, { params });
},
pipelineJobs(projectPath, pipelineId, params = {}) {
const url = Api.buildUrl(this.pipelineJobsPath)
.replace(':id', encodeURIComponent(projectPath))
.replace(':pipeline_id', pipelineId);
return axios.get(url, { params });
},
approverUsers(search, options, callback = $.noop) {
const url = Api.buildUrl('/autocomplete/users.json');
-return axios.get(url, {
-params: Object.assign({
-search,
-per_page: 20,
-}, options),
-}).then(({ data }) => {
-callback(data);
-return data;
-});
+return axios
+.get(url, {
+params: Object.assign(
+{
+search,
+per_page: 20,
+},
+options,
+),
+})
+.then(({ data }) => {
+callback(data);
+return data;
+});
},
ldap_groups(query, provider, callback) {
const url = Api.buildUrl(this.ldapGroupsPath).replace(':provider', provider);
-return axios.get(url, {
-params: {
-search: query,
-per_page: 20,
-active: true,
-},
-}).then(({ data }) => {
-callback(data);
-return data;
-});
+return axios
+.get(url, {
+params: {
+search: query,
+per_page: 20,
+active: true,
+},
+})
+.then(({ data }) => {
+callback(data);
+return data;
+});
},
buildUrl(url) {
......
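The two client methods added above (pipelines and pipelineJobs) simply expand the :id and :pipeline_id placeholders and pass any query parameters straight to axios. A minimal usage sketch, assuming a hypothetical caller; the project path, sha and logging below are illustrative and not part of this merge request:

import Api from '~/api';

// Illustrative values; only Api.pipelines / Api.pipelineJobs come from this MR.
const projectPath = 'group/project';

Api.pipelines(projectPath, { sha: 'abc123', per_page: '1' })
  .then(({ data }) => {
    const latestPipeline = data[0];
    return Api.pipelineJobs(projectPath, latestPipeline.id, { page: '1' });
  })
  .then(({ data: jobs }) => {
    jobs.forEach(job => console.log(`${job.stage}/${job.name}: ${job.status}`));
  });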
......@@ -39,12 +39,10 @@ export default {
return this.allBlobs.slice(0, MAX_FILE_FINDER_RESULTS);
}
-return fuzzaldrinPlus
-.filter(this.allBlobs, searchText, {
-key: 'path',
-maxResults: MAX_FILE_FINDER_RESULTS,
-})
-.sort((a, b) => b.lastOpenedAt - a.lastOpenedAt);
+return fuzzaldrinPlus.filter(this.allBlobs, searchText, {
+key: 'path',
+maxResults: MAX_FILE_FINDER_RESULTS,
+});
},
filteredBlobsLength() {
return this.filteredBlobs.length;
......
......@@ -52,7 +52,10 @@ export default {
methods: {
...mapActions(['toggleFileFinder']),
mousetrapStopCallback(e, el, combo) {
-if (combo === 't' && el.classList.contains('dropdown-input-field')) {
+if (
+(combo === 't' && el.classList.contains('dropdown-input-field')) ||
+el.classList.contains('inputarea')
+) {
return true;
} else if (combo === 'command+p' || combo === 'ctrl+p') {
return false;
......
......@@ -43,9 +43,13 @@ export default {
},
},
watch: {
-file(oldVal, newVal) {
+file(newVal, oldVal) {
+if (oldVal.pending) {
+this.removePendingTab(oldVal);
+}
// Compare key to allow for files opened in review mode to be cached differently
-if (newVal.key !== this.file.key) {
+if (oldVal.key !== this.file.key) {
this.initMonaco();
if (this.currentActivityView !== activityBarViews.edit) {
......@@ -99,6 +103,7 @@ export default {
'setFileViewMode',
'setFileEOL',
'updateViewer',
'removePendingTab',
]),
initMonaco() {
if (this.shouldHideEditor) return;
......
......@@ -41,7 +41,7 @@ const router = new VueRouter({
component: EmptyRouterComponent,
children: [
{
-path: ':targetmode(edit|tree|blob)/:branch/*',
+path: ':targetmode(edit|tree|blob)/*',
component: EmptyRouterComponent,
},
{
......@@ -63,23 +63,27 @@ router.beforeEach((to, from, next) => {
.then(() => {
const fullProjectId = `${to.params.namespace}/${to.params.project}`;
-if (to.params.branch) {
-store.dispatch('setCurrentBranchId', to.params.branch);
+const baseSplit = to.params[0].split('/-/');
+const branchId = baseSplit[0].slice(-1) === '/' ? baseSplit[0].slice(0, -1) : baseSplit[0];
+if (branchId) {
+const basePath = baseSplit.length > 1 ? baseSplit[1] : '';
+store.dispatch('setCurrentBranchId', branchId);
store.dispatch('getBranchData', {
projectId: fullProjectId,
-branchId: to.params.branch,
+branchId,
});
store
.dispatch('getFiles', {
projectId: fullProjectId,
-branchId: to.params.branch,
+branchId,
})
.then(() => {
-if (to.params[0]) {
-const path =
-to.params[0].slice(-1) === '/' ? to.params[0].slice(0, -1) : to.params[0];
+if (basePath) {
+const path = basePath.slice(-1) === '/' ? basePath.slice(0, -1) : basePath;
const treeEntryKey = Object.keys(store.state.entries).find(
key => key === path && !store.state.entries[key].pending,
);
......
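For reference, a small worked example of what the new '/-/' separator buys: with the wildcard route above, everything after the mode lands in to.params[0], and splitting on '/-/' lets branch names themselves contain slashes. The URL below is illustrative only.

// For /-/ide/project/some-group/some-project/edit/feature/login/-/src/main.js
// the segment captured by ':targetmode(edit|tree|blob)/*' would be:
const wildcard = 'feature/login/-/src/main.js'; // stands in for to.params[0]

const baseSplit = wildcard.split('/-/');
const branchId = baseSplit[0].slice(-1) === '/' ? baseSplit[0].slice(0, -1) : baseSplit[0];
const basePath = baseSplit.length > 1 ? baseSplit[1] : '';

// branchId === 'feature/login', basePath === 'src/main.js'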
......@@ -63,7 +63,9 @@ export const getFileData = ({ state, commit, dispatch }, { path, makeFileActive
const file = state.entries[path];
commit(types.TOGGLE_LOADING, { entry: file });
return service
-.getFileData(`${gon.relative_url_root ? gon.relative_url_root : ''}${file.url}`)
+.getFileData(
+`${gon.relative_url_root ? gon.relative_url_root : ''}${file.url.replace('/-/', '/')}`,
+)
.then(res => {
const pageTitle = decodeURI(normalizeHeaders(res.headers)['PAGE-TITLE']);
setPageTitle(pageTitle);
......
......@@ -5,6 +5,7 @@ import * as actions from './actions';
import * as getters from './getters';
import mutations from './mutations';
import commitModule from './modules/commit';
import pipelines from './modules/pipelines';
Vue.use(Vuex);
......@@ -15,5 +16,6 @@ export default new Vuex.Store({
getters,
modules: {
commit: commitModule,
pipelines,
},
});
......@@ -204,17 +204,23 @@ export const commitChanges = ({ commit, state, getters, dispatch, rootState, roo
dispatch('updateViewer', 'editor', { root: true });
router.push(
-`/project/${rootState.currentProjectId}/blob/${getters.branchName}/${
+`/project/${rootState.currentProjectId}/blob/${getters.branchName}/-/${
rootGetters.activeFile.path
}`,
);
}
})
.then(() => dispatch('updateCommitAction', consts.COMMIT_TO_CURRENT_BRANCH))
-.then(() => dispatch('refreshLastCommitData', {
-projectId: rootState.currentProjectId,
-branchId: rootState.currentBranchId,
-}, { root: true }));
+.then(() =>
+dispatch(
+'refreshLastCommitData',
+{
+projectId: rootState.currentProjectId,
+branchId: rootState.currentBranchId,
+},
+{ root: true },
+),
+);
})
.catch(err => {
let errMsg = __('Error committing changes. Please try again.');
......
import { __ } from '../../../../locale';
import Api from '../../../../api';
import flash from '../../../../flash';
import * as types from './mutation_types';
export const requestLatestPipeline = ({ commit }) => commit(types.REQUEST_LATEST_PIPELINE);
export const receiveLatestPipelineError = ({ commit }) => {
flash(__('There was an error loading latest pipeline'));
commit(types.RECEIVE_LASTEST_PIPELINE_ERROR);
};
export const receiveLatestPipelineSuccess = ({ commit }, pipeline) =>
commit(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, pipeline);
export const fetchLatestPipeline = ({ dispatch, rootState }, sha) => {
dispatch('requestLatestPipeline');
return Api.pipelines(rootState.currentProjectId, { sha, per_page: '1' })
.then(({ data }) => {
dispatch('receiveLatestPipelineSuccess', data.pop());
})
.catch(() => dispatch('receiveLatestPipelineError'));
};
export const requestJobs = ({ commit }) => commit(types.REQUEST_JOBS);
export const receiveJobsError = ({ commit }) => {
flash(__('There was an error loading jobs'));
commit(types.RECEIVE_JOBS_ERROR);
};
export const receiveJobsSuccess = ({ commit }, data) => commit(types.RECEIVE_JOBS_SUCCESS, data);
export const fetchJobs = ({ dispatch, state, rootState }, page = '1') => {
dispatch('requestJobs');
Api.pipelineJobs(rootState.currentProjectId, state.latestPipeline.id, {
page,
})
.then(({ data, headers }) => {
const nextPage = headers && headers['x-next-page'];
dispatch('receiveJobsSuccess', data);
if (nextPage) {
dispatch('fetchJobs', nextPage);
}
})
.catch(() => dispatch('receiveJobsError'));
};
export default () => {};
export const hasLatestPipeline = state => !state.isLoadingPipeline && !!state.latestPipeline;
export const failedJobs = state =>
state.stages.reduce(
(acc, stage) => acc.concat(stage.jobs.filter(job => job.status === 'failed')),
[],
);
import state from './state';
import * as actions from './actions';
import mutations from './mutations';
import * as getters from './getters';
export default {
namespaced: true,
state: state(),
actions,
mutations,
getters,
};
export const REQUEST_LATEST_PIPELINE = 'REQUEST_LATEST_PIPELINE';
export const RECEIVE_LASTEST_PIPELINE_ERROR = 'RECEIVE_LASTEST_PIPELINE_ERROR';
export const RECEIVE_LASTEST_PIPELINE_SUCCESS = 'RECEIVE_LASTEST_PIPELINE_SUCCESS';
export const REQUEST_JOBS = 'REQUEST_JOBS';
export const RECEIVE_JOBS_ERROR = 'RECEIVE_JOBS_ERROR';
export const RECEIVE_JOBS_SUCCESS = 'RECEIVE_JOBS_SUCCESS';
/* eslint-disable no-param-reassign */
import * as types from './mutation_types';
export default {
[types.REQUEST_LATEST_PIPELINE](state) {
state.isLoadingPipeline = true;
},
[types.RECEIVE_LASTEST_PIPELINE_ERROR](state) {
state.isLoadingPipeline = false;
},
[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](state, pipeline) {
state.isLoadingPipeline = false;
if (pipeline) {
state.latestPipeline = {
id: pipeline.id,
status: pipeline.status,
};
}
},
[types.REQUEST_JOBS](state) {
state.isLoadingJobs = true;
},
[types.RECEIVE_JOBS_ERROR](state) {
state.isLoadingJobs = false;
},
[types.RECEIVE_JOBS_SUCCESS](state, jobs) {
state.isLoadingJobs = false;
state.stages = jobs.reduce((acc, job) => {
let stage = acc.find(s => s.title === job.stage);
if (!stage) {
stage = {
title: job.stage,
jobs: [],
};
acc.push(stage);
}
stage.jobs = stage.jobs.concat({
id: job.id,
name: job.name,
status: job.status,
stage: job.stage,
duration: job.duration,
});
return acc;
}, state.stages);
},
};
export default () => ({
isLoadingPipeline: false,
isLoadingJobs: false,
latestPipeline: null,
stages: [],
});
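Taken together, the files above form the namespaced 'pipelines' store module that the earlier store/index.js hunk registers in the IDE store. A hedged sketch of how a component might consume it; the component itself and its latestPipelineSha property are hypothetical, only the module's actions, getters and state come from this merge request:

import { mapActions, mapGetters, mapState } from 'vuex';

export default {
  computed: {
    // the module is registered under the 'pipelines' namespace in the IDE store
    ...mapState('pipelines', ['stages', 'isLoadingJobs']),
    ...mapGetters('pipelines', ['hasLatestPipeline', 'failedJobs']),
  },
  created() {
    // latestPipelineSha is a stand-in for however the caller knows the commit SHA
    this.fetchLatestPipeline(this.latestPipelineSha);
  },
  watch: {
    hasLatestPipeline(hasPipeline) {
      // once the latest pipeline is in state, fetch its jobs (paginated internally)
      if (hasPipeline) this.fetchJobs();
    },
  },
  methods: {
    ...mapActions('pipelines', ['fetchLatestPipeline', 'fetchJobs']),
  },
};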
......@@ -26,7 +26,7 @@ self.addEventListener('message', e => {
id: folderPath,
name: folderName,
path: folderPath,
-url: `/${projectId}/tree/${branchId}/${folderPath}/`,
+url: `/${projectId}/tree/${branchId}/-/${folderPath}/`,
type: 'tree',
parentTreeUrl: parentFolder ? parentFolder.url : `/${projectId}/tree/${branchId}/`,
tempFile,
......@@ -64,7 +64,7 @@ self.addEventListener('message', e => {
id: path,
name: blobName,
path,
-url: `/${projectId}/blob/${branchId}/${path}`,
+url: `/${projectId}/blob/${branchId}/-/${path}`,
type: 'blob',
parentTreeUrl: fileFolder ? fileFolder.url : `/${projectId}/blob/${branchId}`,
tempFile,
......
......@@ -17,7 +17,9 @@ module BlobHelper
end
def ide_edit_path(project = @project, ref = @ref, path = @path, options = {})
-"#{ide_path}/project#{url_for([project, "edit", "blob", id: [ref, path], script_name: "/"])}"
+segments = [ide_path, 'project', project.full_path, 'edit', ref]
+segments.concat(['-', path]) if path.present?
+File.join(segments)
end
def edit_blob_button(project = @project, ref = @ref, path = @path, options = {})
......@@ -331,7 +333,6 @@ module BlobHelper
if !on_top_of_branch?(project, ref)
edit_disabled_button_tag(text, common_classes)
# This condition only applies to users who are logged in
# Web IDE (Beta) requires the user to have this feature enabled
elsif !current_user || (current_user && can_modify_blob?(blob, project, ref))
edit_link_tag(text, edit_path, common_classes)
elsif can?(current_user, :fork_project, project) && can?(current_user, :create_merge_request_in, project)
......
......@@ -84,7 +84,7 @@
- if can_collaborate
= succeed " " do
-= link_to ide_edit_path(@project, @id, ""), class: 'btn btn-default' do
+= link_to ide_edit_path(@project, @ref, @path), class: 'btn btn-default' do
= _('Web IDE')
= render 'projects/buttons/download', project: @project, ref: @ref
......@@ -9,6 +9,7 @@ const CompressionPlugin = require('compression-webpack-plugin');
const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin;
const ROOT_PATH = path.resolve(__dirname, '..');
const CACHE_PATH = path.join(ROOT_PATH, 'tmp/cache');
const IS_PRODUCTION = process.env.NODE_ENV === 'production';
const IS_DEV_SERVER = process.argv.join(' ').indexOf('webpack-dev-server') !== -1;
const DEV_SERVER_HOST = process.env.DEV_SERVER_HOST || 'localhost';
......@@ -17,6 +18,9 @@ const DEV_SERVER_LIVERELOAD = IS_DEV_SERVER && process.env.DEV_SERVER_LIVERELOAD
const WEBPACK_REPORT = process.env.WEBPACK_REPORT;
const NO_COMPRESSION = process.env.NO_COMPRESSION;
const VUE_VERSION = require('vue/package.json').version;
const VUE_LOADER_VERSION = require('vue-loader/package.json').version;
let autoEntriesCount = 0;
let watchAutoEntries = [];
const defaultEntries = ['./main'];
......@@ -112,12 +116,21 @@ module.exports = {
exclude: path => /node_modules|vendor[\\/]assets/.test(path) && !/\.vue\.js/.test(path),
loader: 'babel-loader',
options: {
-cacheDirectory: path.join(ROOT_PATH, 'tmp/cache/babel-loader'),
+cacheDirectory: path.join(CACHE_PATH, 'babel-loader'),
},
},
{
test: /\.vue$/,
loader: 'vue-loader',
options: {
cacheDirectory: path.join(CACHE_PATH, 'vue-loader'),
cacheIdentifier: [
process.env.NODE_ENV || 'development',
webpack.version,
VUE_VERSION,
VUE_LOADER_VERSION,
].join('|'),
},
},
{
test: /\.svg$/,
......
......@@ -258,13 +258,19 @@ describe BlobHelper do
it 'returns full IDE path' do
Rails.application.routes.default_url_options[:script_name] = nil
-expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master/")
+expect(helper.ide_edit_path(project, "master", "")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
end
it 'returns full IDE path with second -' do
Rails.application.routes.default_url_options[:script_name] = nil
expect(helper.ide_edit_path(project, "testing/slashes", "readme.md")).to eq("/-/ide/project/#{project.namespace.path}/#{project.path}/edit/testing/slashes/-/readme.md")
end
it 'returns IDE path without relative_url_root' do
Rails.application.routes.default_url_options[:script_name] = "/gitlab"
-expect(helper.ide_edit_path(project, "master", "")).to eq("/gitlab/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master/")
+expect(helper.ide_edit_path(project, "master", "")).to eq("/gitlab/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
end
end
end
......@@ -55,7 +55,7 @@ export default (action, payload, state, expectedMutations, expectedActions, done
};
// call the action with mocked store and arguments
-action({ commit, state, dispatch }, payload);
+action({ commit, state, dispatch, rootState: state }, payload);
// check if no mutations should have been dispatched
if (expectedMutations.length === 0) {
......
......@@ -107,5 +107,11 @@ describe('ide component', () => {
vm.mousetrapStopCallback(null, vm.$el.querySelector('.dropdown-input-field'), 't'),
).toBe(true);
});
it('stops callback in monaco editor', () => {
setFixtures('<div class="inputarea"></div>');
expect(vm.mousetrapStopCallback(null, document.querySelector('.inputarea'), 't')).toBe(true);
});
});
});
......@@ -346,4 +346,24 @@ describe('RepoEditor', () => {
});
});
});
it('calls removePendingTab when old file is pending', done => {
spyOnProperty(vm, 'shouldHideEditor').and.returnValue(true);
spyOn(vm, 'removePendingTab');
vm.file.pending = true;
vm
.$nextTick()
.then(() => {
vm.file = file('testing');
return vm.$nextTick();
})
.then(() => {
expect(vm.removePendingTab).toHaveBeenCalled();
})
.then(done)
.catch(done.fail);
});
});
-// eslint-disable-next-line import/prefer-default-export
export const projectData = {
id: 1,
name: 'abcproject',
......@@ -14,3 +13,49 @@ export const projectData = {
mergeRequests: {},
merge_requests_enabled: true,
};
export const pipelines = [
{
id: 1,
ref: 'master',
sha: '123',
status: 'failed',
},
{
id: 2,
ref: 'master',
sha: '213',
status: 'success',
},
];
export const jobs = [
{
id: 1,
name: 'test',
status: 'failed',
stage: 'test',
duration: 1,
},
{
id: 2,
name: 'test 2',
status: 'failed',
stage: 'test',
duration: 1,
},
{
id: 3,
name: 'test 3',
status: 'failed',
stage: 'test',
duration: 1,
},
{
id: 4,
name: 'test 3',
status: 'failed',
stage: 'build',
duration: 1,
},
];
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import actions, {
requestLatestPipeline,
receiveLatestPipelineError,
receiveLatestPipelineSuccess,
fetchLatestPipeline,
requestJobs,
receiveJobsError,
receiveJobsSuccess,
fetchJobs,
} from '~/ide/stores/modules/pipelines/actions';
import state from '~/ide/stores/modules/pipelines/state';
import * as types from '~/ide/stores/modules/pipelines/mutation_types';
import testAction from '../../../../helpers/vuex_action_helper';
import { pipelines, jobs } from '../../../mock_data';
describe('IDE pipelines actions', () => {
let mockedState;
let mock;
beforeEach(() => {
mockedState = state();
mock = new MockAdapter(axios);
gon.api_version = 'v4';
mockedState.currentProjectId = 'test/project';
});
afterEach(() => {
mock.restore();
});
describe('requestLatestPipeline', () => {
it('commits request', done => {
testAction(
requestLatestPipeline,
null,
mockedState,
[{ type: types.REQUEST_LATEST_PIPELINE }],
[],
done,
);
});
});
describe('receiveLatestPipelineError', () => {
it('commits error', done => {
testAction(
receiveLatestPipelineError,
null,
mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_ERROR }],
[],
done,
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveLatestPipelineError({ commit() {} });
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveLatestPipelineSuccess', () => {
it('commits pipeline', done => {
testAction(
receiveLatestPipelineSuccess,
pipelines[0],
mockedState,
[{ type: types.RECEIVE_LASTEST_PIPELINE_SUCCESS, payload: pipelines[0] }],
[],
done,
);
});
});
describe('fetchLatestPipeline', () => {
describe('success', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(200, pipelines);
});
it('dispatches request', done => {
testAction(
fetchLatestPipeline,
'123',
mockedState,
[],
[{ type: 'requestLatestPipeline' }, { type: 'receiveLatestPipelineSuccess' }],
done,
);
});
it('dispatches success with latest pipeline', done => {
testAction(
fetchLatestPipeline,
'123',
mockedState,
[],
[
{ type: 'requestLatestPipeline' },
{ type: 'receiveLatestPipelineSuccess', payload: pipelines[0] },
],
done,
);
});
it('calls axios with correct params', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchLatestPipeline({ dispatch() {}, rootState: state }, '123');
expect(apiSpy).toHaveBeenCalledWith(jasmine.anything(), {
params: {
sha: '123',
per_page: '1',
},
});
});
});
describe('error', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(500);
});
it('dispatches error', done => {
testAction(
fetchLatestPipeline,
'123',
mockedState,
[],
[{ type: 'requestLatestPipeline' }, { type: 'receiveLatestPipelineError' }],
done,
);
});
});
});
describe('requestJobs', () => {
it('commits request', done => {
testAction(requestJobs, null, mockedState, [{ type: types.REQUEST_JOBS }], [], done);
});
});
describe('receiveJobsError', () => {
it('commits error', done => {
testAction(
receiveJobsError,
null,
mockedState,
[{ type: types.RECEIVE_JOBS_ERROR }],
[],
done,
);
});
it('creates flash message', () => {
const flashSpy = spyOnDependency(actions, 'flash');
receiveJobsError({ commit() {} });
expect(flashSpy).toHaveBeenCalled();
});
});
describe('receiveJobsSuccess', () => {
it('commits jobs', done => {
testAction(
receiveJobsSuccess,
jobs,
mockedState,
[{ type: types.RECEIVE_JOBS_SUCCESS, payload: jobs }],
[],
done,
);
});
});
describe('fetchJobs', () => {
let page = '';
beforeEach(() => {
mockedState.latestPipeline = pipelines[0];
});
describe('success', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines\/(.*)\/jobs/).replyOnce(() => [
200,
jobs,
{
'x-next-page': page,
},
]);
});
it('dispatches request', done => {
testAction(
fetchJobs,
null,
mockedState,
[],
[{ type: 'requestJobs' }, { type: 'receiveJobsSuccess' }],
done,
);
});
it('dispatches success with latest pipeline', done => {
testAction(
fetchJobs,
null,
mockedState,
[],
[{ type: 'requestJobs' }, { type: 'receiveJobsSuccess', payload: jobs }],
done,
);
});
it('dispatches twice for both pages', done => {
page = '2';
testAction(
fetchJobs,
null,
mockedState,
[],
[
{ type: 'requestJobs' },
{ type: 'receiveJobsSuccess', payload: jobs },
{ type: 'fetchJobs', payload: '2' },
{ type: 'requestJobs' },
{ type: 'receiveJobsSuccess', payload: jobs },
],
done,
);
});
it('calls axios with correct URL', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '1' },
});
});
it('calls axios with page next page', () => {
const apiSpy = spyOn(axios, 'get').and.callThrough();
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState });
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '1' },
});
page = '2';
fetchJobs({ dispatch() {}, state: mockedState, rootState: mockedState }, page);
expect(apiSpy).toHaveBeenCalledWith('/api/v4/projects/test%2Fproject/pipelines/1/jobs', {
params: { page: '2' },
});
});
});
describe('error', () => {
beforeEach(() => {
mock.onGet(/\/api\/v4\/projects\/(.*)\/pipelines(.*)/).replyOnce(500);
});
it('dispatches error', done => {
testAction(
fetchJobs,
null,
mockedState,
[],
[{ type: 'requestJobs' }, { type: 'receiveJobsError' }],
done,
);
});
});
});
});
import * as getters from '~/ide/stores/modules/pipelines/getters';
import state from '~/ide/stores/modules/pipelines/state';
describe('IDE pipeline getters', () => {
let mockedState;
beforeEach(() => {
mockedState = state();
});
describe('hasLatestPipeline', () => {
it('returns false when loading is true', () => {
mockedState.isLoadingPipeline = true;
expect(getters.hasLatestPipeline(mockedState)).toBe(false);
});
it('returns false when pipelines is null', () => {
mockedState.latestPipeline = null;
expect(getters.hasLatestPipeline(mockedState)).toBe(false);
});
it('returns false when loading is true & pipelines is null', () => {
mockedState.latestPipeline = null;
mockedState.isLoadingPipeline = true;
expect(getters.hasLatestPipeline(mockedState)).toBe(false);
});
it('returns true when loading is false & pipelines is an object', () => {
mockedState.latestPipeline = {
id: 1,
};
mockedState.isLoadingPipeline = false;
expect(getters.hasLatestPipeline(mockedState)).toBe(true);
});
});
describe('failedJobs', () => {
it('returns array of failed jobs', () => {
mockedState.stages = [
{
title: 'test',
jobs: [{ id: 1, status: 'failed' }, { id: 2, status: 'success' }],
},
{
title: 'build',
jobs: [{ id: 3, status: 'failed' }, { id: 4, status: 'failed' }],
},
];
expect(getters.failedJobs(mockedState).length).toBe(3);
expect(getters.failedJobs(mockedState)).toEqual([
{
id: 1,
status: jasmine.anything(),
},
{
id: 3,
status: jasmine.anything(),
},
{
id: 4,
status: jasmine.anything(),
},
]);
});
});
});
import mutations from '~/ide/stores/modules/pipelines/mutations';
import state from '~/ide/stores/modules/pipelines/state';
import * as types from '~/ide/stores/modules/pipelines/mutation_types';
import { pipelines, jobs } from '../../../mock_data';
describe('IDE pipelines mutations', () => {
let mockedState;
beforeEach(() => {
mockedState = state();
});
describe(types.REQUEST_LATEST_PIPELINE, () => {
it('sets loading to true', () => {
mutations[types.REQUEST_LATEST_PIPELINE](mockedState);
expect(mockedState.isLoadingPipeline).toBe(true);
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_ERROR, () => {
it('sets loading to false', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_ERROR](mockedState);
expect(mockedState.isLoadingPipeline).toBe(false);
});
});
describe(types.RECEIVE_LASTEST_PIPELINE_SUCCESS, () => {
it('sets loading to false on success', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, pipelines[0]);
expect(mockedState.isLoadingPipeline).toBe(false);
});
it('sets latestPipeline', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, pipelines[0]);
expect(mockedState.latestPipeline).toEqual({
id: pipelines[0].id,
status: pipelines[0].status,
});
});
it('does not set latest pipeline if pipeline is null', () => {
mutations[types.RECEIVE_LASTEST_PIPELINE_SUCCESS](mockedState, null);
expect(mockedState.latestPipeline).toEqual(null);
});
});
describe(types.REQUEST_JOBS, () => {
it('sets jobs loading to true', () => {
mutations[types.REQUEST_JOBS](mockedState);
expect(mockedState.isLoadingJobs).toBe(true);
});
});
describe(types.RECEIVE_JOBS_ERROR, () => {
it('sets jobs loading to false', () => {
mutations[types.RECEIVE_JOBS_ERROR](mockedState);
expect(mockedState.isLoadingJobs).toBe(false);
});
});
describe(types.RECEIVE_JOBS_SUCCESS, () => {
it('sets jobs loading to false on success', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
expect(mockedState.isLoadingJobs).toBe(false);
});
it('sets stages', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
expect(mockedState.stages.length).toBe(2);
expect(mockedState.stages).toEqual([
{
title: 'test',
jobs: jasmine.anything(),
},
{
title: 'build',
jobs: jasmine.anything(),
},
]);
});
it('sets jobs in stages', () => {
mutations[types.RECEIVE_JOBS_SUCCESS](mockedState, jobs);
expect(mockedState.stages[0].jobs.length).toBe(3);
expect(mockedState.stages[1].jobs.length).toBe(1);
expect(mockedState.stages).toEqual([
{
title: jasmine.anything(),
jobs: jobs.filter(job => job.stage === 'test').map(job => ({
id: job.id,
name: job.name,
status: job.status,
stage: job.stage,
duration: job.duration,
})),
},
{
title: jasmine.anything(),
jobs: jobs.filter(job => job.stage === 'build').map(job => ({
id: job.id,
name: job.name,
status: job.status,
stage: job.stage,
duration: job.duration,
})),
},
]);
});
});
});
......@@ -1481,6 +1481,15 @@ cache-base@^1.0.1:
union-value "^1.0.0"
unset-value "^1.0.0"
cache-loader@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/cache-loader/-/cache-loader-1.2.2.tgz#6d5c38ded959a09cc5d58190ab5af6f73bd353f5"
dependencies:
loader-utils "^1.1.0"
mkdirp "^0.5.1"
neo-async "^2.5.0"
schema-utils "^0.4.2"
cacheable-request@^2.1.1:
version "2.1.4"
resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-2.1.4.tgz#0d808801b6342ad33c91df9d0b44dc09b91e5c3d"
......@@ -7689,7 +7698,7 @@ sax@~1.2.1:
version "1.2.2"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.2.tgz#fd8631a23bc7826bef5d871bdb87378c95647828"
-schema-utils@^0.4.0, schema-utils@^0.4.3, schema-utils@^0.4.4, schema-utils@^0.4.5:
+schema-utils@^0.4.0, schema-utils@^0.4.2, schema-utils@^0.4.3, schema-utils@^0.4.4, schema-utils@^0.4.5:
version "0.4.5"
resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-0.4.5.tgz#21836f0608aac17b78f9e3e24daff14a5ca13a3e"
dependencies:
......@@ -8957,9 +8966,9 @@ vue-hot-reload-api@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/vue-hot-reload-api/-/vue-hot-reload-api-2.3.0.tgz#97976142405d13d8efae154749e88c4e358cf926"
-vue-loader@^15.0.12:
-version "15.0.12"
-resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.0.12.tgz#9221e88f1c4f7657d425e40c676cd25671d5d294"
+vue-loader@^15.2.0:
+version "15.2.0"
+resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.2.0.tgz#5a8138e490a1040942d2f10ae68fa72b5a923364"
dependencies:
"@vue/component-compiler-utils" "^1.2.1"
hash-sum "^1.0.2"
......