Commit 029d68d3 authored by Stan Hu

Merge branch 'backstage/gb/improve-jobs-controller-performance' into 'master'

Improve performance of jobs controller show

Closes #60708

See merge request gitlab-org/gitlab-ce!28093
parents c2b7c6e6 a66cbb6e
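
In short: the job JSON now ships the pipeline's stages inside the job payload, so the sidebar can populate its stages dropdown without a second request to the pipeline endpoint, and the latest deployment's commit and action details (which the job page never renders) are no longer serialized. The sketch below condenses the new frontend flow from the hunks that follow; it uses plain functions rather than the real Vuex store, with `fetchJobsForStage` standing in for the store action of the same name.

// Condensed sketch of the new sidebar flow (illustration only, not the real store module).
function stagesFromJob(job) {
  // RECEIVE_JOB_SUCCESS now derives the stage list straight from the payload.
  return (job.pipeline && job.pipeline.details && job.pipeline.details.stages) || [];
}

function loadSidebar(job, selectedStage, fetchJobsForStage) {
  const stages = stagesFromJob(job);
  // Mirror the guard in the component watcher: only fetch jobs when the
  // payload actually contains the selected stage.
  const defaultStage = stages.find(stage => stage && stage.name === selectedStage);
  if (defaultStage) {
    fetchJobsForStage(defaultStage);
  }
  return stages;
}

Previously the same data came from a dedicated `fetchStages` action that requested `${pipeline.path}.json` on every job page load; that action, its mutations, and the `isLoadingStages` flag are all removed below.
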
......@@ -86,6 +86,7 @@ export default {
'isScrollTopDisabled',
'isScrolledToBottomBeforeReceivingTrace',
'hasError',
'selectedStage',
]),
...mapGetters([
'headerTime',
......@@ -121,7 +122,13 @@ export default {
// fetch the stages for the dropdown on the sidebar
job(newVal, oldVal) {
if (_.isEmpty(oldVal) && !_.isEmpty(newVal.pipeline)) {
this.fetchStages();
const stages = this.job.pipeline.details.stages || [];
const defaultStage = stages.find(stage => stage && stage.name === this.selectedStage);
if (defaultStage) {
this.fetchJobsForStage(defaultStage);
}
}
if (newVal.archived) {
......@@ -160,7 +167,7 @@ export default {
'setJobEndpoint',
'setTraceOptions',
'fetchJob',
'fetchStages',
'fetchJobsForStage',
'hideSidebar',
'showSidebar',
'toggleSidebar',
......@@ -269,7 +276,6 @@ export default {
:class="{ 'sticky-top border-bottom-0': hasTrace }"
>
<icon name="lock" class="align-text-bottom" />
{{ __('This job is archived. Only the complete pipeline can be retried.') }}
</div>
<!-- job log -->
......
......@@ -34,7 +34,7 @@ export default {
},
},
computed: {
...mapState(['job', 'stages', 'jobs', 'selectedStage', 'isLoadingStages']),
...mapState(['job', 'stages', 'jobs', 'selectedStage']),
coverage() {
return `${this.job.coverage}%`;
},
......@@ -208,7 +208,6 @@ export default {
/>
<stages-dropdown
v-if="!isLoadingStages"
:stages="stages"
:pipeline="job.pipeline"
:selected-stage="selectedStage"
......
......@@ -178,30 +178,6 @@ export const receiveTraceError = ({ commit }) => {
flash(__('An error occurred while fetching the job log.'));
};
/**
* Stages dropdown on sidebar
*/
export const requestStages = ({ commit }) => commit(types.REQUEST_STAGES);
export const fetchStages = ({ state, dispatch }) => {
dispatch('requestStages');
axios
.get(`${state.job.pipeline.path}.json`)
.then(({ data }) => {
// Set selected stage
dispatch('receiveStagesSuccess', data.details.stages);
const selectedStage = data.details.stages.find(stage => stage.name === state.selectedStage);
dispatch('fetchJobsForStage', selectedStage);
})
.catch(() => dispatch('receiveStagesError'));
};
export const receiveStagesSuccess = ({ commit }, data) =>
commit(types.RECEIVE_STAGES_SUCCESS, data);
export const receiveStagesError = ({ commit }) => {
commit(types.RECEIVE_STAGES_ERROR);
flash(__('An error occurred while fetching stages.'));
};
/**
* Jobs list on sidebar - depend on stages dropdown
*/
......@@ -209,7 +185,7 @@ export const requestJobsForStage = ({ commit }, stage) =>
commit(types.REQUEST_JOBS_FOR_STAGE, stage);
// On stage click, set selected stage + fetch job
export const fetchJobsForStage = ({ dispatch }, stage) => {
export const fetchJobsForStage = ({ dispatch }, stage = {}) => {
dispatch('requestJobsForStage', stage);
axios
......
......@@ -24,10 +24,6 @@ export const STOP_POLLING_TRACE = 'STOP_POLLING_TRACE';
export const RECEIVE_TRACE_SUCCESS = 'RECEIVE_TRACE_SUCCESS';
export const RECEIVE_TRACE_ERROR = 'RECEIVE_TRACE_ERROR';
export const REQUEST_STAGES = 'REQUEST_STAGES';
export const RECEIVE_STAGES_SUCCESS = 'RECEIVE_STAGES_SUCCESS';
export const RECEIVE_STAGES_ERROR = 'RECEIVE_STAGES_ERROR';
export const SET_SELECTED_STAGE = 'SET_SELECTED_STAGE';
export const REQUEST_JOBS_FOR_STAGE = 'REQUEST_JOBS_FOR_STAGE';
export const RECEIVE_JOBS_FOR_STAGE_SUCCESS = 'RECEIVE_JOBS_FOR_STAGE_SUCCESS';
......
......@@ -65,6 +65,11 @@ export default {
state.isLoading = false;
state.job = job;
state.stages =
job.pipeline && job.pipeline.details && job.pipeline.details.stages
? job.pipeline.details.stages
: [];
/**
* We only update it on the first request
* The dropdown can be changed by the user
......@@ -101,19 +106,7 @@ export default {
state.isScrolledToBottomBeforeReceivingTrace = toggle;
},
[types.REQUEST_STAGES](state) {
state.isLoadingStages = true;
},
[types.RECEIVE_STAGES_SUCCESS](state, stages) {
state.isLoadingStages = false;
state.stages = stages;
},
[types.RECEIVE_STAGES_ERROR](state) {
state.isLoadingStages = false;
state.stages = [];
},
[types.REQUEST_JOBS_FOR_STAGE](state, stage) {
[types.REQUEST_JOBS_FOR_STAGE](state, stage = {}) {
state.isLoadingJobs = true;
state.selectedStage = stage.name;
},
......
......@@ -25,7 +25,6 @@ export default () => ({
traceState: null,
// sidebar dropdown & list of jobs
isLoadingStages: false,
isLoadingJobs: false,
selectedStage: '',
stages: [],
......
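
Taken together, the store changes above save one XHR per job page visit. Roughly, with illustrative paths patterned on the mock data further down (not real endpoints):

// Illustrative request sequence for opening a job page.
const before = [
  'GET /root/ci-mock/-/jobs/4757.json', // fetchJob
  'GET /root/ci-mock/pipelines/140.json', // fetchStages (removed by this MR)
  'GET /root/ci-mock/pipelines/140/stage.json?stage=test', // fetchJobsForStage
];

// After this change the stages arrive inside the job payload, so the middle
// request disappears and only the selected stage's jobs are fetched.
const after = before.filter(url => !url.endsWith('/pipelines/140.json'));
console.log(after.length); // 2
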
......@@ -8,16 +8,18 @@ class BuildDetailsEntity < JobEntity
expose :stuck?, as: :stuck
expose :user, using: UserEntity
expose :runner, using: RunnerEntity
expose :metadata, using: BuildMetadataEntity
expose :pipeline, using: PipelineEntity
expose :deployment_status, if: -> (*) { build.starts_environment? } do
expose :deployment_status, as: :status
expose :persisted_environment, as: :environment, with: EnvironmentEntity
expose :persisted_environment, as: :environment do |build, options|
options.merge(deployment_details: false).yield_self do |opts|
EnvironmentEntity.represent(build.persisted_environment, opts)
end
end
end
expose :metadata, using: BuildMetadataEntity
expose :artifact, if: -> (*) { can?(current_user, :read_build, build) } do
expose :download_path, if: -> (*) { build.artifacts? } do |build|
download_project_job_artifacts_path(project, build)
......
......@@ -20,16 +20,39 @@ class DeploymentEntity < Grape::Entity
expose :created_at
expose :tag
expose :last?
expose :user, using: UserEntity
expose :commit, using: CommitEntity
expose :deployable, using: JobEntity
expose :manual_actions, using: JobEntity, if: -> (*) { can_create_deployment? }
expose :scheduled_actions, using: JobEntity, if: -> (*) { can_create_deployment? }
expose :deployable do |deployment, opts|
deployment.deployable.yield_self do |deployable|
if include_details?
JobEntity.represent(deployable, opts)
elsif can_read_deployables?
{ name: deployable.name,
build_path: project_job_path(deployable.project, deployable) }
end
end
end
expose :commit, using: CommitEntity, if: -> (*) { include_details? }
expose :manual_actions, using: JobEntity, if: -> (*) { include_details? && can_create_deployment? }
expose :scheduled_actions, using: JobEntity, if: -> (*) { include_details? && can_create_deployment? }
private
def include_details?
options.fetch(:deployment_details, true)
end
def can_create_deployment?
can?(request.current_user, :create_deployment, request.project)
end
def can_read_deployables?
##
# We intentionally do not check `:read_build, deployment.deployable`
# because it triggers a policy evaluation that involves multiple
# Gitaly calls that might not be cached.
#
can?(request.current_user, :read_build, request.project)
end
end
# frozen_string_literal: true
class PipelineDetailsEntity < PipelineEntity
expose :flags do
expose :latest?, as: :latest
end
expose :details do
expose :ordered_stages, as: :stages, using: StageEntity
expose :artifacts, using: BuildArtifactEntity
expose :manual_actions, using: BuildActionEntity
expose :scheduled_actions, using: BuildActionEntity
......
......@@ -20,7 +20,6 @@ class PipelineEntity < Grape::Entity
end
expose :flags do
expose :latest?, as: :latest
expose :stuck?, as: :stuck
expose :auto_devops_source?, as: :auto_devops
expose :merge_request_event?, as: :merge_request
......@@ -34,6 +33,7 @@ class PipelineEntity < Grape::Entity
expose :details do
expose :detailed_status, as: :status, with: DetailedStatusEntity
expose :ordered_stages, as: :stages, using: StageEntity
expose :duration
expose :finished_at
end
......
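
After this change each entry in the job payload's `pipeline.details.stages` carries only the dropdown metadata; the controller spec below pins the keys to `name`, `title`, `status`, `path` and `dropdown_path`, rather than the per-build `groups` and `latest_statuses` that the pipelines page serializes. A sketch of one such entry, mirroring the mock data added further down:

// Example stage entry as exposed on the job endpoint (values copied from the
// frontend mock data in this MR; shape only, not a live response).
const stage = {
  name: 'test',
  title: 'test: passed with warnings',
  status: {
    icon: 'status_warning',
    text: 'passed',
    label: 'passed with warnings',
    group: 'success-with-warnings',
  },
  path: '/jashkenas/underscore/pipelines/16#test',
  dropdown_path: '/jashkenas/underscore/pipelines/16/stage.json?stage=test',
};
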
---
title: Improve performance of jobs controller
merge_request: 28093
author:
type: performance
......@@ -877,9 +877,6 @@ msgstr ""
msgid "An error occurred while fetching sidebar data"
msgstr ""
msgid "An error occurred while fetching stages."
msgstr ""
msgid "An error occurred while fetching the board lists. Please try again."
msgstr ""
......
......@@ -101,7 +101,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
describe 'GET show' do
describe 'GET show', :request_store do
let!(:job) { create(:ci_build, :failed, pipeline: pipeline) }
let!(:second_job) { create(:ci_build, :failed, pipeline: pipeline) }
let!(:third_job) { create(:ci_build, :failed) }
......@@ -143,13 +143,24 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
project.add_developer(user)
sign_in(user)
allow_any_instance_of(Ci::Build).to receive(:merge_request).and_return(merge_request)
allow_any_instance_of(Ci::Build)
.to receive(:merge_request)
.and_return(merge_request)
end
it 'does not serialize builds in exposed stages' do
get_show_json
get_show(id: job.id, format: :json)
json_response.dig('pipeline', 'details', 'stages').tap do |stages|
expect(stages.map(&:keys).flatten)
.to eq %w[name title status path dropdown_path]
end
end
context 'when job failed' do
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['raw_path']).to match(%r{jobs/\d+/raw\z})
......@@ -159,6 +170,10 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
context 'when job is running' do
before do
get_show_json
end
context 'job is cancelable' do
let(:job) { create(:ci_build, :running, pipeline: pipeline) }
......@@ -181,6 +196,10 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
context 'when job has artifacts' do
before do
get_show_json
end
context 'with no expiry date' do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
......@@ -212,6 +231,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :success, :artifacts, pipeline: pipeline) }
it 'exposes empty state illustrations' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['status']['illustration']).to have_key('image')
......@@ -224,6 +245,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :success, pipeline: pipeline) }
it 'does not expose the deployment information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['deployment_status']).to be_nil
end
......@@ -234,11 +257,20 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let(:environment) { create(:environment, project: project, name: 'staging', state: :available) }
let(:job) { create(:ci_build, :running, environment: environment.name, pipeline: pipeline) }
before do
create(:deployment, :success, environment: environment, project: project)
end
it 'exposes the deployment information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to match_schema('job/job_details')
expect(json_response['deployment_status']["status"]).to eq 'creating'
expect(json_response['deployment_status']["environment"]).not_to be_nil
expect(json_response.dig('deployment_status', 'status')).to eq 'creating'
expect(json_response.dig('deployment_status', 'environment')).not_to be_nil
expect(json_response.dig('deployment_status', 'environment', 'last_deployment')).not_to be_nil
expect(json_response.dig('deployment_status', 'environment', 'last_deployment'))
.not_to include('commit')
end
end
......@@ -250,11 +282,11 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
before do
project.add_maintainer(user)
sign_in(user)
get_show(id: job.id, format: :json)
end
it 'user can edit runner' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['runner']).to have_key('edit_path')
......@@ -270,11 +302,11 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
before do
project.add_maintainer(user)
sign_in(user)
get_show(id: job.id, format: :json)
end
it 'user can not edit runner' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['runner']).not_to have_key('edit_path')
......@@ -289,11 +321,11 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
before do
project.add_maintainer(user)
sign_in(user)
get_show(id: job.id, format: :json)
end
it 'user can not edit runner' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['runner']).not_to have_key('edit_path')
......@@ -306,6 +338,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner) }
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['runners']['online']).to be false
......@@ -319,6 +353,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner: runner) }
it 'exposes needed information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['runners']['online']).to be false
......@@ -328,6 +364,10 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
context 'settings_path' do
before do
get_show_json
end
context 'when user is developer' do
it 'settings_path is not available' do
expect(response).to have_gitlab_http_status(:ok)
......@@ -354,6 +394,8 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context 'when no trace is available' do
it 'has_trace is false' do
get_show_json
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be false
end
......@@ -363,17 +405,21 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let(:job) { create(:ci_build, :running, :trace_live, pipeline: pipeline) }
it "has_trace is true" do
get_show_json
expect(response).to match_response_schema('job/job_details')
expect(json_response['has_trace']).to be true
end
end
it 'exposes the stage the job belongs to' do
get_show_json
expect(json_response['stage']).to eq('test')
end
end
context 'when requesting JSON job is triggered' do
context 'when requesting triggered job JSON' do
let!(:merge_request) { create(:merge_request, source_project: project) }
let(:trigger) { create(:ci_trigger, project: project) }
let(:trigger_request) { create(:ci_trigger_request, pipeline: pipeline, trigger: trigger) }
......@@ -383,15 +429,15 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
project.add_developer(user)
sign_in(user)
allow_any_instance_of(Ci::Build).to receive(:merge_request).and_return(merge_request)
allow_any_instance_of(Ci::Build)
.to receive(:merge_request)
.and_return(merge_request)
end
context 'with no variables' do
before do
get_show(id: job.id, format: :json)
end
it 'exposes trigger information' do
get_show_json
expect(response).to have_gitlab_http_status(:ok)
expect(response).to match_response_schema('job/job_details')
expect(json_response['trigger']['short_token']).to eq 'toke'
......@@ -408,7 +454,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
before do
project.add_maintainer(user)
get_show(id: job.id, format: :json)
get_show_json
end
it 'returns a job_detail' do
......@@ -432,7 +478,7 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
context 'user is not a maintainer' do
before do
get_show(id: job.id, format: :json)
get_show_json
end
it 'returns a job_detail' do
......@@ -456,6 +502,11 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
end
end
def get_show_json
expect { get_show(id: job.id, format: :json) }
.not_to change { Gitlab::GitalyClient.get_request_count }
end
def get_show(**extra_params)
params = {
namespace_id: project.namespace.to_param,
......
......@@ -31,7 +31,11 @@
"last_deployment": {
"oneOf": [
{ "type": "null" },
{ "$ref": "deployment.json" }
{ "$ref": "deployment.json" },
{
"name": { "type": "string" },
"build_path": { "type": "string" }
}
]
}
},
......
......@@ -150,44 +150,8 @@ describe('Jobs Store Mutations', () => {
});
});
describe('REQUEST_STAGES', () => {
it('sets isLoadingStages to true', () => {
mutations[types.REQUEST_STAGES](stateCopy);
expect(stateCopy.isLoadingStages).toEqual(true);
});
});
describe('RECEIVE_STAGES_SUCCESS', () => {
beforeEach(() => {
mutations[types.RECEIVE_STAGES_SUCCESS](stateCopy, [{ name: 'build' }]);
});
it('sets isLoadingStages to false', () => {
expect(stateCopy.isLoadingStages).toEqual(false);
});
it('sets stages', () => {
expect(stateCopy.stages).toEqual([{ name: 'build' }]);
});
});
describe('RECEIVE_STAGES_ERROR', () => {
beforeEach(() => {
mutations[types.RECEIVE_STAGES_ERROR](stateCopy);
});
it('sets isLoadingStages to false', () => {
expect(stateCopy.isLoadingStages).toEqual(false);
});
it('resets stages', () => {
expect(stateCopy.stages).toEqual([]);
});
});
describe('REQUEST_JOBS_FOR_STAGE', () => {
it('sets isLoadingStages to true', () => {
it('sets isLoadingJobs to true', () => {
mutations[types.REQUEST_JOBS_FOR_STAGE](stateCopy, { name: 'deploy' });
expect(stateCopy.isLoadingJobs).toEqual(true);
......
import Vue from 'vue';
import sidebarDetailsBlock from '~/jobs/components/sidebar.vue';
import createStore from '~/jobs/store';
import job, { stages, jobsInStage } from '../mock_data';
import job, { jobsInStage } from '../mock_data';
import { mountComponentWithStore } from '../../helpers/vue_mount_component_helper';
import { trimText } from '../../helpers/text_helper';
......@@ -131,18 +131,8 @@ describe('Sidebar details block', () => {
store.dispatch('receiveJobSuccess', job);
});
describe('while fetching stages', () => {
it('it does not render dropdown', () => {
store.dispatch('requestStages');
vm = mountComponentWithStore(SidebarComponent, { store });
expect(vm.$el.querySelector('.js-selected-stage')).toBeNull();
});
});
describe('with stages', () => {
beforeEach(() => {
store.dispatch('receiveStagesSuccess', stages);
vm = mountComponentWithStore(SidebarComponent, { store });
});
......@@ -156,7 +146,6 @@ describe('Sidebar details block', () => {
describe('without jobs for stages', () => {
beforeEach(() => {
store.dispatch('receiveJobSuccess', job);
store.dispatch('receiveStagesSuccess', stages);
vm = mountComponentWithStore(SidebarComponent, { store });
});
......@@ -168,7 +157,6 @@ describe('Sidebar details block', () => {
describe('with jobs for stages', () => {
beforeEach(() => {
store.dispatch('receiveJobSuccess', job);
store.dispatch('receiveStagesSuccess', stages);
store.dispatch('receiveJobsForStageSuccess', jobsInStage.latest_statuses);
vm = mountComponentWithStore(SidebarComponent, { store });
});
......
......@@ -3,140 +3,6 @@ import { TEST_HOST } from 'spec/test_constants';
const threeWeeksAgo = new Date();
threeWeeksAgo.setDate(threeWeeksAgo.getDate() - 21);
export default {
id: 4757,
name: 'test',
build_path: '/root/ci-mock/-/jobs/4757',
retry_path: '/root/ci-mock/-/jobs/4757/retry',
cancel_path: '/root/ci-mock/-/jobs/4757/cancel',
new_issue_path: '/root/ci-mock/issues/new',
playable: false,
created_at: threeWeeksAgo.toISOString(),
updated_at: threeWeeksAgo.toISOString(),
finished_at: threeWeeksAgo.toISOString(),
queued: 9.54,
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: `${TEST_HOST}/root/ci-mock/-/jobs/4757`,
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/-/jobs/4757/retry',
method: 'post',
},
},
coverage: 20,
erased_at: threeWeeksAgo.toISOString(),
erased: false,
duration: 6.785563,
tags: ['tag'],
user: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
web_url: 'http://localhost:3000/root',
},
erase_path: '/root/ci-mock/-/jobs/4757/erase',
artifacts: [null],
runner: {
id: 1,
description: 'local ci runner',
edit_path: '/root/ci-mock/runners/1/edit',
},
pipeline: {
id: 140,
user: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
web_url: 'http://localhost:3000/root',
},
active: false,
coverage: null,
source: 'unknown',
created_at: '2017-05-24T09:59:58.634Z',
updated_at: '2017-06-01T17:32:00.062Z',
path: '/root/ci-mock/pipelines/140',
flags: {
latest: true,
stuck: false,
yaml_errors: false,
retryable: false,
cancelable: false,
},
details: {
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/pipelines/140',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
},
duration: 6,
finished_at: '2017-06-01T17:32:00.042Z',
},
ref: {
name: 'abc',
path: '/root/ci-mock/commits/abc',
tag: false,
branch: true,
},
commit: {
id: 'c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
short_id: 'c5864777',
title: 'Add new file',
created_at: '2017-05-24T10:59:52.000+01:00',
parent_ids: ['798e5f902592192afaba73f4668ae30e56eae492'],
message: 'Add new file',
author_name: 'Root',
author_email: 'admin@example.com',
authored_date: '2017-05-24T10:59:52.000+01:00',
committer_name: 'Root',
committer_email: 'admin@example.com',
committed_date: '2017-05-24T10:59:52.000+01:00',
author: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
web_url: 'http://localhost:3000/root',
},
author_gravatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
commit_url:
'http://localhost:3000/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
commit_path: '/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
},
},
metadata: {
timeout_human_readable: '1m 40s',
timeout_source: 'runner',
},
merge_request: {
iid: 2,
path: '/root/ci-mock/merge_requests/2',
},
raw_path: '/root/ci-mock/builds/4757/raw',
has_trace: true,
};
export const stages = [
{
name: 'build',
......@@ -1043,6 +909,167 @@ export const stages = [
},
];
export default {
id: 4757,
name: 'test',
build_path: '/root/ci-mock/-/jobs/4757',
retry_path: '/root/ci-mock/-/jobs/4757/retry',
cancel_path: '/root/ci-mock/-/jobs/4757/cancel',
new_issue_path: '/root/ci-mock/issues/new',
playable: false,
created_at: threeWeeksAgo.toISOString(),
updated_at: threeWeeksAgo.toISOString(),
finished_at: threeWeeksAgo.toISOString(),
queued: 9.54,
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: `${TEST_HOST}/root/ci-mock/-/jobs/4757`,
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
action: {
icon: 'retry',
title: 'Retry',
path: '/root/ci-mock/-/jobs/4757/retry',
method: 'post',
},
},
coverage: 20,
erased_at: threeWeeksAgo.toISOString(),
erased: false,
duration: 6.785563,
tags: ['tag'],
user: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
web_url: 'http://localhost:3000/root',
},
erase_path: '/root/ci-mock/-/jobs/4757/erase',
artifacts: [null],
runner: {
id: 1,
description: 'local ci runner',
edit_path: '/root/ci-mock/runners/1/edit',
},
pipeline: {
id: 140,
user: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
web_url: 'http://localhost:3000/root',
},
active: false,
coverage: null,
source: 'unknown',
created_at: '2017-05-24T09:59:58.634Z',
updated_at: '2017-06-01T17:32:00.062Z',
path: '/root/ci-mock/pipelines/140',
flags: {
latest: true,
stuck: false,
yaml_errors: false,
retryable: false,
cancelable: false,
},
details: {
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
has_details: true,
details_path: '/root/ci-mock/pipelines/140',
favicon:
'/assets/ci_favicons/favicon_status_success-308b4fc054cdd1b68d0865e6cfb7b02e92e3472f201507418f8eddb74ac11a59.png',
},
duration: 6,
finished_at: '2017-06-01T17:32:00.042Z',
stages: [
{
dropdown_path: '/jashkenas/underscore/pipelines/16/stage.json?stage=build',
name: 'build',
path: '/jashkenas/underscore/pipelines/16#build',
status: {
icon: 'status_success',
text: 'passed',
label: 'passed',
group: 'success',
tooltip: 'passed',
},
title: 'build: passed',
},
{
dropdown_path: '/jashkenas/underscore/pipelines/16/stage.json?stage=test',
name: 'test',
path: '/jashkenas/underscore/pipelines/16#test',
status: {
icon: 'status_warning',
text: 'passed',
label: 'passed with warnings',
group: 'success-with-warnings',
},
title: 'test: passed with warnings',
},
],
},
ref: {
name: 'abc',
path: '/root/ci-mock/commits/abc',
tag: false,
branch: true,
},
commit: {
id: 'c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
short_id: 'c5864777',
title: 'Add new file',
created_at: '2017-05-24T10:59:52.000+01:00',
parent_ids: ['798e5f902592192afaba73f4668ae30e56eae492'],
message: 'Add new file',
author_name: 'Root',
author_email: 'admin@example.com',
authored_date: '2017-05-24T10:59:52.000+01:00',
committer_name: 'Root',
committer_email: 'admin@example.com',
committed_date: '2017-05-24T10:59:52.000+01:00',
author: {
name: 'Root',
username: 'root',
id: 1,
state: 'active',
avatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
web_url: 'http://localhost:3000/root',
},
author_gravatar_url:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
commit_url:
'http://localhost:3000/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
commit_path: '/root/ci-mock/commit/c58647773a6b5faf066d4ad6ff2c9fbba5f180f6',
},
},
metadata: {
timeout_human_readable: '1m 40s',
timeout_source: 'runner',
},
merge_request: {
iid: 2,
path: '/root/ci-mock/merge_requests/2',
},
raw_path: '/root/ci-mock/builds/4757/raw',
has_trace: true,
};
export const jobsInStage = {
name: 'build',
title: 'build: running',
......
......@@ -16,10 +16,6 @@ import {
stopPollingTrace,
receiveTraceSuccess,
receiveTraceError,
requestStages,
fetchStages,
receiveStagesSuccess,
receiveStagesError,
requestJobsForStage,
fetchJobsForStage,
receiveJobsForStageSuccess,
......@@ -307,107 +303,6 @@ describe('Job State actions', () => {
});
});
describe('requestStages', () => {
it('should commit REQUEST_STAGES mutation ', done => {
testAction(requestStages, null, mockedState, [{ type: types.REQUEST_STAGES }], [], done);
});
});
describe('fetchStages', () => {
let mock;
beforeEach(() => {
mockedState.job.pipeline = {
path: `${TEST_HOST}/endpoint`,
};
mockedState.selectedStage = 'deploy';
mock = new MockAdapter(axios);
});
afterEach(() => {
mock.restore();
});
describe('success', () => {
it('dispatches requestStages and receiveStagesSuccess, fetchJobsForStage ', done => {
mock
.onGet(`${TEST_HOST}/endpoint.json`)
.replyOnce(200, { details: { stages: [{ name: 'build' }, { name: 'deploy' }] } });
testAction(
fetchStages,
null,
mockedState,
[],
[
{
type: 'requestStages',
},
{
payload: [{ name: 'build' }, { name: 'deploy' }],
type: 'receiveStagesSuccess',
},
{
payload: { name: 'deploy' },
type: 'fetchJobsForStage',
},
],
done,
);
});
});
describe('error', () => {
beforeEach(() => {
mock.onGet(`${TEST_HOST}/endpoint.json`).reply(500);
});
it('dispatches requestStages and receiveStagesError ', done => {
testAction(
fetchStages,
null,
mockedState,
[],
[
{
type: 'requestStages',
},
{
type: 'receiveStagesError',
},
],
done,
);
});
});
});
describe('receiveStagesSuccess', () => {
it('should commit RECEIVE_STAGES_SUCCESS mutation ', done => {
testAction(
receiveStagesSuccess,
{},
mockedState,
[{ type: types.RECEIVE_STAGES_SUCCESS, payload: {} }],
[],
done,
);
});
});
describe('receiveStagesError', () => {
it('should commit RECEIVE_STAGES_ERROR mutation ', done => {
testAction(
receiveStagesError,
null,
mockedState,
[{ type: types.RECEIVE_STAGES_ERROR }],
[],
done,
);
});
});
describe('requestJobsForStage', () => {
it('should commit REQUEST_JOBS_FOR_STAGE mutation ', done => {
testAction(
......
......@@ -122,5 +122,29 @@ describe BuildDetailsEntity do
it { is_expected.to include(failure_reason: 'unmet_prerequisites') }
end
context 'when a build has environment with latest deployment' do
let(:build) do
create(:ci_build, :running, environment: environment.name, pipeline: pipeline)
end
let(:environment) do
create(:environment, project: project, name: 'staging', state: :available)
end
before do
create(:deployment, :success, environment: environment, project: project)
allow(request).to receive(:project).and_return(project)
end
it 'does not serialize latest deployment commit and associated builds' do
response = subject.with_indifferent_access
response.dig(:deployment_status, :environment, :last_deployment).tap do |deployment|
expect(deployment).not_to include(:commit, :manual_actions, :scheduled_actions)
end
end
end
end
end
......@@ -10,6 +10,7 @@ describe DeploymentEntity do
let(:build) { create(:ci_build, :manual, pipeline: pipeline) }
let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
let(:entity) { described_class.new(deployment, request: request) }
subject { entity.as_json }
before do
......@@ -47,6 +48,16 @@ describe DeploymentEntity do
expect(subject[:manual_actions]).not_to be_present
end
end
context 'when deployment details serialization was disabled' do
let(:entity) do
described_class.new(deployment, request: request, deployment_details: false)
end
it 'does not serialize manual actions details' do
expect(subject.with_indifferent_access).not_to include(:manual_actions)
end
end
end
describe 'scheduled_actions' do
......@@ -69,5 +80,35 @@ describe DeploymentEntity do
expect(subject[:scheduled_actions]).to be_empty
end
end
context 'when deployment details serialization was disabled' do
let(:entity) do
described_class.new(deployment, request: request, deployment_details: false)
end
it 'does not serialize scheduled actions details' do
expect(subject.with_indifferent_access).not_to include(:scheduled_actions)
end
end
end
context 'when deployment details serialization was disabled' do
include Gitlab::Routing
let(:entity) do
described_class.new(deployment, request: request, deployment_details: false)
end
it 'does not serialize deployment details' do
expect(subject.with_indifferent_access)
.not_to include(:commit, :manual_actions, :scheduled_actions)
end
it 'only exposes deployable name and path' do
project_job_path(project, deployment.deployable).tap do |path|
expect(subject.fetch(:deployable))
.to eq(name: 'test', build_path: path)
end
end
end
end
......@@ -48,8 +48,8 @@ describe PipelineEntity do
it 'contains flags' do
expect(subject).to include :flags
expect(subject[:flags])
.to include :latest, :stuck, :auto_devops,
:yaml_errors, :retryable, :cancelable, :merge_request
.to include :stuck, :auto_devops, :yaml_errors,
:retryable, :cancelable, :merge_request
end
end
......@@ -64,6 +64,12 @@ describe PipelineEntity do
create(:ci_build, :failed, pipeline: pipeline)
end
it 'does not serialize stage builds' do
subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
expect(stage).not_to include(:groups, :latest_statuses, :retries)
end
end
context 'user has ability to retry pipeline' do
before do
project.add_developer(user)
......@@ -92,6 +98,12 @@ describe PipelineEntity do
create(:ci_build, :pending, pipeline: pipeline)
end
it 'does not serialize stage builds' do
subject.with_indifferent_access.dig(:details, :stages, 0).tap do |stage|
expect(stage).not_to include(:groups, :latest_statuses, :retries)
end
end
context 'user has ability to cancel pipeline' do
before do
project.add_developer(user)
......