Commit c8761bb0 authored by Erick Bajao's avatar Erick Bajao Committed by Paul Slaughter

Expose degradation_threshold from CI config

This exposes the value of the `DEGRADATION_THRESHOLD` variable from the
CI YAML configuration.

https://gitlab.com/gitlab-org/gitlab/-/merge_requests/21824
parent 0b63523b
......@@ -32,6 +32,9 @@ module Ci
scheduler_failure: 2
}.freeze
CODE_NAVIGATION_JOB_NAME = 'code_navigation'
DEGRADATION_THRESHOLD_VARIABLE_NAME = 'DEGRADATION_THRESHOLD'
has_one :deployment, as: :deployable, class_name: 'Deployment'
has_one :resource, class_name: 'Ci::Resource', inverse_of: :build
has_many :trace_sections, class_name: 'Ci::BuildTraceSection'
......@@ -917,6 +920,11 @@ module Ci
failure_reason: :data_integrity_failure)
end
# Returns the integer degradation threshold configured on this job via the
# DEGRADATION_THRESHOLD YAML variable, or nil when the variable is absent.
def degradation_threshold
  threshold_variable = yaml_variables.find do |variable|
    variable[:key] == DEGRADATION_THRESHOLD_VARIABLE_NAME
  end
  return unless threshold_variable

  threshold_variable[:value]&.to_i
end
private
def dependencies
......
......@@ -123,6 +123,26 @@ documentation.
TIP: **Tip:**
Key metrics are automatically extracted and shown in the merge request widget.
### Configuring degradation threshold
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/27599) in GitLab 13.0.
You can configure the sensitivity of degradation alerts to avoid getting alerts for minor drops in metrics.
This is done by setting the `DEGRADATION_THRESHOLD` variable. In the example below, the alert will only show up
if the `Total Score` metric degrades by 5 points or more:
```yaml
include:
template: Verify/Browser-Performance.gitlab-ci.yml
performance:
variables:
URL: https://example.com
DEGRADATION_THRESHOLD: 5
```
The `Total Score` metric is based on sitespeed.io's [coach performance score](https://www.sitespeed.io/documentation/sitespeed.io/metrics/#performance-score). There is more information in [the coach documentation](https://www.sitespeed.io/documentation/coach/how-to/#what-do-the-coach-do).
### Performance testing on Review Apps
The above CI YAML configuration is great for testing against static environments, and it can
......
......@@ -14,6 +14,7 @@ import MrWidgetApprovals from './components/approvals/approvals.vue';
import MrWidgetGeoSecondaryNode from './components/states/mr_widget_secondary_geo_node.vue';
import MergeTrainHelperText from './components/merge_train_helper_text.vue';
import { MTWPS_MERGE_STRATEGY } from '~/vue_merge_request_widget/constants';
import { TOTAL_SCORE_METRIC_NAME } from 'ee/vue_merge_request_widget/stores/constants';
export default {
components: {
......@@ -65,9 +66,29 @@ export default {
(this.mr.performanceMetrics.improved && this.mr.performanceMetrics.improved.length > 0))
);
},
shouldRenderPerformance() {
hasPerformancePaths() {
const { performance } = this.mr || {};
return performance && performance.head_path && performance.base_path;
return Boolean(performance?.head_path && performance?.base_path);
},
degradedTotalScore() {
return this.mr?.performanceMetrics?.degraded.find(
metric => metric.name === TOTAL_SCORE_METRIC_NAME,
);
},
hasPerformanceDegradation() {
const threshold = this.mr?.performance?.degradation_threshold || 0;
if (!threshold) {
return true;
}
const totalScoreDelta = this.degradedTotalScore?.delta || 0;
return threshold + totalScoreDelta <= 0;
},
shouldRenderPerformance() {
  // Render the performance widget only when both report paths are present
  // and the degradation (if any) is significant per the threshold.
  return this.hasPerformancePaths && this.hasPerformanceDegradation;
},
shouldRenderSecurityReport() {
const { enabledReports } = this.mr;
......@@ -170,7 +191,7 @@ export default {
this.fetchCodeQuality();
}
},
shouldRenderPerformance(newVal) {
hasPerformancePaths(newVal) {
if (newVal) {
this.fetchPerformance();
}
......
/* eslint-disable import/prefer-default-export */
// Name of sitespeed.io's overall performance score metric as it appears in
// the browser performance report; compared against each `metric.name`.
export const TOTAL_SCORE_METRIC_NAME = 'Total Score';
......@@ -122,7 +122,6 @@ export default class MergeRequestStore extends CEMergeRequestStore {
comparePerformanceMetrics(headMetrics, baseMetrics) {
const headMetricsIndexed = MergeRequestStore.normalizePerformanceMetrics(headMetrics);
const baseMetricsIndexed = MergeRequestStore.normalizePerformanceMetrics(baseMetrics);
const improved = [];
const degraded = [];
......
......@@ -37,6 +37,13 @@ module EE
proxy: true)
end
end
# Returns the degradation threshold of the pipeline's performance build,
# provided a performance report artifact exists and the current user is
# allowed to read the build that produced it; nil otherwise.
def degradation_threshold
  job_artifact = batch_lookup_report_artifact_for_file_type(:performance)
  return unless job_artifact
  return unless can?(current_user, :read_build, job_artifact.job)

  job_artifact.job.degradation_threshold
end
end
end
end
......@@ -27,6 +27,11 @@ module EE
end
expose :performance, if: -> (mr, _) { head_pipeline_downloadable_path_for_report_type(:performance) } do
expose :degradation_threshold do |merge_request|
merge_request.head_pipeline&.present(current_user: current_user)
&.degradation_threshold
end
expose :head_path do |merge_request|
head_pipeline_downloadable_path_for_report_type(:performance)
end
......
---
title: Support setting threshold for browser performance degradation through CI config
merge_request: 21824
author:
type: added
......@@ -40,6 +40,11 @@ describe('ee merge request widget options', () => {
let mock;
let Component;
const DEFAULT_PERFORMANCE = {
head_path: 'head.json',
base_path: 'base.json',
};
beforeEach(() => {
delete mrWidgetOptions.extends.el; // Prevent component mounting
......@@ -65,8 +70,15 @@ describe('ee merge request widget options', () => {
});
});
const findPerformanceWidget = () => vm.$el.querySelector('.js-performance-widget');
const findSecurityWidget = () => vm.$el.querySelector('.js-security-widget');
const setPerformance = (data = {}) => {
const performance = { ...DEFAULT_PERFORMANCE, ...data };
gl.mrWidgetData.performance = performance;
vm.mr.performance = performance;
};
const VULNERABILITY_FEEDBACK_ENDPOINT = 'vulnerability_feedback_path';
describe('SAST', () => {
......@@ -487,13 +499,10 @@ describe('ee merge request widget options', () => {
mock.onGet('base.json').reply(200, basePerformance);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
vm.mr.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.performance = { ...DEFAULT_PERFORMANCE };
vm.$nextTick(() => {
expect(trimText(vm.$el.querySelector('.js-performance-widget').textContent)).toContain(
expect(trimText(findPerformanceWidget().textContent)).toContain(
'Loading performance report',
);
......@@ -504,65 +513,91 @@ describe('ee merge request widget options', () => {
describe('with successful request', () => {
beforeEach(() => {
mock.onGet('head.json').reply(200, headPerformance);
mock.onGet('base.json').reply(200, basePerformance);
mock.onGet(DEFAULT_PERFORMANCE.head_path).reply(200, headPerformance);
mock.onGet(DEFAULT_PERFORMANCE.base_path).reply(200, basePerformance);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
vm.mr.performance = gl.mrWidgetData.performance;
});
it('should render provided data', done => {
setImmediate(() => {
expect(
trimText(vm.$el.querySelector('.js-performance-widget .js-code-text').textContent),
).toEqual('Performance metrics improved on 2 points and degraded on 1 point');
done();
describe('default', () => {
beforeEach(() => {
setPerformance();
});
});
describe('text connector', () => {
it('should only render information about fixed issues', done => {
it('should render provided data', done => {
setImmediate(() => {
vm.mr.performanceMetrics.degraded = [];
Vue.nextTick(() => {
expect(
trimText(vm.$el.querySelector('.js-performance-widget .js-code-text').textContent),
).toEqual('Performance metrics improved on 2 points');
done();
});
expect(
trimText(vm.$el.querySelector('.js-performance-widget .js-code-text').textContent),
).toEqual('Performance metrics improved on 2 points and degraded on 1 point');
done();
});
});
it('should only render information about added issues', done => {
setImmediate(() => {
vm.mr.performanceMetrics.improved = [];
describe('text connector', () => {
it('should only render information about fixed issues', done => {
setImmediate(() => {
vm.mr.performanceMetrics.degraded = [];
Vue.nextTick(() => {
expect(
trimText(
vm.$el.querySelector('.js-performance-widget .js-code-text').textContent,
),
).toEqual('Performance metrics improved on 2 points');
done();
});
});
});
Vue.nextTick(() => {
expect(
trimText(vm.$el.querySelector('.js-performance-widget .js-code-text').textContent),
).toEqual('Performance metrics degraded on 1 point');
done();
it('should only render information about added issues', done => {
setImmediate(() => {
vm.mr.performanceMetrics.improved = [];
Vue.nextTick(() => {
expect(
trimText(
vm.$el.querySelector('.js-performance-widget .js-code-text').textContent,
),
).toEqual('Performance metrics degraded on 1 point');
done();
});
});
});
});
});
describe.each`
degradation_threshold | shouldExist
${1} | ${true}
${3} | ${false}
`(
'with degradation_threshold = $degradation_threshold',
({ degradation_threshold, shouldExist }) => {
beforeEach(() => {
setPerformance({ degradation_threshold });
return waitForPromises();
});
if (shouldExist) {
it('should render widget when total score degradation is above threshold', () => {
expect(findPerformanceWidget()).toExist();
});
} else {
it('should not render widget when total score degradation is below threshold', () => {
expect(findPerformanceWidget()).not.toExist();
});
}
},
);
});
describe('with empty successful request', () => {
beforeEach(done => {
mock.onGet('head.json').reply(200, []);
mock.onGet('base.json').reply(200, []);
mock.onGet(DEFAULT_PERFORMANCE.head_path).reply(200, []);
mock.onGet(DEFAULT_PERFORMANCE.base_path).reply(200, []);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
gl.mrWidgetData.performance = { ...DEFAULT_PERFORMANCE };
vm.mr.performance = gl.mrWidgetData.performance;
// wait for network request from component watch update method
......@@ -590,14 +625,11 @@ describe('ee merge request widget options', () => {
describe('with failed request', () => {
beforeEach(() => {
mock.onGet('head.json').reply(500, []);
mock.onGet('base.json').reply(500, []);
mock.onGet(DEFAULT_PERFORMANCE.head_path).reply(500, []);
mock.onGet(DEFAULT_PERFORMANCE.base_path).reply(500, []);
vm = mountComponent(Component, { mrData: gl.mrWidgetData });
gl.mrWidgetData.performance = {
head_path: 'head.json',
base_path: 'base.json',
};
gl.mrWidgetData.performance = { ...DEFAULT_PERFORMANCE };
vm.mr.performance = gl.mrWidgetData.performance;
});
......
......@@ -113,7 +113,7 @@ export const headPerformance = [
subject: '/some/other/path',
metrics: [
{
name: 'Sitespeed Score',
name: 'Total Score',
value: 79,
desiredSize: 'larger',
},
......@@ -149,7 +149,7 @@ export const basePerformance = [
subject: '/some/other/path',
metrics: [
{
name: 'Sitespeed Score',
name: 'Total Score',
value: 80,
desiredSize: 'larger',
},
......
......@@ -99,6 +99,51 @@ describe MergeRequestWidgetEntity do
end
end
# Verifies that the widget entity exposes performance.degradation_threshold
# from the head pipeline's performance build YAML variables.
describe 'degradation_threshold' do
# Head pipeline owns the performance build whose variables are inspected.
let!(:head_pipeline) { create(:ci_empty_pipeline, project: project) }
before do
allow(merge_request).to receive_messages(
base_pipeline: pipeline,
head_pipeline: head_pipeline
)
# Treat the performance report type as licensed so the :performance
# section of the entity is exposed at all.
allow(head_pipeline).to receive(:available_licensed_report_type?).and_return(true)
create(
:ee_ci_build,
:performance,
pipeline: head_pipeline,
yaml_variables: yaml_variables
)
end
context "when head pipeline's performance build has the threshold variable defined" do
let(:yaml_variables) do
[
{ key: 'FOO', value: 'BAR' },
{ key: 'DEGRADATION_THRESHOLD', value: '5' }
]
end
it "returns the value of the variable" do
# The YAML string value is coerced to an Integer before exposure.
expect(subject.as_json[:performance][:degradation_threshold]).to eq(5)
end
end
context "when head pipeline's performance build has no threshold variable defined" do
let(:yaml_variables) do
[
{ key: 'FOO', value: 'BAR' }
]
end
it "returns nil" do
expect(subject.as_json[:performance][:degradation_threshold]).to be_nil
end
end
end
describe '#license_scanning', :request_store do
before do
allow(merge_request).to receive_messages(head_pipeline: pipeline, target_project: project)
......
......@@ -4414,4 +4414,31 @@ describe Ci::Build do
it { is_expected.to be_nil }
end
end
# Verifies Ci::Build#degradation_threshold reads the DEGRADATION_THRESHOLD
# YAML variable and coerces it to an Integer, returning nil when unset.
describe '#degradation_threshold' do
subject { build.degradation_threshold }
context 'when threshold variable is defined' do
before do
# Surrounding unrelated variables prove only the matching key is used.
build.yaml_variables = [
{ key: 'SOME_VAR_1', value: 'SOME_VAL_1' },
{ key: 'DEGRADATION_THRESHOLD', value: '5' },
{ key: 'SOME_VAR_2', value: 'SOME_VAL_2' }
]
end
it { is_expected.to eq(5) }
end
context 'when threshold variable is not defined' do
before do
build.yaml_variables = [
{ key: 'SOME_VAR_1', value: 'SOME_VAL_1' },
{ key: 'SOME_VAR_2', value: 'SOME_VAL_2' }
]
end
it { is_expected.to be_nil }
end
end
end
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment