Commit 58780e98 authored by Ezekiel Kigbo

Merge branch '220492-refactor-promql-query-parsing' into 'master'

Refactor Prometheus results handling

Closes #220492

See merge request gitlab-org/gitlab!34376
parents 5cbcc865 7a8ca4cd
......@@ -50,15 +50,14 @@ function backOffRequest(makeRequestCallback) {
}, PROMETHEUS_TIMEOUT);
}
function getPrometheusMetricResult(prometheusEndpoint, params) {
function getPrometheusQueryData(prometheusEndpoint, params) {
return backOffRequest(() => axios.get(prometheusEndpoint, { params }))
.then(res => res.data)
.then(response => {
if (response.status === 'error') {
throw new Error(response.error);
}
return response.data.result;
return response.data;
});
}
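// For illustration only — a minimal sketch of what the refactored helper
// resolves with: the whole query `data` object ({ resultType, result })
// from the Prometheus expression-query API, instead of just the `result`
// array as before. The endpoint path and query below are made up.
//
//   getPrometheusQueryData('/prometheus/api/v1/query_range', { query: 'up' })
//     .then(data => {
//       // data.resultType => 'matrix' | 'vector' | 'scalar' | 'string'
//       // data.result     => the raw result for that resultType
//     });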
......@@ -229,9 +228,9 @@ export const fetchPrometheusMetric = (
commit(types.REQUEST_METRIC_RESULT, { metricId: metric.metricId });
return getPrometheusMetricResult(metric.prometheusEndpointPath, queryParams)
.then(result => {
commit(types.RECEIVE_METRIC_RESULT_SUCCESS, { metricId: metric.metricId, result });
return getPrometheusQueryData(metric.prometheusEndpointPath, queryParams)
.then(data => {
commit(types.RECEIVE_METRIC_RESULT_SUCCESS, { metricId: metric.metricId, data });
})
.catch(error => {
Sentry.captureException(error);
......
import Vue from 'vue';
import { pick } from 'lodash';
import * as types from './mutation_types';
import { mapToDashboardViewModel, normalizeQueryResult } from './utils';
import { mapToDashboardViewModel, normalizeQueryResponseData } from './utils';
import { BACKOFF_TIMEOUT } from '../../lib/utils/common_utils';
import { endpointKeys, initialStateKeys, metricStates } from '../constants';
import httpStatusCodes from '~/lib/utils/http_status';
......@@ -135,19 +135,19 @@ export default {
metric.state = metricStates.LOADING;
}
},
[types.RECEIVE_METRIC_RESULT_SUCCESS](state, { metricId, result }) {
[types.RECEIVE_METRIC_RESULT_SUCCESS](state, { metricId, data }) {
const metric = findMetricInDashboard(metricId, state.dashboard);
metric.loading = false;
state.showEmptyState = false;
if (!result || result.length === 0) {
state.showEmptyState = false;
if (!data.result || data.result.length === 0) {
metric.state = metricStates.NO_DATA;
metric.result = null;
} else {
const normalizedResults = result.map(normalizeQueryResult);
const result = normalizeQueryResponseData(data);
metric.state = metricStates.OK;
metric.result = Object.freeze(normalizedResults);
metric.result = Object.freeze(result);
}
},
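// A sketch of the payload this handler now receives — callers commit the
// whole query `data` object and normalization happens here. The metricId
// and shape mirror the fixtures in the specs further down this diff.
//
//   commit(types.RECEIVE_METRIC_RESULT_SUCCESS, {
//     metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
//     data: { resultType: 'matrix', result: [/* range vectors */] },
//   });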
[types.RECEIVE_METRIC_RESULT_FAILURE](state, { metricId, error }) {
......
......@@ -295,9 +295,87 @@ export const mapToDashboardViewModel = ({
};
};
// Prometheus Results Parsing
const dateTimeFromUnixTime = unixTime => new Date(unixTime * 1000).toISOString();
const mapScalarValue = ([unixTime, value]) => [dateTimeFromUnixTime(unixTime), Number(value)];
// Note: `string` value type is unused as of prometheus 2.19.
const mapStringValue = ([unixTime, value]) => [dateTimeFromUnixTime(unixTime), value];
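// For example, using the sample timestamp from the Prometheus API docs that
// the specs below also use (the string value is made up):
//
//   mapScalarValue([1435781451.781, '1'])         // => ['2015-07-01T20:10:51.781Z', 1]
//   mapStringValue([1435781451.781, 'some text'])  // => ['2015-07-01T20:10:51.781Z', 'some text']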
/**
* Processes a scalar result.
*
* The corresponding result property has the following format:
*
* [ <unix_time>, "<scalar_value>" ]
*
* @param {array} result
* @returns {array}
*/
const normalizeScalarResult = result => [
{
metric: {},
value: mapScalarValue(result),
values: [mapScalarValue(result)],
},
];
/**
* Processes a string result.
*
* The corresponding result property has the following format:
*
* [ <unix_time>, "<string_value>" ]
*
* Note: This value type is unused as of prometheus 2.19.
*
* @param {array} result
* @returns {array}
*/
const normalizeStringResult = result => [
{
metric: {},
value: mapStringValue(result),
values: [mapStringValue(result)],
},
];
/**
* Processes a single Range vector, part of the result
* of type `matrix` in the form:
* Processes an instant vector.
*
* Instant vectors are returned as result type `vector`.
*
* The corresponding result property has the following format:
*
* [
* {
* "metric": { "<label_name>": "<label_value>", ... },
* "value": [ <unix_time>, "<sample_value>" ]
* },
* ...
* ]
*
* This method also adds the matrix version of the vector
* by introducing a `values` array with a single element. This
* allows charts to default to `values` if needed.
*
* @param {array} result
* @returns {array}
*/
const normalizeVectorResult = result =>
result.map(({ metric, value }) => {
const scalar = mapScalarValue(value);
// Add a single element to `values`, to support matrix
// style charts.
return { metric, value: scalar, values: [scalar] };
});
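// A minimal sketch of the transformation above, reusing a sample vector
// element from the specs later in this diff:
//
//   normalizeVectorResult([{ metric: { job: 'prometheus' }, value: [1435781451.781, '1'] }])
//   // => [{ metric: { job: 'prometheus' },
//   //       value: ['2015-07-01T20:10:51.781Z', 1],
//   //       values: [['2015-07-01T20:10:51.781Z', 1]] }]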
/**
* Range vectors are returned as result type matrix.
*
* The corresponding result property has the following format:
*
* {
* "metric": { "<label_name>": "<label_value>", ... },
......@@ -306,32 +384,45 @@ export const mapToDashboardViewModel = ({
*
* See https://prometheus.io/docs/prometheus/latest/querying/api/#range-vectors
*
* @param {*} timeSeries
* @param {array} result
* @returns {array}
*/
export const normalizeQueryResult = timeSeries => {
let normalizedResult = {};
const normalizeResultMatrix = result =>
result.map(({ metric, values }) => ({ metric, values: values.map(mapScalarValue) }));
if (timeSeries.values) {
normalizedResult = {
...timeSeries,
values: timeSeries.values.map(([timestamp, value]) => [
new Date(timestamp * 1000).toISOString(),
Number(value),
]),
};
// Check result for empty data
normalizedResult.values = normalizedResult.values.filter(series => {
const hasValue = d => !Number.isNaN(d[1]) && (d[1] !== null || d[1] !== undefined);
return series.find(hasValue);
});
} else if (timeSeries.value) {
normalizedResult = {
...timeSeries,
value: [new Date(timeSeries.value[0] * 1000).toISOString(), Number(timeSeries.value[1])],
};
/**
* Parse response data from a Prometheus Query that comes
* in the format:
*
* {
* "resultType": "matrix" | "vector" | "scalar" | "string",
* "result": <value>
* }
*
* @see https://prometheus.io/docs/prometheus/latest/querying/api/#expression-query-result-formats
*
* @param {object} data - Data containing results and result type.
* @returns {array} - An array of metric results:
* [
* {
* metric: { ... },
* value: ['2015-07-01T20:10:51.781Z', '1'],
* values: [['2015-07-01T20:10:51.781Z', '1'] , ... ],
* },
* ...
* ]
*
*/
export const normalizeQueryResponseData = data => {
const { resultType, result } = data;
if (resultType === 'vector') {
return normalizeVectorResult(result);
} else if (resultType === 'scalar') {
return normalizeScalarResult(result);
} else if (resultType === 'string') {
return normalizeStringResult(result);
}
return normalizedResult;
return normalizeResultMatrix(result);
};
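// Tying it together — a sketch of how the mutation earlier in this diff
// consumes the normalizer (any resultType other than `vector`, `scalar`
// or `string` falls through to the matrix handler):
//
//   const result = normalizeQueryResponseData(data); // data = { resultType, result }
//   metric.result = Object.freeze(result);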
/**
......
......@@ -15,7 +15,7 @@ import { createStore } from '~/monitoring/stores';
import { panelTypes, chartHeight } from '~/monitoring/constants';
import TimeSeries from '~/monitoring/components/charts/time_series.vue';
import * as types from '~/monitoring/stores/mutation_types';
import { deploymentData, mockProjectDir, annotationsData } from '../../mock_data';
import { deploymentData, mockProjectDir, annotationsData, metricsResult } from '../../mock_data';
import {
metricsDashboardPayload,
metricsDashboardViewModel,
......@@ -702,9 +702,7 @@ describe('Time series component', () => {
beforeEach(() => {
store = createStore();
const graphData = cloneDeep(metricsDashboardViewModel.panelGroups[0].panels[3]);
graphData.metrics.forEach(metric =>
Object.assign(metric, { result: metricResultStatus.result }),
);
graphData.metrics.forEach(metric => Object.assign(metric, { result: metricsResult }));
createWrapper({ graphData: { ...graphData, type: 'area-chart' } }, mount);
return wrapper.vm.$nextTick();
......
......@@ -14,16 +14,25 @@ export const metricsDashboardPanelCount = 22;
export const metricResultStatus = {
// First metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
metricId: 'NO_DB_response_metrics_nginx_ingress_throughput_status_code',
result: metricsResult,
data: {
resultType: 'matrix',
result: metricsResult,
},
};
export const metricResultPods = {
// Second metric in fixture `metrics_dashboard/environment_metrics_dashboard.json`
metricId: 'NO_DB_response_metrics_nginx_ingress_latency_pod_average',
result: metricsResult,
data: {
resultType: 'matrix',
result: metricsResult,
},
};
export const metricResultEmpty = {
metricId: 'NO_DB_response_metrics_nginx_ingress_16_throughput_status_code',
result: [],
data: {
resultType: 'matrix',
result: [],
},
};
// Graph data
......
......@@ -738,7 +738,7 @@ describe('Monitoring store actions', () => {
type: types.RECEIVE_METRIC_RESULT_SUCCESS,
payload: {
metricId: metric.metricId,
result: data.result,
data,
},
},
],
......@@ -775,7 +775,7 @@ describe('Monitoring store actions', () => {
type: types.RECEIVE_METRIC_RESULT_SUCCESS,
payload: {
metricId: metric.metricId,
result: data.result,
data,
},
},
],
......@@ -817,7 +817,7 @@ describe('Monitoring store actions', () => {
type: types.RECEIVE_METRIC_RESULT_SUCCESS,
payload: {
metricId: metric.metricId,
result: data.result,
data,
},
},
],
......@@ -852,7 +852,7 @@ describe('Monitoring store actions', () => {
type: types.RECEIVE_METRIC_RESULT_SUCCESS,
payload: {
metricId: metric.metricId,
result: data.result,
data,
},
},
],
......
......@@ -27,7 +27,10 @@ describe('Monitoring store Getters', () => {
const { metricId } = state.dashboard.panelGroups[group].panels[panel].metrics[metric];
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](state, {
metricId,
result,
data: {
resultType: 'matrix',
result,
},
});
};
......
......@@ -225,11 +225,28 @@ describe('Monitoring mutations', () => {
describe('Individual panel/metric results', () => {
const metricId = 'NO_DB_response_metrics_nginx_ingress_throughput_status_code';
const result = [
{
values: [[0, 1], [1, 1], [1, 3]],
},
];
const data = {
resultType: 'matrix',
result: [
{
metric: {
__name__: 'up',
job: 'prometheus',
instance: 'localhost:9090',
},
values: [[1435781430.781, '1'], [1435781445.781, '1'], [1435781460.781, '1']],
},
{
metric: {
__name__: 'up',
job: 'node',
instance: 'localhost:9091',
},
values: [[1435781430.781, '0'], [1435781445.781, '0'], [1435781460.781, '1']],
},
],
};
const dashboard = metricsDashboardPayload;
const getMetric = () => stateCopy.dashboard.panelGroups[1].panels[0].metrics[0];
......@@ -262,7 +279,7 @@ describe('Monitoring mutations', () => {
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](stateCopy, {
metricId,
result,
data,
});
expect(stateCopy.showEmptyState).toBe(false);
......@@ -273,10 +290,10 @@ describe('Monitoring mutations', () => {
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](stateCopy, {
metricId,
result,
data,
});
expect(getMetric().result).toHaveLength(result.length);
expect(getMetric().result).toHaveLength(data.result.length);
expect(getMetric()).toEqual(
expect.objectContaining({
loading: false,
......
......@@ -5,7 +5,7 @@ import {
parseAnnotationsResponse,
removeLeadingSlash,
mapToDashboardViewModel,
normalizeQueryResult,
normalizeQueryResponseData,
convertToGrafanaTimeRange,
addDashboardMetaDataToLink,
} from '~/monitoring/stores/utils';
......@@ -400,28 +400,6 @@ describe('mapToDashboardViewModel', () => {
});
});
describe('normalizeQueryResult', () => {
const testData = {
metric: {
__name__: 'up',
job: 'prometheus',
instance: 'localhost:9090',
},
values: [[1435781430.781, '1'], [1435781445.781, '1'], [1435781460.781, '1']],
};
it('processes a simple matrix result', () => {
expect(normalizeQueryResult(testData)).toEqual({
metric: { __name__: 'up', job: 'prometheus', instance: 'localhost:9090' },
values: [
['2015-07-01T20:10:30.781Z', 1],
['2015-07-01T20:10:45.781Z', 1],
['2015-07-01T20:11:00.781Z', 1],
],
});
});
});
describe('uniqMetricsId', () => {
[
{ input: { id: 1 }, expected: `${NOT_IN_DB_PREFIX}_1` },
......@@ -607,3 +585,118 @@ describe('user-defined links utils', () => {
});
});
});
describe('normalizeQueryResponseData', () => {
// Data examples from
// https://prometheus.io/docs/prometheus/latest/querying/api/#expression-queries
it('processes a string result', () => {
const mockString = {
resultType: 'string',
result: [1435781451.781, '1'],
};
expect(normalizeQueryResponseData(mockString)).toEqual([
{
metric: {},
value: ['2015-07-01T20:10:51.781Z', '1'],
values: [['2015-07-01T20:10:51.781Z', '1']],
},
]);
});
it('processes a scalar result', () => {
const mockScalar = {
resultType: 'scalar',
result: [1435781451.781, '1'],
};
expect(normalizeQueryResponseData(mockScalar)).toEqual([
{
metric: {},
value: ['2015-07-01T20:10:51.781Z', 1],
values: [['2015-07-01T20:10:51.781Z', 1]],
},
]);
});
it('processes a vector result', () => {
const mockVector = {
resultType: 'vector',
result: [
{
metric: {
__name__: 'up',
job: 'prometheus',
instance: 'localhost:9090',
},
value: [1435781451.781, '1'],
},
{
metric: {
__name__: 'up',
job: 'node',
instance: 'localhost:9100',
},
value: [1435781451.781, '0'],
},
],
};
expect(normalizeQueryResponseData(mockVector)).toEqual([
{
metric: { __name__: 'up', job: 'prometheus', instance: 'localhost:9090' },
value: ['2015-07-01T20:10:51.781Z', 1],
values: [['2015-07-01T20:10:51.781Z', 1]],
},
{
metric: { __name__: 'up', job: 'node', instance: 'localhost:9100' },
value: ['2015-07-01T20:10:51.781Z', 0],
values: [['2015-07-01T20:10:51.781Z', 0]],
},
]);
});
it('processes a matrix result', () => {
const mockMatrix = {
resultType: 'matrix',
result: [
{
metric: {
__name__: 'up',
job: 'prometheus',
instance: 'localhost:9090',
},
values: [[1435781430.781, '1'], [1435781445.781, '1'], [1435781460.781, '1']],
},
{
metric: {
__name__: 'up',
job: 'node',
instance: 'localhost:9091',
},
values: [[1435781430.781, '0'], [1435781445.781, '0'], [1435781460.781, '1']],
},
],
};
expect(normalizeQueryResponseData(mockMatrix)).toEqual([
{
metric: { __name__: 'up', instance: 'localhost:9090', job: 'prometheus' },
values: [
['2015-07-01T20:10:30.781Z', 1],
['2015-07-01T20:10:45.781Z', 1],
['2015-07-01T20:11:00.781Z', 1],
],
},
{
metric: { __name__: 'up', instance: 'localhost:9091', job: 'node' },
values: [
['2015-07-01T20:10:30.781Z', 0],
['2015-07-01T20:10:45.781Z', 0],
['2015-07-01T20:11:00.781Z', 1],
],
},
]);
});
});
......@@ -8,7 +8,10 @@ export const setMetricResult = ({ store, result, group = 0, panel = 0, metric =
store.commit(`monitoringDashboard/${types.RECEIVE_METRIC_RESULT_SUCCESS}`, {
metricId,
result,
data: {
resultType: 'matrix',
result,
},
});
};
......