Commit 72133789 authored by Adrien Kohlbecker, committed by Michael Kozono

Add timestamps to pod logs

parent 1ccc1230
---
title: Add timestamps to pod logs
merge_request: 21663
author:
type: added
-export const trace = state => state.logs.lines.join('\n');
+import dateFormat from 'dateformat';
+
+export const trace = state =>
+  state.logs.lines
+    .map(item => [dateFormat(item.timestamp, 'UTC:mmm dd HH:MM:ss.l"Z"'), item.message].join(' | '))
+    .join('\n');

 // prevent babel-plugin-rewire from generating an invalid default during karma tests
 export default () => {};
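For context, a minimal sketch of what the updated `trace` getter produces for a single entry. The log item below is hypothetical sample data in the new `{ timestamp, message }` shape; the `dateformat` mask is the one used in the getter above, and `dateFormat` is assumed to accept an ISO 8601 string as its date argument (as the getter itself relies on):

```javascript
import dateFormat from 'dateformat';

// Hypothetical entry in the shape now stored in state.logs.lines
const item = {
  timestamp: '2019-12-13T13:43:18.276Z',
  message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
};

// Same formatting as the getter: UTC month/day/time with milliseconds, then the message
const line = [dateFormat(item.timestamp, 'UTC:mmm dd HH:MM:ss.l"Z"'), item.message].join(' | ');
// => 'Dec 13 13:43:18.276Z | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13'
```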
@@ -99,10 +99,18 @@ module EE
       def platform_pod_logs(namespace, pod_name, container_name)
         logs = kubeclient.get_pod_log(
-          pod_name, namespace, container: container_name, tail_lines: LOGS_LIMIT
+          pod_name, namespace, container: container_name, tail_lines: LOGS_LIMIT, timestamps: true
         ).body

-        logs.strip.split("\n")
+        logs.strip.split("\n").map do |line|
+          # message contains a RFC3339Nano timestamp, then a space, then the log line.
+          # resolution of the nanoseconds can vary, so we split on the first space
+          values = line.split(' ', 2)
+          {
+            timestamp: values[0],
+            message: values[1]
+          }
+        end
       end

       def elastic_stack_pod_logs(namespace, pod_name, container_name)
...
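The comment in the hunk above explains why each line is split on the first space only: the fractional-second precision of the RFC3339Nano timestamp varies, so the first space is the only reliable delimiter. A rough JavaScript sketch of the same split, using a hypothetical helper that is not part of this change (note that `String.prototype.split` with a limit truncates the remainder, so `indexOf`/`slice` is used instead of Ruby's `split(' ', 2)`):

```javascript
// Splits a kubelet-style log line such as
// "2019-12-13T14:04:22.123456Z Log 1" into its timestamp and message parts.
const splitPodLogLine = line => {
  const firstSpace = line.indexOf(' ');
  return {
    timestamp: line.slice(0, firstSpace),
    message: line.slice(firstSpace + 1),
  };
};

// splitPodLogLine('2019-12-13T14:04:22.123456Z Log 1')
// => { timestamp: '2019-12-13T14:04:22.123456Z', message: 'Log 1' }
```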
@@ -51,14 +51,19 @@ module Gitlab
           { "@timestamp": { order: :desc } },
           { "offset": { order: :desc } }
         ],
-        # only return the message field in the response
-        _source: ["message"],
+        # only return these fields in the response
+        _source: ["@timestamp", "message"],
         # fixed limit for now, we should support paginated queries
         size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT
       }

       response = @client.search body: body
-      result = response.fetch("hits", {}).fetch("hits", []).map { |h| h["_source"]["message"] }
+      result = response.fetch("hits", {}).fetch("hits", []).map do |hit|
+        {
+          timestamp: hit["_source"]["@timestamp"],
+          message: hit["_source"]["message"]
+        }
+      end

       # we queried for the N-most recent records but we want them ordered oldest to newest
       result.reverse
...
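As a rough illustration of the new Elasticsearch handling, here is a JavaScript sketch of mapping each hit's `@timestamp` and `message` into the `{ timestamp, message }` shape and reversing the newest-first result so it reads oldest to newest. The response object is hypothetical sample data, trimmed to the fields requested via `_source`:

```javascript
// Hypothetical search response, reduced to the two requested _source fields
const response = {
  hits: {
    hits: [
      { _source: { '@timestamp': '2019-12-13T14:35:35.034Z', message: 'newer line' } },
      { _source: { '@timestamp': '2019-12-13T14:35:34.034Z', message: 'older line' } },
    ],
  },
};

// Map each hit to { timestamp, message }, then reverse: the query sorts
// newest-first to grab the most recent records, but the UI wants oldest-first.
const logs = response.hits.hits
  .map(hit => ({ timestamp: hit._source['@timestamp'], message: hit._source.message }))
  .reverse();
// => older line first, newer line last
```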
@@ -70,7 +70,7 @@ describe 'Environment > Pod Logs', :js do
         expect(item.text).to eq(pod_names[i])
       end
     end

-    expect(page).to have_content("Log 1 Log 2 Log 3")
+    expect(page).to have_content("Dec 13 14:04:22.123Z | Log 1 Dec 13 14:04:23.123Z | Log 2 Dec 13 14:04:24.123Z | Log 3")
   end
 end
...
@@ -18,7 +18,8 @@
       "_id": "SkbxAW4BWzhswgK-C5-R",
       "_score": null,
       "_source": {
-        "message": "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13"
+        "message": "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13",
+        "@timestamp": "2019-12-13T14:35:34.034Z"
       },
       "sort": [
         9999998,
@@ -31,7 +32,8 @@
       "_id": "wEigD24BWzhswgK-WUU2",
       "_score": null,
       "_source": {
-        "message": "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13"
+        "message": "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13",
+        "@timestamp": "2019-12-13T14:35:35.034Z"
       },
       "sort": [
         9999949,
@@ -44,7 +46,8 @@
       "_id": "gE6uOG4BWzhswgK-M0x2",
       "_score": null,
       "_source": {
-        "message": "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13"
+        "message": "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13",
+        "@timestamp": "2019-12-13T14:35:36.034Z"
       },
       "sort": [
         9999944,
@@ -57,7 +60,8 @@
       "_id": "0klPHW4BWzhswgK-nfCF",
       "_score": null,
       "_source": {
-        "message": "- -\u003e /"
+        "message": "- -\u003e /",
+        "@timestamp": "2019-12-13T14:35:37.034Z"
       },
       "sort": [
         9999934,
...
@@ -10,7 +10,8 @@ import {
   mockEnvName,
   mockEnvironments,
   mockPods,
-  mockLines,
+  mockLogsResult,
+  mockTrace,
   mockPodName,
   mockEnvironmentsEndpoint,
 } from '../mock_data';
@@ -152,14 +153,14 @@ describe('EnvironmentLogs', () => {
         [state.pods.current] = state.pods.options;
         state.logs.isComplete = false;
-        state.logs.lines = mockLines;
+        state.logs.lines = mockLogsResult;
       });

       actionMocks.showPodLogs.mockImplementation(podName => {
         state.pods.options = mockPods;
         [state.pods.current] = podName;
         state.logs.isComplete = false;
-        state.logs.lines = mockLines;
+        state.logs.lines = mockLogsResult;
       });

       actionMocks.fetchEnvironments.mockImplementation(() => {
         state.environments.options = mockEnvironments;
@@ -200,8 +201,8 @@ describe('EnvironmentLogs', () => {
     it('populates logs trace', () => {
       const trace = findLogTrace();

-      expect(trace.text().split('\n').length).toBe(mockLines.length);
-      expect(trace.text().split('\n')).toEqual(mockLines);
+      expect(trace.text().split('\n').length).toBe(mockTrace.length);
+      expect(trace.text().split('\n')).toEqual(mockTrace);
     });

     it('update control buttons state', () => {
...
@@ -24,19 +24,57 @@ export const mockPods = [
   'production-764c58d697-ddddd',
 ];

-export const mockLines = [
-  '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
-  '- -> /',
-  '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
-  '- -> /',
-  '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
-  '- -> /',
-  '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
-  '- -> /',
-  '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
-  '- -> /',
-  '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
-  '- -> /',
-  '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
-  '- -> /',
-];
+export const mockLogsResult = [
+  {
+    timestamp: '2019-12-13T13:43:18.2760123Z',
+    message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
+  },
+  { timestamp: '2019-12-13T13:43:18.2760123Z', message: '- -> /' },
+  {
+    timestamp: '2019-12-13T13:43:26.8420123Z',
+    message: '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
+  },
+  { timestamp: '2019-12-13T13:43:26.8420123Z', message: '- -> /' },
+  {
+    timestamp: '2019-12-13T13:43:28.3710123Z',
+    message: '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
+  },
+  { timestamp: '2019-12-13T13:43:28.3710123Z', message: '- -> /' },
+  {
+    timestamp: '2019-12-13T13:43:36.8860123Z',
+    message: '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
+  },
+  { timestamp: '2019-12-13T13:43:36.8860123Z', message: '- -> /' },
+  {
+    timestamp: '2019-12-13T13:43:38.4000123Z',
+    message: '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
+  },
+  { timestamp: '2019-12-13T13:43:38.4000123Z', message: '- -> /' },
+  {
+    timestamp: '2019-12-13T13:43:46.8420123Z',
+    message: '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
+  },
+  { timestamp: '2019-12-13T13:43:46.8430123Z', message: '- -> /' },
+  {
+    timestamp: '2019-12-13T13:43:48.3240123Z',
+    message: '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
+  },
+  { timestamp: '2019-12-13T13:43:48.3250123Z', message: '- -> /' },
+];
+
+export const mockTrace = [
+  'Dec 13 13:43:18.276Z | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
+  'Dec 13 13:43:18.276Z | - -> /',
+  'Dec 13 13:43:26.842Z | 10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
+  'Dec 13 13:43:26.842Z | - -> /',
+  'Dec 13 13:43:28.371Z | 10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
+  'Dec 13 13:43:28.371Z | - -> /',
+  'Dec 13 13:43:36.886Z | 10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
+  'Dec 13 13:43:36.886Z | - -> /',
+  'Dec 13 13:43:38.400Z | 10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
+  'Dec 13 13:43:38.400Z | - -> /',
+  'Dec 13 13:43:46.842Z | 10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
+  'Dec 13 13:43:46.843Z | - -> /',
+  'Dec 13 13:43:48.324Z | 10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
+  'Dec 13 13:43:48.325Z | - -> /',
+];
@@ -14,7 +14,7 @@ import {
   mockEnvironmentsEndpoint,
   mockEnvironments,
   mockPods,
-  mockLines,
+  mockLogsResult,
   mockEnvName,
 } from '../mock_data';
@@ -122,7 +122,7 @@ describe('Logs Store actions', () => {
           .reply(200, {
             pod_name: mockPodName,
             pods: mockPods,
-            logs: mockLines,
+            logs: mockLogsResult,
           });

         mock.onGet(endpoint).replyOnce(202); // mock reactive cache
@@ -136,7 +136,7 @@ describe('Logs Store actions', () => {
           { type: types.REQUEST_LOGS_DATA },
           { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
           { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
-          { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLines },
+          { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
         ],
         [],
         done,
@@ -152,7 +152,7 @@ describe('Logs Store actions', () => {
       mock.onGet(endpoint, { params: { environment_name: mockEnvName } }).reply(200, {
         pod_name: mockPodName,
         pods: mockPods,
-        logs: mockLines,
+        logs: mockLogsResult,
       });

       mock.onGet(endpoint).replyOnce(202); // mock reactive cache
@@ -165,7 +165,7 @@ describe('Logs Store actions', () => {
           { type: types.REQUEST_LOGS_DATA },
           { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
           { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
-          { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLines },
+          { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
         ],
         [],
         done,
...
 import * as getters from 'ee/logs/stores/getters';
 import logsPageState from 'ee/logs/stores/state';

-import { mockLines } from '../mock_data';
+import { mockLogsResult, mockTrace } from '../mock_data';

 describe('Logs Store getters', () => {
   let state;
@@ -29,11 +29,11 @@ describe('Logs Store getters', () => {
     describe('when state logs are set', () => {
       beforeEach(() => {
-        state.logs.lines = mockLines;
+        state.logs.lines = mockLogsResult;
       });

       it('returns an empty string', () => {
-        expect(getters.trace(state)).toEqual(mockLines.join('\n'));
+        expect(getters.trace(state)).toEqual(mockTrace.join('\n'));
       });
     });
   });
...
@@ -8,7 +8,7 @@ import {
   mockEnvironments,
   mockPods,
   mockPodName,
-  mockLines,
+  mockLogsResult,
 } from '../mock_data';

 describe('Logs Store Mutations', () => {
@@ -83,11 +83,11 @@ describe('Logs Store Mutations', () => {
   describe('RECEIVE_LOGS_DATA_SUCCESS', () => {
     it('receives logs lines', () => {
-      mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLines);
+      mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLogsResult);

       expect(state.logs).toEqual(
         expect.objectContaining({
-          lines: mockLines,
+          lines: mockLogsResult,
           isLoading: false,
           isComplete: true,
         }),
...
@@ -5,10 +5,10 @@ require 'spec_helper'
 describe Gitlab::Elasticsearch::Logs do
   let(:client) { Elasticsearch::Transport::Client }

-  let(:es_message_1) { "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" }
-  let(:es_message_2) { "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" }
-  let(:es_message_3) { "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" }
-  let(:es_message_4) { "- -\u003e /" }
+  let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } }
+  let(:es_message_2) { { timestamp: "2019-12-13T14:35:35.034Z", message: "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } }
+  let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } }
+  let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", message: "- -\u003e /" } }

   let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json', dir: 'ee')) }
@@ -53,6 +53,7 @@ describe Gitlab::Elasticsearch::Logs do
           }
         ],
         _source: [
+          "@timestamp",
           "message"
         ],
         size: 500
@@ -101,6 +102,7 @@ describe Gitlab::Elasticsearch::Logs do
           }
         ],
         _source: [
+          "@timestamp",
           "message"
         ],
         size: 500
...
@@ -141,12 +141,19 @@ describe Clusters::Platforms::Kubernetes do
     let(:pod_name) { 'pod-1' }
     let(:namespace) { 'app' }
     let(:container) { 'some-container' }
+    let(:expected_logs) do
+      [
+        { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" },
+        { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" },
+        { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" }
+      ]
+    end

     subject { service.read_pod_logs(environment.id, pod_name, namespace, container: container) }

     shared_examples 'successful log request' do
       it do
-        expect(subject[:logs]).to eq(["Log 1", "Log 2", "Log 3"])
+        expect(subject[:logs]).to eq(expected_logs)
         expect(subject[:status]).to eq(:success)
         expect(subject[:pod_name]).to eq(pod_name)
         expect(subject[:container_name]).to eq(container)
@@ -171,7 +178,7 @@ describe Clusters::Platforms::Kubernetes do
       before do
         expect_any_instance_of(::Clusters::Applications::ElasticStack).to receive(:elasticsearch_client).at_least(:once).and_return(Elasticsearch::Transport::Client.new)
-        expect_any_instance_of(::Gitlab::Elasticsearch::Logs).to receive(:pod_logs).and_return(["Log 1", "Log 2", "Log 3"])
+        expect_any_instance_of(::Gitlab::Elasticsearch::Logs).to receive(:pod_logs).and_return(expected_logs)

         stub_feature_flags(enable_cluster_application_elastic_stack: true)
       end
...
@@ -84,7 +84,7 @@ module KubernetesHelpers
     end

     logs_url = service.api_url + "/api/v1/namespaces/#{namespace}/pods/#{pod_name}" \
-      "/log?#{container_query_param}tailLines=#{Clusters::Platforms::Kubernetes::LOGS_LIMIT}"
+      "/log?#{container_query_param}tailLines=#{Clusters::Platforms::Kubernetes::LOGS_LIMIT}&timestamps=true"

     if status
       response = { status: status }
@@ -331,7 +331,7 @@ module KubernetesHelpers
   end

   def kube_logs_body
-    "Log 1\nLog 2\nLog 3"
+    "2019-12-13T14:04:22.123456Z Log 1\n2019-12-13T14:04:23.123456Z Log 2\n2019-12-13T14:04:24.123456Z Log 3"
   end

   def kube_deployments_body
...