diff --git a/changelogs/unreleased/ak-logs-timestamp.yml b/changelogs/unreleased/ak-logs-timestamp.yml new file mode 100644 index 0000000000000000000000000000000000000000..40dca009d236afbe12dd35d14f8a723fdd737f9d --- /dev/null +++ b/changelogs/unreleased/ak-logs-timestamp.yml @@ -0,0 +1,5 @@ +--- +title: Add timestamps to pod logs +merge_request: 21663 +author: +type: added diff --git a/ee/app/assets/javascripts/logs/stores/getters.js b/ee/app/assets/javascripts/logs/stores/getters.js index 6997b2c709057567826ecf102edc18cd369c5cfe..c7dbb72ce3deffb395a907fe1a3450df72e07d28 100644 --- a/ee/app/assets/javascripts/logs/stores/getters.js +++ b/ee/app/assets/javascripts/logs/stores/getters.js @@ -1,4 +1,9 @@ -export const trace = state => state.logs.lines.join('\n'); +import dateFormat from 'dateformat'; + +export const trace = state => + state.logs.lines + .map(item => [dateFormat(item.timestamp, 'UTC:mmm dd HH:MM:ss.l"Z"'), item.message].join(' | ')) + .join('\n'); // prevent babel-plugin-rewire from generating an invalid default during karma tests export default () => {}; diff --git a/ee/app/models/ee/clusters/platforms/kubernetes.rb b/ee/app/models/ee/clusters/platforms/kubernetes.rb index dff3aa524a3dea2432dec80f71b8723d54204a7a..6b54c2b6c87feaf3920fd9a23fe2f773dcb459f7 100644 --- a/ee/app/models/ee/clusters/platforms/kubernetes.rb +++ b/ee/app/models/ee/clusters/platforms/kubernetes.rb @@ -99,10 +99,18 @@ def pod_logs(pod_name, namespace, container: nil) def platform_pod_logs(namespace, pod_name, container_name) logs = kubeclient.get_pod_log( - pod_name, namespace, container: container_name, tail_lines: LOGS_LIMIT + pod_name, namespace, container: container_name, tail_lines: LOGS_LIMIT, timestamps: true ).body - logs.strip.split("\n") + logs.strip.split("\n").map do |line| + # message contains a RFC3339Nano timestamp, then a space, then the log line. 
+ # resolution of the nanoseconds can vary, so we split on the first space + values = line.split(' ', 2) + { + timestamp: values[0], + message: values[1] + } + end end def elastic_stack_pod_logs(namespace, pod_name, container_name) diff --git a/ee/lib/gitlab/elasticsearch/logs.rb b/ee/lib/gitlab/elasticsearch/logs.rb index 52173a52beb4310585e50aa9601f18ded2ba2428..8a071c1a7fb268ab00438ff0ae2ecf103d72bcfc 100644 --- a/ee/lib/gitlab/elasticsearch/logs.rb +++ b/ee/lib/gitlab/elasticsearch/logs.rb @@ -51,14 +51,19 @@ def pod_logs(namespace, pod_name, container_name = nil) { "@timestamp": { order: :desc } }, { "offset": { order: :desc } } ], - # only return the message field in the response - _source: ["message"], + # only return these fields in the response + _source: ["@timestamp", "message"], # fixed limit for now, we should support paginated queries size: ::Gitlab::Elasticsearch::Logs::LOGS_LIMIT } response = @client.search body: body - result = response.fetch("hits", {}).fetch("hits", []).map { |h| h["_source"]["message"] } + result = response.fetch("hits", {}).fetch("hits", []).map do |hit| + { + timestamp: hit["_source"]["@timestamp"], + message: hit["_source"]["message"] + } + end # we queried for the N-most recent records but we want them ordered oldest to newest result.reverse diff --git a/ee/spec/features/projects/environments_pod_logs_spec.rb b/ee/spec/features/projects/environments_pod_logs_spec.rb index 3f8890b8a81335d729d591e21cbb8313bc3cf857..6179b62c5eab4abc401c17acc1c0f3a4693cd08e 100644 --- a/ee/spec/features/projects/environments_pod_logs_spec.rb +++ b/ee/spec/features/projects/environments_pod_logs_spec.rb @@ -70,7 +70,7 @@ expect(item.text).to eq(pod_names[i]) end end - expect(page).to have_content("Log 1 Log 2 Log 3") + expect(page).to have_content("Dec 13 14:04:22.123Z | Log 1 Dec 13 14:04:23.123Z | Log 2 Dec 13 14:04:24.123Z | Log 3") end end diff --git a/ee/spec/fixtures/lib/elasticsearch/logs_response.json 
b/ee/spec/fixtures/lib/elasticsearch/logs_response.json index 3f9a976563b9c48fe76a85558fc766d7aa794153..7a733882089746bb781aa6d59ccbf321a063220d 100644 --- a/ee/spec/fixtures/lib/elasticsearch/logs_response.json +++ b/ee/spec/fixtures/lib/elasticsearch/logs_response.json @@ -18,7 +18,8 @@ "_id": "SkbxAW4BWzhswgK-C5-R", "_score": null, "_source": { - "message": "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" + "message": "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13", + "@timestamp": "2019-12-13T14:35:34.034Z" }, "sort": [ 9999998, @@ -31,7 +32,8 @@ "_id": "wEigD24BWzhswgK-WUU2", "_score": null, "_source": { - "message": "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" + "message": "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13", + "@timestamp": "2019-12-13T14:35:35.034Z" }, "sort": [ 9999949, @@ -44,7 +46,8 @@ "_id": "gE6uOG4BWzhswgK-M0x2", "_score": null, "_source": { - "message": "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" + "message": "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13", + "@timestamp": "2019-12-13T14:35:36.034Z" }, "sort": [ 9999944, @@ -57,7 +60,8 @@ "_id": "0klPHW4BWzhswgK-nfCF", "_score": null, "_source": { - "message": "- -\u003e /" + "message": "- -\u003e /", + "@timestamp": "2019-12-13T14:35:37.034Z" }, "sort": [ 9999934, diff --git a/ee/spec/frontend/logs/components/environment_logs_spec.js b/ee/spec/frontend/logs/components/environment_logs_spec.js index fb5d684d5e4e7364df1e0cadfe60d6051636078e..7300759ffbea20a31f24e68d1ecc588b859888f7 100644 --- a/ee/spec/frontend/logs/components/environment_logs_spec.js +++ b/ee/spec/frontend/logs/components/environment_logs_spec.js @@ -10,7 +10,8 @@ import { mockEnvName, mockEnvironments, mockPods, - mockLines, + mockLogsResult, + mockTrace, mockPodName, mockEnvironmentsEndpoint, } from '../mock_data'; @@ -152,14 +153,14 @@ describe('EnvironmentLogs', () => { [state.pods.current] = 
state.pods.options; state.logs.isComplete = false; - state.logs.lines = mockLines; + state.logs.lines = mockLogsResult; }); actionMocks.showPodLogs.mockImplementation(podName => { state.pods.options = mockPods; [state.pods.current] = podName; state.logs.isComplete = false; - state.logs.lines = mockLines; + state.logs.lines = mockLogsResult; }); actionMocks.fetchEnvironments.mockImplementation(() => { state.environments.options = mockEnvironments; @@ -200,8 +201,8 @@ describe('EnvironmentLogs', () => { it('populates logs trace', () => { const trace = findLogTrace(); - expect(trace.text().split('\n').length).toBe(mockLines.length); - expect(trace.text().split('\n')).toEqual(mockLines); + expect(trace.text().split('\n').length).toBe(mockTrace.length); + expect(trace.text().split('\n')).toEqual(mockTrace); }); it('update control buttons state', () => { diff --git a/ee/spec/frontend/logs/mock_data.js b/ee/spec/frontend/logs/mock_data.js index 1ed160b9117eb749101f8c410f052a2395d184ac..e8b68e9b5c47f18057819fa2933ac23355eaae22 100644 --- a/ee/spec/frontend/logs/mock_data.js +++ b/ee/spec/frontend/logs/mock_data.js @@ -24,19 +24,57 @@ export const mockPods = [ 'production-764c58d697-ddddd', ]; -export const mockLines = [ - '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13', - '- -> /', - '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13', - '- -> /', - '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13', - '- -> /', - '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13', - '- -> /', - '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13', - '- -> /', - '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13', - '- -> /', - '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13', - '- -> /', +export const mockLogsResult = [ + { + timestamp: '2019-12-13T13:43:18.2760123Z', + message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13', + }, + { timestamp: 
'2019-12-13T13:43:18.2760123Z', message: '- -> /' }, + { + timestamp: '2019-12-13T13:43:26.8420123Z', + message: '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13', + }, + { timestamp: '2019-12-13T13:43:26.8420123Z', message: '- -> /' }, + { + timestamp: '2019-12-13T13:43:28.3710123Z', + message: '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13', + }, + { timestamp: '2019-12-13T13:43:28.3710123Z', message: '- -> /' }, + { + timestamp: '2019-12-13T13:43:36.8860123Z', + message: '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13', + }, + { timestamp: '2019-12-13T13:43:36.8860123Z', message: '- -> /' }, + { + timestamp: '2019-12-13T13:43:38.4000123Z', + message: '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13', + }, + { timestamp: '2019-12-13T13:43:38.4000123Z', message: '- -> /' }, + { + timestamp: '2019-12-13T13:43:46.8420123Z', + message: '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13', + }, + { timestamp: '2019-12-13T13:43:46.8430123Z', message: '- -> /' }, + { + timestamp: '2019-12-13T13:43:48.3240123Z', + message: '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13', + }, + { timestamp: '2019-12-13T13:43:48.3250123Z', message: '- -> /' }, +]; + +export const mockTrace = [ + 'Dec 13 13:43:18.276Z | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13', + 'Dec 13 13:43:18.276Z | - -> /', + 'Dec 13 13:43:26.842Z | 10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13', + 'Dec 13 13:43:26.842Z | - -> /', + 'Dec 13 13:43:28.371Z | 10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13', + 'Dec 13 13:43:28.371Z | - -> /', + 'Dec 13 13:43:36.886Z | 10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13', + 'Dec 13 13:43:36.886Z | - -> /', + 'Dec 13 13:43:38.400Z | 10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13', + 'Dec 13 13:43:38.400Z | - -> /', + 'Dec 13 13:43:46.842Z | 10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] 
"GET / HTTP/1.1" 200 13', + 'Dec 13 13:43:46.843Z | - -> /', + 'Dec 13 13:43:48.324Z | 10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13', + 'Dec 13 13:43:48.325Z | - -> /', ]; diff --git a/ee/spec/frontend/logs/stores/actions_spec.js b/ee/spec/frontend/logs/stores/actions_spec.js index 41fe06ac0e342f633b08c392a1d561ec3288ac55..4b9a80c3711ab8c6edc9ee27ed5afab7aee1e1a0 100644 --- a/ee/spec/frontend/logs/stores/actions_spec.js +++ b/ee/spec/frontend/logs/stores/actions_spec.js @@ -14,7 +14,7 @@ import { mockEnvironmentsEndpoint, mockEnvironments, mockPods, - mockLines, + mockLogsResult, mockEnvName, } from '../mock_data'; @@ -122,7 +122,7 @@ describe('Logs Store actions', () => { .reply(200, { pod_name: mockPodName, pods: mockPods, - logs: mockLines, + logs: mockLogsResult, }); mock.onGet(endpoint).replyOnce(202); // mock reactive cache @@ -136,7 +136,7 @@ describe('Logs Store actions', () => { { type: types.REQUEST_LOGS_DATA }, { type: types.SET_CURRENT_POD_NAME, payload: mockPodName }, { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods }, - { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLines }, + { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult }, ], [], done, @@ -152,7 +152,7 @@ describe('Logs Store actions', () => { mock.onGet(endpoint, { params: { environment_name: mockEnvName } }).reply(200, { pod_name: mockPodName, pods: mockPods, - logs: mockLines, + logs: mockLogsResult, }); mock.onGet(endpoint).replyOnce(202); // mock reactive cache @@ -165,7 +165,7 @@ describe('Logs Store actions', () => { { type: types.REQUEST_LOGS_DATA }, { type: types.SET_CURRENT_POD_NAME, payload: mockPodName }, { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods }, - { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLines }, + { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult }, ], [], done, diff --git a/ee/spec/frontend/logs/stores/getters_spec.js b/ee/spec/frontend/logs/stores/getters_spec.js index 
08431dea952f60bbe0b3814cd50b3a804373b1dd..1be598ac91d110302125ddd01752e170239af3b2 100644 --- a/ee/spec/frontend/logs/stores/getters_spec.js +++ b/ee/spec/frontend/logs/stores/getters_spec.js @@ -1,7 +1,7 @@ import * as getters from 'ee/logs/stores/getters'; import logsPageState from 'ee/logs/stores/state'; -import { mockLines } from '../mock_data'; +import { mockLogsResult, mockTrace } from '../mock_data'; describe('Logs Store getters', () => { let state; @@ -29,11 +29,11 @@ describe('Logs Store getters', () => { describe('when state logs are set', () => { beforeEach(() => { - state.logs.lines = mockLines; + state.logs.lines = mockLogsResult; }); it('returns an empty string', () => { - expect(getters.trace(state)).toEqual(mockLines.join('\n')); + expect(getters.trace(state)).toEqual(mockTrace.join('\n')); }); }); }); diff --git a/ee/spec/frontend/logs/stores/mutations_spec.js b/ee/spec/frontend/logs/stores/mutations_spec.js index f5312369206db8f0b56fe86e08c590c122e1f834..aa64985fa6a90a2737cf9648a47ccb63efcaca81 100644 --- a/ee/spec/frontend/logs/stores/mutations_spec.js +++ b/ee/spec/frontend/logs/stores/mutations_spec.js @@ -8,7 +8,7 @@ import { mockEnvironments, mockPods, mockPodName, - mockLines, + mockLogsResult, } from '../mock_data'; describe('Logs Store Mutations', () => { @@ -83,11 +83,11 @@ describe('Logs Store Mutations', () => { describe('RECEIVE_LOGS_DATA_SUCCESS', () => { it('receives logs lines', () => { - mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLines); + mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLogsResult); expect(state.logs).toEqual( expect.objectContaining({ - lines: mockLines, + lines: mockLogsResult, isLoading: false, isComplete: true, }), diff --git a/ee/spec/lib/gitlab/elasticsearch/logs_spec.rb b/ee/spec/lib/gitlab/elasticsearch/logs_spec.rb index f476f538d724ed3ffddb3f1365909ee8b7e757a5..a09927d7ad07abe5ad9fdea54f77422687ffa4af 100644 --- a/ee/spec/lib/gitlab/elasticsearch/logs_spec.rb +++ 
b/ee/spec/lib/gitlab/elasticsearch/logs_spec.rb @@ -5,10 +5,10 @@ describe Gitlab::Elasticsearch::Logs do let(:client) { Elasticsearch::Transport::Client } - let(:es_message_1) { "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } - let(:es_message_2) { "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } - let(:es_message_3) { "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } - let(:es_message_4) { "- -\u003e /" } + let(:es_message_1) { { timestamp: "2019-12-13T14:35:34.034Z", message: "10.8.2.1 - - [25/Oct/2019:08:03:22 UTC] \"GET / HTTP/1.1\" 200 13" } } + let(:es_message_2) { { timestamp: "2019-12-13T14:35:35.034Z", message: "10.8.2.1 - - [27/Oct/2019:23:49:54 UTC] \"GET / HTTP/1.1\" 200 13" } } + let(:es_message_3) { { timestamp: "2019-12-13T14:35:36.034Z", message: "10.8.2.1 - - [04/Nov/2019:23:09:24 UTC] \"GET / HTTP/1.1\" 200 13" } } + let(:es_message_4) { { timestamp: "2019-12-13T14:35:37.034Z", message: "- -\u003e /" } } let(:es_response) { JSON.parse(fixture_file('lib/elasticsearch/logs_response.json', dir: 'ee')) } @@ -53,6 +53,7 @@ } ], _source: [ + "@timestamp", "message" ], size: 500 @@ -101,6 +102,7 @@ } ], _source: [ + "@timestamp", "message" ], size: 500 diff --git a/ee/spec/models/ee/clusters/platforms/kubernetes_spec.rb b/ee/spec/models/ee/clusters/platforms/kubernetes_spec.rb index b32a1f16ab9a91666a19642544ace3fc5feb1e49..b25289260e6c039d9c3d14b91e386cf3f50df4bf 100644 --- a/ee/spec/models/ee/clusters/platforms/kubernetes_spec.rb +++ b/ee/spec/models/ee/clusters/platforms/kubernetes_spec.rb @@ -141,12 +141,19 @@ let(:pod_name) { 'pod-1' } let(:namespace) { 'app' } let(:container) { 'some-container' } + let(:expected_logs) do + [ + { message: "Log 1", timestamp: "2019-12-13T14:04:22.123456Z" }, + { message: "Log 2", timestamp: "2019-12-13T14:04:23.123456Z" }, + { message: "Log 3", timestamp: "2019-12-13T14:04:24.123456Z" } + ] + end subject { service.read_pod_logs(environment.id, pod_name, 
namespace, container: container) } shared_examples 'successful log request' do it do - expect(subject[:logs]).to eq(["Log 1", "Log 2", "Log 3"]) + expect(subject[:logs]).to eq(expected_logs) expect(subject[:status]).to eq(:success) expect(subject[:pod_name]).to eq(pod_name) expect(subject[:container_name]).to eq(container) @@ -171,7 +178,7 @@ before do expect_any_instance_of(::Clusters::Applications::ElasticStack).to receive(:elasticsearch_client).at_least(:once).and_return(Elasticsearch::Transport::Client.new) - expect_any_instance_of(::Gitlab::Elasticsearch::Logs).to receive(:pod_logs).and_return(["Log 1", "Log 2", "Log 3"]) + expect_any_instance_of(::Gitlab::Elasticsearch::Logs).to receive(:pod_logs).and_return(expected_logs) stub_feature_flags(enable_cluster_application_elastic_stack: true) end diff --git a/spec/support/helpers/kubernetes_helpers.rb b/spec/support/helpers/kubernetes_helpers.rb index cac43e94a927886d4d8303b0641c01306a9746b6..a3b527e0ffe3d1e6a4290b5e91690cd54ac21d21 100644 --- a/spec/support/helpers/kubernetes_helpers.rb +++ b/spec/support/helpers/kubernetes_helpers.rb @@ -84,7 +84,7 @@ def stub_kubeclient_logs(pod_name, namespace, container: nil, status: nil, messa end logs_url = service.api_url + "/api/v1/namespaces/#{namespace}/pods/#{pod_name}" \ - "/log?#{container_query_param}tailLines=#{Clusters::Platforms::Kubernetes::LOGS_LIMIT}" + "/log?#{container_query_param}tailLines=#{Clusters::Platforms::Kubernetes::LOGS_LIMIT}&timestamps=true" if status response = { status: status } @@ -331,7 +331,7 @@ def kube_pods_body end def kube_logs_body - "Log 1\nLog 2\nLog 3" + "2019-12-13T14:04:22.123456Z Log 1\n2019-12-13T14:04:23.123456Z Log 2\n2019-12-13T14:04:24.123456Z Log 3" end def kube_deployments_body