diff --git a/package.json b/package.json index d660bc30e5d2a5..0265250842756d 100644 --- a/package.json +++ b/package.json @@ -131,6 +131,7 @@ "@kbn/config-schema": "link:packages/kbn-config-schema", "@kbn/i18n": "link:packages/kbn-i18n", "@kbn/interpreter": "link:packages/kbn-interpreter", + "@kbn/legacy-logging": "link:packages/kbn-legacy-logging", "@kbn/logging": "link:packages/kbn-logging", "@kbn/monaco": "link:packages/kbn-monaco", "@kbn/std": "link:packages/kbn-std", diff --git a/packages/kbn-legacy-logging/README.md b/packages/kbn-legacy-logging/README.md new file mode 100644 index 00000000000000..4c5989fc892dc5 --- /dev/null +++ b/packages/kbn-legacy-logging/README.md @@ -0,0 +1,4 @@ +# @kbn/legacy-logging + +This package contains the implementation of the legacy logging +system, based on `@hapi/good` \ No newline at end of file diff --git a/packages/kbn-legacy-logging/package.json b/packages/kbn-legacy-logging/package.json new file mode 100644 index 00000000000000..9311b3e2a77b37 --- /dev/null +++ b/packages/kbn-legacy-logging/package.json @@ -0,0 +1,15 @@ +{ + "name": "@kbn/legacy-logging", + "version": "1.0.0", + "private": true, + "license": "Apache-2.0", + "main": "./target/index.js", + "scripts": { + "build": "tsc", + "kbn:bootstrap": "yarn build", + "kbn:watch": "yarn build --watch" + }, + "dependencies": { + "@kbn/std": "link:../kbn-std" + } +} diff --git a/src/legacy/server/logging/configuration.js b/packages/kbn-legacy-logging/src/get_logging_config.ts similarity index 74% rename from src/legacy/server/logging/configuration.js rename to packages/kbn-legacy-logging/src/get_logging_config.ts index 267dc9a334de88..cf49177e50b7be 100644 --- a/src/legacy/server/logging/configuration.js +++ b/packages/kbn-legacy-logging/src/get_logging_config.ts @@ -18,20 +18,25 @@ */ import _ from 'lodash'; -import { getLoggerStream } from './log_reporter'; +import { getLogReporter } from './log_reporter'; +import { LegacyLoggingConfig } from './schema'; -export default function loggingConfiguration(config) { - const events = config.get('logging.events'); +/** + * Returns the `@hapi/good` plugin configuration to be used for the legacy logging + * @param config + */ +export function getLoggingConfiguration(config: LegacyLoggingConfig, opsInterval: number) { + const events = config.events; - if (config.get('logging.silent')) { + if (config.silent) { _.defaults(events, {}); - } else if (config.get('logging.quiet')) { + } else if (config.quiet) { _.defaults(events, { log: ['listening', 'error', 'fatal'], request: ['error'], error: '*', }); - } else if (config.get('logging.verbose')) { + } else if (config.verbose) { _.defaults(events, { log: '*', ops: '*', @@ -47,24 +52,24 @@ export default function loggingConfiguration(config) { }); } - const loggerStream = getLoggerStream({ + const loggerStream = getLogReporter({ config: { - json: config.get('logging.json'), - dest: config.get('logging.dest'), - timezone: config.get('logging.timezone'), + json: config.json, + dest: config.dest, + timezone: config.timezone, // I'm adding the default here because if you add another filter // using the commandline it will remove authorization. I want users // to have to explicitly set --logging.filter.authorization=none or // --logging.filter.cookie=none to have it show up in the logs. 
-      filter: _.defaults(config.get('logging.filter'), {
+      filter: _.defaults(config.filter, {
         authorization: 'remove',
         cookie: 'remove',
       }),
     },
     events: _.transform(
       events,
-      function (filtered, val, key) {
+      function (filtered: Record<string, string>, val: string, key: string) {
         // provide a string compatible way to remove events
         if (val !== '!') filtered[key] = val;
       },
@@ -74,7 +79,7 @@
   const options = {
     ops: {
-      interval: config.get('ops.interval'),
+      interval: opsInterval,
     },
     includes: {
       request: ['headers', 'payload'],
diff --git a/packages/kbn-legacy-logging/src/index.ts b/packages/kbn-legacy-logging/src/index.ts
new file mode 100644
index 00000000000000..0fa5f65abf8617
--- /dev/null
+++ b/packages/kbn-legacy-logging/src/index.ts
@@ -0,0 +1,25 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export { LegacyLoggingConfig, legacyLoggingConfigSchema } from './schema';
+export { attachMetaData } from './metadata';
+export { setupLoggingRotate } from './rotate';
+export { setupLogging, reconfigureLogging } from './setup_logging';
+export { getLoggingConfiguration } from './get_logging_config';
+export { LegacyLoggingServer } from './legacy_logging_server';
diff --git a/src/core/server/legacy/logging/legacy_logging_server.test.ts b/packages/kbn-legacy-logging/src/legacy_logging_server.test.ts
similarity index 86%
rename from src/core/server/legacy/logging/legacy_logging_server.test.ts
rename to packages/kbn-legacy-logging/src/legacy_logging_server.test.ts
index 2f6c34e0fc5d6b..9b1ba87c250dcc 100644
--- a/src/core/server/legacy/logging/legacy_logging_server.test.ts
+++ b/packages/kbn-legacy-logging/src/legacy_logging_server.test.ts
@@ -17,11 +17,9 @@
  * under the License.
*/ -jest.mock('../../../../legacy/server/config'); -jest.mock('../../../../legacy/server/logging'); +jest.mock('./setup_logging'); -import { LogLevel } from '../../logging'; -import { LegacyLoggingServer } from './legacy_logging_server'; +import { LegacyLoggingServer, LogRecord } from './legacy_logging_server'; test('correctly forwards log records.', () => { const loggingServer = new LegacyLoggingServer({ events: {} }); @@ -29,28 +27,37 @@ test('correctly forwards log records.', () => { loggingServer.events.on('log', onLogMock); const timestamp = 1554433221100; - const firstLogRecord = { + const firstLogRecord: LogRecord = { timestamp: new Date(timestamp), pid: 5355, - level: LogLevel.Info, + level: { + id: 'info', + value: 5, + }, context: 'some-context', message: 'some-message', }; - const secondLogRecord = { + const secondLogRecord: LogRecord = { timestamp: new Date(timestamp), pid: 5355, - level: LogLevel.Error, + level: { + id: 'error', + value: 3, + }, context: 'some-context.sub-context', message: 'some-message', meta: { unknown: 2 }, error: new Error('some-error'), }; - const thirdLogRecord = { + const thirdLogRecord: LogRecord = { timestamp: new Date(timestamp), pid: 5355, - level: LogLevel.Trace, + level: { + id: 'trace', + value: 7, + }, context: 'some-context.sub-context', message: 'some-message', meta: { tags: ['important', 'tags'], unknown: 2 }, diff --git a/src/core/server/legacy/logging/legacy_logging_server.ts b/packages/kbn-legacy-logging/src/legacy_logging_server.ts similarity index 73% rename from src/core/server/legacy/logging/legacy_logging_server.ts rename to packages/kbn-legacy-logging/src/legacy_logging_server.ts index 690c9c0bfe21d5..45e4bda0b007c0 100644 --- a/src/core/server/legacy/logging/legacy_logging_server.ts +++ b/packages/kbn-legacy-logging/src/legacy_logging_server.ts @@ -17,29 +17,40 @@ * under the License. */ -import { ServerExtType } from '@hapi/hapi'; -import Podium from '@hapi/podium'; -// @ts-expect-error: implicit any for JS file -import { Config } from '../../../../legacy/server/config'; -// @ts-expect-error: implicit any for JS file -import { setupLogging } from '../../../../legacy/server/logging'; -import { LogLevel, LogRecord } from '../../logging'; -import { LegacyVars } from '../../types'; - -export const metadataSymbol = Symbol('log message with metadata'); -export function attachMetaData(message: string, metadata: LegacyVars = {}) { - return { - [metadataSymbol]: { - message, - metadata, - }, - }; +import { ServerExtType, Server } from '@hapi/hapi'; +import Podium from 'podium'; +import { setupLogging } from './setup_logging'; +import { attachMetaData } from './metadata'; +import { legacyLoggingConfigSchema } from './schema'; + +// these LogXXX types are duplicated to avoid a cross dependency with the @kbn/logging package. +// typescript will error if they diverge at some point. 
+type LogLevelId = 'all' | 'fatal' | 'error' | 'warn' | 'info' | 'debug' | 'trace' | 'off';
+
+interface LogLevel {
+  id: LogLevelId;
+  value: number;
+}
+
+export interface LogRecord {
+  timestamp: Date;
+  level: LogLevel;
+  context: string;
+  message: string;
+  error?: Error;
+  meta?: { [name: string]: any };
+  pid: number;
 }
+
 const isEmptyObject = (obj: object) => Object.keys(obj).length === 0;
 
 function getDataToLog(error: Error | undefined, metadata: object, message: string) {
-  if (error) return error;
-  if (!isEmptyObject(metadata)) return attachMetaData(message, metadata);
+  if (error) {
+    return error;
+  }
+  if (!isEmptyObject(metadata)) {
+    return attachMetaData(message, metadata);
+  }
   return message;
 }
@@ -50,7 +61,7 @@ interface PluginRegisterParams {
       options: PluginRegisterParams['options']
     ) => Promise<void>;
   };
-  options: LegacyVars;
+  options: Record<string, any>;
 }
 
 /**
@@ -84,22 +95,19 @@
   private onPostStopCallback?: () => void;
 
-  constructor(legacyLoggingConfig: Readonly<LegacyVars>) {
+  constructor(legacyLoggingConfig: any) {
     // We set `ops.interval` to max allowed number and `ops` filter to value
     // that doesn't exist to avoid logging of ops at all, if turned on it will be
     // logged by the "legacy" Kibana.
-    const config = {
-      logging: {
-        ...legacyLoggingConfig,
-        events: {
-          ...legacyLoggingConfig.events,
-          ops: '__no-ops__',
-        },
+    const { value: loggingConfig } = legacyLoggingConfigSchema.validate({
+      ...legacyLoggingConfig,
+      events: {
+        ...legacyLoggingConfig.events,
+        ops: '__no-ops__',
       },
-      ops: { interval: 2147483647 },
-    };
+    });
 
-    setupLogging(this, Config.withDefaultSchema(config));
+    setupLogging((this as unknown) as Server, loggingConfig, 2147483647);
   }
 
   public register({ plugin: { register }, options }: PluginRegisterParams): Promise<void> {
diff --git a/packages/kbn-legacy-logging/src/log_events.ts b/packages/kbn-legacy-logging/src/log_events.ts
new file mode 100644
index 00000000000000..296c255a75185c
--- /dev/null
+++ b/packages/kbn-legacy-logging/src/log_events.ts
@@ -0,0 +1,80 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import { EventData, isEventData } from './metadata';
+
+export interface BaseEvent {
+  event: string;
+  timestamp: number;
+  pid: number;
+  tags?: string[];
+}
+
+export interface ResponseEvent extends BaseEvent {
+  event: 'response';
+  method: 'GET' | 'POST' | 'PUT' | 'DELETE';
+  statusCode: number;
+  path: string;
+  headers: Record<string, string>;
+  responsePayload: string;
+  responseTime: string;
+  query: Record<string, string>;
+}
+
+export interface OpsEvent extends BaseEvent {
+  event: 'ops';
+  os: {
+    load: string[];
+  };
+  proc: Record<string, any>;
+  load: string;
+}
+
+export interface ErrorEvent extends BaseEvent {
+  event: 'error';
+  error: Error;
+  url: string;
+}
+
+export interface UndeclaredErrorEvent extends BaseEvent {
+  error: Error;
+}
+
+export interface LogEvent extends BaseEvent {
+  data: EventData;
+}
+
+export interface UnkownEvent extends BaseEvent {
+  data: string | Record<string, any>;
+}
+
+export type AnyEvent =
+  | ResponseEvent
+  | OpsEvent
+  | ErrorEvent
+  | UndeclaredErrorEvent
+  | LogEvent
+  | UnkownEvent;
+
+export const isResponseEvent = (e: AnyEvent): e is ResponseEvent => e.event === 'response';
+export const isOpsEvent = (e: AnyEvent): e is OpsEvent => e.event === 'ops';
+export const isErrorEvent = (e: AnyEvent): e is ErrorEvent => e.event === 'error';
+export const isLogEvent = (e: AnyEvent): e is LogEvent => isEventData((e as LogEvent).data);
+export const isUndeclaredErrorEvent = (e: AnyEvent): e is UndeclaredErrorEvent =>
+  (e as any).error instanceof Error;
diff --git a/src/legacy/server/logging/log_format.js b/packages/kbn-legacy-logging/src/log_format.ts
similarity index 61%
rename from src/legacy/server/logging/log_format.js
rename to packages/kbn-legacy-logging/src/log_format.ts
index 6edda8c4be9076..e357c2420c1788 100644
--- a/src/legacy/server/logging/log_format.js
+++ b/packages/kbn-legacy-logging/src/log_format.ts
@@ -19,16 +19,29 @@
 import Stream from 'stream';
 import moment from 'moment-timezone';
-import { get, _ } from 'lodash';
+import _ from 'lodash';
 import queryString from 'query-string';
 import numeral from '@elastic/numeral';
 import chalk from 'chalk';
+// @ts-expect-error missing type def
 import stringify from 'json-stringify-safe';
-import applyFiltersToKeys from './apply_filters_to_keys';
 import { inspect } from 'util';
-import { logWithMetadata } from './log_with_metadata';
 
-function serializeError(err = {}) {
+import { applyFiltersToKeys } from './utils';
+import { getLogEventData } from './metadata';
+import { LegacyLoggingConfig } from './schema';
+import {
+  AnyEvent,
+  isResponseEvent,
+  isOpsEvent,
+  isErrorEvent,
+  isLogEvent,
+  isUndeclaredErrorEvent,
+} from './log_events';
+
+export type LogFormatConfig = Pick<LegacyLoggingConfig, 'json' | 'dest' | 'timezone' | 'filter'>;
+
+function serializeError(err: any = {}) {
   return {
     message: err.message,
     name: err.name,
@@ -38,34 +51,37 @@
   };
 }
 
-const levelColor = function (code) {
-  if (code < 299) return chalk.green(code);
-  if (code < 399) return chalk.yellow(code);
-  if (code < 499) return chalk.magentaBright(code);
-  return chalk.red(code);
+const levelColor = function (code: number) {
+  if (code < 299) return chalk.green(String(code));
+  if (code < 399) return chalk.yellow(String(code));
+  if (code < 499) return chalk.magentaBright(String(code));
+  return chalk.red(String(code));
 };
 
-export default class TransformObjStream extends Stream.Transform {
-  constructor(config) {
+export abstract class BaseLogFormat extends Stream.Transform {
+  constructor(private readonly config: LogFormatConfig) {
     super({
       readableObjectMode: false,
       writableObjectMode: true,
     });
-    this.config = config;
   }
 
-  filter(data) {
-    if (!this.config.filter) return data;
+  abstract format(data: Record<string, any>): string;
+
+  filter(data: Record<string, any>) {
+    if (!this.config.filter) {
+      return data;
+    }
     return applyFiltersToKeys(data, this.config.filter);
   }
 
-  _transform(event, enc, next) {
+  _transform(event: AnyEvent, enc: string, next: Stream.TransformCallback) {
    const data = this.filter(this.readEvent(event));
    this.push(this.format(data) + '\n');
    next();
  }
 
-  extractAndFormatTimestamp(data, format) {
+  extractAndFormatTimestamp(data: Record<string, any>, format?: string) {
     const { timezone } = this.config;
     const date = moment(data['@timestamp']);
     if (timezone) {
@@ -74,18 +90,18 @@
     return date.format(format);
   }
 
-  readEvent(event) {
-    const data = {
+  readEvent(event: AnyEvent) {
+    const data: Record<string, any> = {
       type: event.event,
       '@timestamp': event.timestamp,
-      tags: [].concat(event.tags || []),
+      tags: [...(event.tags || [])],
       pid: event.pid,
     };
 
-    if (data.type === 'response') {
+    if (isResponseEvent(event)) {
       _.defaults(data, _.pick(event, ['method', 'statusCode']));
 
-      const source = get(event, 'source', {});
+      const source = _.get(event, 'source', {});
       data.req = {
         url: event.path,
         method: event.method || '',
@@ -95,21 +111,21 @@
         referer: source.referer,
       };
 
-      let contentLength = 0;
-      if (typeof event.responsePayload === 'object') {
-        contentLength = stringify(event.responsePayload).length;
-      } else {
-        contentLength = String(event.responsePayload).length;
-      }
+      const contentLength =
+        typeof event.responsePayload === 'object'
+          ? stringify(event.responsePayload).length
+          : String(event.responsePayload).length;
 
       data.res = {
         statusCode: event.statusCode,
         responseTime: event.responseTime,
-        contentLength: contentLength,
+        contentLength,
       };
 
       const query = queryString.stringify(event.query, { sort: false });
-      if (query) data.req.url += '?' + query;
+      if (query) {
+        data.req.url += '?'
+ query; + } data.message = data.req.method.toUpperCase() + ' '; data.message += data.req.url; @@ -118,38 +134,38 @@ export default class TransformObjStream extends Stream.Transform { data.message += ' '; data.message += chalk.gray(data.res.responseTime + 'ms'); data.message += chalk.gray(' - ' + numeral(contentLength).format('0.0b')); - } else if (data.type === 'ops') { + } else if (isOpsEvent(event)) { _.defaults(data, _.pick(event, ['pid', 'os', 'proc', 'load'])); data.message = chalk.gray('memory: '); - data.message += numeral(get(data, 'proc.mem.heapUsed')).format('0.0b'); + data.message += numeral(_.get(data, 'proc.mem.heapUsed')).format('0.0b'); data.message += ' '; data.message += chalk.gray('uptime: '); - data.message += numeral(get(data, 'proc.uptime')).format('00:00:00'); + data.message += numeral(_.get(data, 'proc.uptime')).format('00:00:00'); data.message += ' '; data.message += chalk.gray('load: ['); - data.message += get(data, 'os.load', []) - .map(function (val) { + data.message += _.get(data, 'os.load', []) + .map((val: number) => { return numeral(val).format('0.00'); }) .join(' '); data.message += chalk.gray(']'); data.message += ' '; data.message += chalk.gray('delay: '); - data.message += numeral(get(data, 'proc.delay')).format('0.000'); - } else if (data.type === 'error') { + data.message += numeral(_.get(data, 'proc.delay')).format('0.000'); + } else if (isErrorEvent(event)) { data.level = 'error'; data.error = serializeError(event.error); data.url = event.url; - const message = get(event, 'error.message'); + const message = _.get(event, 'error.message'); data.message = message || 'Unknown error (no message)'; - } else if (event.error instanceof Error) { + } else if (isUndeclaredErrorEvent(event)) { data.type = 'error'; data.level = _.includes(event.tags, 'fatal') ? 'fatal' : 'error'; data.error = serializeError(event.error); - const message = get(event, 'error.message'); + const message = _.get(event, 'error.message'); data.message = message || 'Unknown error object (no message)'; - } else if (logWithMetadata.isLogEvent(event.data)) { - _.assign(data, logWithMetadata.getLogEventData(event.data)); + } else if (isLogEvent(event)) { + _.assign(data, getLogEventData(event.data)); } else { data.message = _.isString(event.data) ? 
event.data : inspect(event.data); } diff --git a/src/legacy/server/logging/log_format_json.test.js b/packages/kbn-legacy-logging/src/log_format_json.test.ts similarity index 79% rename from src/legacy/server/logging/log_format_json.test.js rename to packages/kbn-legacy-logging/src/log_format_json.test.ts index ec7296d21672b2..f762daf01e5fa9 100644 --- a/src/legacy/server/logging/log_format_json.test.js +++ b/packages/kbn-legacy-logging/src/log_format_json.test.ts @@ -19,30 +19,31 @@ import moment from 'moment'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { attachMetaData } from '../../../../src/core/server/legacy/logging/legacy_logging_server'; -import { createListStream, createPromiseFromStreams } from '../../../core/server/utils'; - -import KbnLoggerJsonFormat from './log_format_json'; +import { attachMetaData } from './metadata'; +import { createListStream, createPromiseFromStreams } from './test_utils'; +import { KbnLoggerJsonFormat } from './log_format_json'; const time = +moment('2010-01-01T05:15:59Z', moment.ISO_8601); -const makeEvent = (eventType) => ({ +const makeEvent = (eventType: string) => ({ event: eventType, timestamp: time, }); describe('KbnLoggerJsonFormat', () => { - const config = {}; + const config: any = {}; describe('event types and messages', () => { - let format; + let format: KbnLoggerJsonFormat; beforeEach(() => { format = new KbnLoggerJsonFormat(config); }); it('log', async () => { - const result = await createPromiseFromStreams([createListStream([makeEvent('log')]), format]); + const result = await createPromiseFromStreams([ + createListStream([makeEvent('log')]), + format, + ]); const { type, message } = JSON.parse(result); expect(type).toBe('log'); @@ -64,7 +65,7 @@ describe('KbnLoggerJsonFormat', () => { referer: 'elastic.co', }, }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { type, method, statusCode, message, req } = JSON.parse(result); expect(type).toBe('response'); @@ -82,7 +83,7 @@ describe('KbnLoggerJsonFormat', () => { load: [1, 1, 2], }, }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { type, message } = JSON.parse(result); expect(type).toBe('ops'); @@ -98,7 +99,7 @@ describe('KbnLoggerJsonFormat', () => { }), tags: ['tag1', 'tag2'], }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, prop1, prop2, tags } = JSON.parse(result); expect(level).toBe(undefined); @@ -117,7 +118,7 @@ describe('KbnLoggerJsonFormat', () => { }), tags: ['tag1', 'tag2'], }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, prop1, prop2, tags } = JSON.parse(result); expect(level).toBe(undefined); @@ -132,7 +133,7 @@ describe('KbnLoggerJsonFormat', () => { data: attachMetaData('message for event'), tags: ['tag1', 'tag2'], }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, prop1, prop2, tags } = JSON.parse(result); expect(level).toBe(undefined); @@ -151,7 
+152,7 @@ describe('KbnLoggerJsonFormat', () => { }), tags: ['tag1', 'tag2'], }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, prop1, prop2, tags } = JSON.parse(result); expect(level).toBe('error'); @@ -170,7 +171,7 @@ describe('KbnLoggerJsonFormat', () => { message: 'test error 0', }, }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, error } = JSON.parse(result); expect(level).toBe('error'); @@ -183,7 +184,7 @@ describe('KbnLoggerJsonFormat', () => { event: 'error', error: {}, }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, error } = JSON.parse(result); expect(level).toBe('error'); @@ -193,9 +194,9 @@ describe('KbnLoggerJsonFormat', () => { it('event error instanceof Error', async () => { const event = { - error: new Error('test error 2'), + error: new Error('test error 2') as any, }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, error } = JSON.parse(result); expect(level).toBe('error'); @@ -210,10 +211,10 @@ describe('KbnLoggerJsonFormat', () => { it('event error instanceof Error - fatal', async () => { const event = { - error: new Error('test error 2'), + error: new Error('test error 2') as any, tags: ['fatal', 'tag2'], }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { tags, level, message, error } = JSON.parse(result); expect(tags).toEqual(['fatal', 'tag2']); @@ -229,9 +230,9 @@ describe('KbnLoggerJsonFormat', () => { it('event error instanceof Error, no message', async () => { const event = { - error: new Error(''), + error: new Error('') as any, }; - const result = await createPromiseFromStreams([createListStream([event]), format]); + const result = await createPromiseFromStreams([createListStream([event]), format]); const { level, message, error } = JSON.parse(result); expect(level).toBe('error'); @@ -250,18 +251,24 @@ describe('KbnLoggerJsonFormat', () => { it('logs in UTC', async () => { const format = new KbnLoggerJsonFormat({ timezone: 'UTC', - }); + } as any); - const result = await createPromiseFromStreams([createListStream([makeEvent('log')]), format]); + const result = await createPromiseFromStreams([ + createListStream([makeEvent('log')]), + format, + ]); const { '@timestamp': timestamp } = JSON.parse(result); expect(timestamp).toBe(moment.utc(time).format()); }); it('logs in local timezone timezone is undefined', async () => { - const format = new KbnLoggerJsonFormat({}); + const format = new KbnLoggerJsonFormat({} as any); - const result = await createPromiseFromStreams([createListStream([makeEvent('log')]), format]); + const result = await createPromiseFromStreams([ + createListStream([makeEvent('log')]), + format, + ]); const { '@timestamp': timestamp } = JSON.parse(result); expect(timestamp).toBe(moment(time).format()); diff --git a/src/legacy/server/logging/log_format_json.js b/packages/kbn-legacy-logging/src/log_format_json.ts similarity index 82% 
rename from src/legacy/server/logging/log_format_json.js rename to packages/kbn-legacy-logging/src/log_format_json.ts index bfceb78b24504c..7961fda7912ccc 100644 --- a/src/legacy/server/logging/log_format_json.js +++ b/packages/kbn-legacy-logging/src/log_format_json.ts @@ -17,15 +17,16 @@ * under the License. */ -import LogFormat from './log_format'; +// @ts-expect-error missing type def import stringify from 'json-stringify-safe'; +import { BaseLogFormat } from './log_format'; -const stripColors = function (string) { +const stripColors = function (string: string) { return string.replace(/\u001b[^m]+m/g, ''); }; -export default class KbnLoggerJsonFormat extends LogFormat { - format(data) { +export class KbnLoggerJsonFormat extends BaseLogFormat { + format(data: Record) { data.message = stripColors(data.message); data['@timestamp'] = this.extractAndFormatTimestamp(data); return stringify(data); diff --git a/src/legacy/server/logging/log_format_string.test.js b/packages/kbn-legacy-logging/src/log_format_string.test.ts similarity index 84% rename from src/legacy/server/logging/log_format_string.test.js rename to packages/kbn-legacy-logging/src/log_format_string.test.ts index 842325865cce22..0ed233228c1fd9 100644 --- a/src/legacy/server/logging/log_format_string.test.js +++ b/packages/kbn-legacy-logging/src/log_format_string.test.ts @@ -18,12 +18,10 @@ */ import moment from 'moment'; -// eslint-disable-next-line @kbn/eslint/no-restricted-paths -import { attachMetaData } from '../../../../src/core/server/legacy/logging/legacy_logging_server'; -import { createListStream, createPromiseFromStreams } from '../../../core/server/utils'; - -import KbnLoggerStringFormat from './log_format_string'; +import { attachMetaData } from './metadata'; +import { createListStream, createPromiseFromStreams } from './test_utils'; +import { KbnLoggerStringFormat } from './log_format_string'; const time = +moment('2010-01-01T05:15:59Z', moment.ISO_8601); @@ -39,7 +37,7 @@ describe('KbnLoggerStringFormat', () => { it('logs in UTC', async () => { const format = new KbnLoggerStringFormat({ timezone: 'UTC', - }); + } as any); const result = await createPromiseFromStreams([createListStream([makeEvent()]), format]); @@ -47,7 +45,7 @@ describe('KbnLoggerStringFormat', () => { }); it('logs in local timezone when timezone is undefined', async () => { - const format = new KbnLoggerStringFormat({}); + const format = new KbnLoggerStringFormat({} as any); const result = await createPromiseFromStreams([createListStream([makeEvent()]), format]); @@ -55,7 +53,7 @@ describe('KbnLoggerStringFormat', () => { }); describe('with metadata', () => { it('does not log meta data', async () => { - const format = new KbnLoggerStringFormat({}); + const format = new KbnLoggerStringFormat({} as any); const event = { data: attachMetaData('message for event', { prop1: 'value1', diff --git a/src/legacy/server/logging/log_format_string.js b/packages/kbn-legacy-logging/src/log_format_string.ts similarity index 84% rename from src/legacy/server/logging/log_format_string.js rename to packages/kbn-legacy-logging/src/log_format_string.ts index cbbf71dd894acb..3f024fac551199 100644 --- a/src/legacy/server/logging/log_format_string.js +++ b/packages/kbn-legacy-logging/src/log_format_string.ts @@ -20,11 +20,11 @@ import _ from 'lodash'; import chalk from 'chalk'; -import LogFormat from './log_format'; +import { BaseLogFormat } from './log_format'; const statuses = ['err', 'info', 'error', 'warning', 'fatal', 'status', 'debug']; -const typeColors = { +const 
typeColors: Record = { log: 'white', req: 'green', res: 'green', @@ -45,18 +45,19 @@ const typeColors = { scss: 'magentaBright', }; -const color = _.memoize(function (name) { +const color = _.memoize((name: string): ((...text: string[]) => string) => { + // @ts-expect-error couldn't even get rid of the error with an any cast return chalk[typeColors[name]] || _.identity; }); -const type = _.memoize(function (t) { +const type = _.memoize((t: string) => { return color(t)(_.pad(t, 7).slice(0, 7)); }); const workerType = process.env.kbnWorkerType ? `${type(process.env.kbnWorkerType)} ` : ''; -export default class KbnLoggerStringFormat extends LogFormat { - format(data) { +export class KbnLoggerStringFormat extends BaseLogFormat { + format(data: Record) { const time = color('time')(this.extractAndFormatTimestamp(data, 'HH:mm:ss.SSS')); const msg = data.error ? color('error')(data.error.stack) : color('message')(data.message); diff --git a/src/legacy/server/logging/log_interceptor.test.js b/packages/kbn-legacy-logging/src/log_interceptor.test.ts similarity index 90% rename from src/legacy/server/logging/log_interceptor.test.js rename to packages/kbn-legacy-logging/src/log_interceptor.test.ts index 492d1ffc8d167f..32da6432cc443a 100644 --- a/src/legacy/server/logging/log_interceptor.test.js +++ b/packages/kbn-legacy-logging/src/log_interceptor.test.ts @@ -17,13 +17,15 @@ * under the License. */ +import { ErrorEvent } from './log_events'; import { LogInterceptor } from './log_interceptor'; -function stubClientErrorEvent(errorMeta) { +function stubClientErrorEvent(errorMeta: Record): ErrorEvent { const error = new Error(); Object.assign(error, errorMeta); return { event: 'error', + url: '', pid: 1234, timestamp: Date.now(), tags: ['connection', 'client', 'error'], @@ -35,7 +37,7 @@ const stubEconnresetEvent = () => stubClientErrorEvent({ code: 'ECONNRESET' }); const stubEpipeEvent = () => stubClientErrorEvent({ errno: 'EPIPE' }); const stubEcanceledEvent = () => stubClientErrorEvent({ errno: 'ECANCELED' }); -function assertDowngraded(transformed) { +function assertDowngraded(transformed: Record) { expect(!!transformed).toBe(true); expect(transformed).toHaveProperty('event', 'log'); expect(transformed).toHaveProperty('tags'); @@ -47,13 +49,13 @@ describe('server logging LogInterceptor', () => { it('transforms ECONNRESET events', () => { const interceptor = new LogInterceptor(); const event = stubEconnresetEvent(); - assertDowngraded(interceptor.downgradeIfEconnreset(event)); + assertDowngraded(interceptor.downgradeIfEconnreset(event)!); }); it('does not match if the tags are not in order', () => { const interceptor = new LogInterceptor(); const event = stubEconnresetEvent(); - event.tags = [...event.tags.slice(1), event.tags[0]]; + event.tags = [...event.tags!.slice(1), event.tags![0]]; expect(interceptor.downgradeIfEconnreset(event)).toBe(null); }); @@ -75,13 +77,13 @@ describe('server logging LogInterceptor', () => { it('transforms EPIPE events', () => { const interceptor = new LogInterceptor(); const event = stubEpipeEvent(); - assertDowngraded(interceptor.downgradeIfEpipe(event)); + assertDowngraded(interceptor.downgradeIfEpipe(event)!); }); it('does not match if the tags are not in order', () => { const interceptor = new LogInterceptor(); const event = stubEpipeEvent(); - event.tags = [...event.tags.slice(1), event.tags[0]]; + event.tags = [...event.tags!.slice(1), event.tags![0]]; expect(interceptor.downgradeIfEpipe(event)).toBe(null); }); @@ -103,13 +105,13 @@ describe('server logging 
LogInterceptor', () => { it('transforms ECANCELED events', () => { const interceptor = new LogInterceptor(); const event = stubEcanceledEvent(); - assertDowngraded(interceptor.downgradeIfEcanceled(event)); + assertDowngraded(interceptor.downgradeIfEcanceled(event)!); }); it('does not match if the tags are not in order', () => { const interceptor = new LogInterceptor(); const event = stubEcanceledEvent(); - event.tags = [...event.tags.slice(1), event.tags[0]]; + event.tags = [...event.tags!.slice(1), event.tags![0]]; expect(interceptor.downgradeIfEcanceled(event)).toBe(null); }); @@ -131,7 +133,7 @@ describe('server logging LogInterceptor', () => { it('transforms https requests when serving http errors', () => { const interceptor = new LogInterceptor(); const event = stubClientErrorEvent({ message: 'Parse Error', code: 'HPE_INVALID_METHOD' }); - assertDowngraded(interceptor.downgradeIfHTTPSWhenHTTP(event)); + assertDowngraded(interceptor.downgradeIfHTTPSWhenHTTP(event)!); }); it('ignores non events', () => { @@ -150,7 +152,7 @@ describe('server logging LogInterceptor', () => { '4584650176:error:1408F09C:SSL routines:ssl3_get_record:http request:../deps/openssl/openssl/ssl/record/ssl3_record.c:322:\n'; const interceptor = new LogInterceptor(); const event = stubClientErrorEvent({ message }); - assertDowngraded(interceptor.downgradeIfHTTPWhenHTTPS(event)); + assertDowngraded(interceptor.downgradeIfHTTPWhenHTTPS(event)!); }); it('ignores non events', () => { diff --git a/src/legacy/server/logging/log_interceptor.js b/packages/kbn-legacy-logging/src/log_interceptor.ts similarity index 81% rename from src/legacy/server/logging/log_interceptor.js rename to packages/kbn-legacy-logging/src/log_interceptor.ts index 2298d83aa28571..2d559dc1ef55cb 100644 --- a/src/legacy/server/logging/log_interceptor.js +++ b/packages/kbn-legacy-logging/src/log_interceptor.ts @@ -19,6 +19,7 @@ import Stream from 'stream'; import { get, isEqual } from 'lodash'; +import { AnyEvent } from './log_events'; /** * Matches error messages when clients connect via HTTP instead of HTTPS; see unit test for full message. 
Warning: this can change when Node @@ -26,25 +27,32 @@ import { get, isEqual } from 'lodash'; */ const OPENSSL_GET_RECORD_REGEX = /ssl3_get_record:http/; -function doTagsMatch(event, tags) { - return isEqual(get(event, 'tags'), tags); +function doTagsMatch(event: AnyEvent, tags: string[]) { + return isEqual(event.tags, tags); } -function doesMessageMatch(errorMessage, match) { - if (!errorMessage) return false; - const isRegExp = match instanceof RegExp; - if (isRegExp) return match.test(errorMessage); +function doesMessageMatch(errorMessage: string, match: RegExp | string) { + if (!errorMessage) { + return false; + } + if (match instanceof RegExp) { + return match.test(errorMessage); + } return errorMessage === match; } // converts the given event into a debug log if it's an error of the given type -function downgradeIfErrorType(errorType, event) { +function downgradeIfErrorType(errorType: string, event: AnyEvent) { const isClientError = doTagsMatch(event, ['connection', 'client', 'error']); - if (!isClientError) return null; + if (!isClientError) { + return null; + } const matchesErrorType = get(event, 'error.code') === errorType || get(event, 'error.errno') === errorType; - if (!matchesErrorType) return null; + if (!matchesErrorType) { + return null; + } const errorTypeTag = errorType.toLowerCase(); @@ -57,12 +65,14 @@ function downgradeIfErrorType(errorType, event) { }; } -function downgradeIfErrorMessage(match, event) { +function downgradeIfErrorMessage(match: RegExp | string, event: AnyEvent) { const isClientError = doTagsMatch(event, ['connection', 'client', 'error']); const errorMessage = get(event, 'error.message'); const matchesErrorMessage = isClientError && doesMessageMatch(errorMessage, match); - if (!matchesErrorMessage) return null; + if (!matchesErrorMessage) { + return null; + } return { event: 'log', @@ -91,7 +101,7 @@ export class LogInterceptor extends Stream.Transform { * * @param {object} - log event */ - downgradeIfEconnreset(event) { + downgradeIfEconnreset(event: AnyEvent) { return downgradeIfErrorType('ECONNRESET', event); } @@ -105,7 +115,7 @@ export class LogInterceptor extends Stream.Transform { * * @param {object} - log event */ - downgradeIfEpipe(event) { + downgradeIfEpipe(event: AnyEvent) { return downgradeIfErrorType('EPIPE', event); } @@ -119,19 +129,19 @@ export class LogInterceptor extends Stream.Transform { * * @param {object} - log event */ - downgradeIfEcanceled(event) { + downgradeIfEcanceled(event: AnyEvent) { return downgradeIfErrorType('ECANCELED', event); } - downgradeIfHTTPSWhenHTTP(event) { + downgradeIfHTTPSWhenHTTP(event: AnyEvent) { return downgradeIfErrorType('HPE_INVALID_METHOD', event); } - downgradeIfHTTPWhenHTTPS(event) { + downgradeIfHTTPWhenHTTPS(event: AnyEvent) { return downgradeIfErrorMessage(OPENSSL_GET_RECORD_REGEX, event); } - _transform(event, enc, next) { + _transform(event: AnyEvent, enc: string, next: Stream.TransformCallback) { const downgraded = this.downgradeIfEconnreset(event) || this.downgradeIfEpipe(event) || diff --git a/src/legacy/server/logging/log_reporter.js b/packages/kbn-legacy-logging/src/log_reporter.ts similarity index 64% rename from src/legacy/server/logging/log_reporter.js rename to packages/kbn-legacy-logging/src/log_reporter.ts index 4afb00b568844b..8ecaf348bac04c 100644 --- a/src/legacy/server/logging/log_reporter.js +++ b/packages/kbn-legacy-logging/src/log_reporter.ts @@ -17,27 +17,21 @@ * under the License. 
*/ +// @ts-expect-error missing type def import { Squeeze } from '@hapi/good-squeeze'; -import { createWriteStream as writeStr } from 'fs'; +import { createWriteStream as writeStr, WriteStream } from 'fs'; -import LogFormatJson from './log_format_json'; -import LogFormatString from './log_format_string'; +import { KbnLoggerJsonFormat } from './log_format_json'; +import { KbnLoggerStringFormat } from './log_format_string'; import { LogInterceptor } from './log_interceptor'; +import { LogFormatConfig } from './log_format'; -// NOTE: legacy logger creates a new stream for each new access -// In https://github.com/elastic/kibana/pull/55937 we reach the max listeners -// default limit of 10 for process.stdout which starts a long warning/error -// thrown every time we start the server. -// In order to keep using the legacy logger until we remove it I'm just adding -// a new hard limit here. -process.stdout.setMaxListeners(25); - -export function getLoggerStream({ events, config }) { +export function getLogReporter({ events, config }: { events: any; config: LogFormatConfig }) { const squeeze = new Squeeze(events); - const format = config.json ? new LogFormatJson(config) : new LogFormatString(config); + const format = config.json ? new KbnLoggerJsonFormat(config) : new KbnLoggerStringFormat(config); const logInterceptor = new LogInterceptor(); - let dest; + let dest: WriteStream | NodeJS.WritableStream; if (config.dest === 'stdout') { dest = process.stdout; } else { diff --git a/src/legacy/server/logging/log_with_metadata.js b/packages/kbn-legacy-logging/src/metadata.ts similarity index 55% rename from src/legacy/server/logging/log_with_metadata.js rename to packages/kbn-legacy-logging/src/metadata.ts index 73e03a154907ac..8b7c2f8f87c590 100644 --- a/src/legacy/server/logging/log_with_metadata.js +++ b/packages/kbn-legacy-logging/src/metadata.ts @@ -16,30 +16,38 @@ * specific language governing permissions and limitations * under the License. 
*/ + import { isPlainObject } from 'lodash'; -import { - metadataSymbol, - attachMetaData, - // eslint-disable-next-line @kbn/eslint/no-restricted-paths -} from '../../../../src/core/server/legacy/logging/legacy_logging_server'; +export const metadataSymbol = Symbol('log message with metadata'); -export const logWithMetadata = { - isLogEvent(eventData) { - return Boolean(isPlainObject(eventData) && eventData[metadataSymbol]); - }, +export interface EventData { + [metadataSymbol]?: EventMetadata; + [key: string]: any; +} - getLogEventData(eventData) { - const { message, metadata } = eventData[metadataSymbol]; - return { - ...metadata, - message, - }; - }, +export interface EventMetadata { + message: string; + metadata: Record; +} + +export const isEventData = (eventData: EventData) => { + return Boolean(isPlainObject(eventData) && eventData[metadataSymbol]); +}; - decorateServer(server) { - server.decorate('server', 'logWithMetadata', (tags, message, metadata = {}) => { - server.log(tags, attachMetaData(message, metadata)); - }); - }, +export const getLogEventData = (eventData: EventData) => { + const { message, metadata } = eventData[metadataSymbol]!; + return { + ...metadata, + message, + }; +}; + +export const attachMetaData = (message: string, metadata: Record = {}) => { + return { + [metadataSymbol]: { + message, + metadata, + }, + }; }; diff --git a/src/legacy/server/logging/rotate/index.ts b/packages/kbn-legacy-logging/src/rotate/index.ts similarity index 92% rename from src/legacy/server/logging/rotate/index.ts rename to packages/kbn-legacy-logging/src/rotate/index.ts index d6b7cfa76f9ee4..2387fc530e58bf 100644 --- a/src/legacy/server/logging/rotate/index.ts +++ b/packages/kbn-legacy-logging/src/rotate/index.ts @@ -20,13 +20,13 @@ import { isMaster, isWorker } from 'cluster'; import { Server } from '@hapi/hapi'; import { LogRotator } from './log_rotator'; -import { KibanaConfig } from '../../kbn_server'; +import { LegacyLoggingConfig } from '../schema'; let logRotator: LogRotator; -export async function setupLoggingRotate(server: Server, config: KibanaConfig) { +export async function setupLoggingRotate(server: Server, config: LegacyLoggingConfig) { // If log rotate is not enabled we skip - if (!config.get('logging.rotate.enabled')) { + if (!config.rotate.enabled) { return; } @@ -38,7 +38,7 @@ export async function setupLoggingRotate(server: Server, config: KibanaConfig) { // We don't want to run logging rotate server if // we are not logging to a file - if (config.get('logging.dest') === 'stdout') { + if (config.dest === 'stdout') { server.log( ['warning', 'logging:rotate'], 'Log rotation is enabled but logging.dest is configured for stdout. Set logging.dest to a file for this setting to take effect.' 
diff --git a/src/legacy/server/logging/rotate/log_rotator.test.ts b/packages/kbn-legacy-logging/src/rotate/log_rotator.test.ts similarity index 93% rename from src/legacy/server/logging/rotate/log_rotator.test.ts rename to packages/kbn-legacy-logging/src/rotate/log_rotator.test.ts index 8f67b47f6949e0..1f6407d2cca30d 100644 --- a/src/legacy/server/logging/rotate/log_rotator.test.ts +++ b/packages/kbn-legacy-logging/src/rotate/log_rotator.test.ts @@ -19,10 +19,10 @@ import del from 'del'; import fs, { existsSync, mkdirSync, statSync, writeFileSync } from 'fs'; -import { LogRotator } from './log_rotator'; import { tmpdir } from 'os'; import { dirname, join } from 'path'; -import lodash from 'lodash'; +import { LogRotator } from './log_rotator'; +import { LegacyLoggingConfig } from '../schema'; const mockOn = jest.fn(); jest.mock('chokidar', () => ({ @@ -32,19 +32,26 @@ jest.mock('chokidar', () => ({ })), })); -lodash.throttle = (fn: any) => fn; +jest.mock('lodash', () => ({ + ...(jest.requireActual('lodash') as any), + throttle: (fn: any) => fn, +})); const tempDir = join(tmpdir(), 'kbn_log_rotator_test'); const testFilePath = join(tempDir, 'log_rotator_test_log_file.log'); -const createLogRotatorConfig: any = (logFilePath: string) => { - return new Map([ - ['logging.dest', logFilePath], - ['logging.rotate.everyBytes', 2], - ['logging.rotate.keepFiles', 2], - ['logging.rotate.usePolling', false], - ['logging.rotate.pollingInterval', 10000], - ] as any); +const createLogRotatorConfig = (logFilePath: string): LegacyLoggingConfig => { + return { + dest: logFilePath, + rotate: { + enabled: true, + keepFiles: 2, + everyBytes: 2, + usePolling: false, + pollingInterval: 10000, + pollingPolicyTestTimeout: 4000, + }, + } as LegacyLoggingConfig; }; const mockServer: any = { @@ -62,7 +69,7 @@ describe('LogRotator', () => { }); afterEach(() => { - del.sync(dirname(testFilePath), { force: true }); + del.sync(tempDir, { force: true }); mockOn.mockClear(); }); @@ -71,14 +78,14 @@ describe('LogRotator', () => { const logRotator = new LogRotator(createLogRotatorConfig(testFilePath), mockServer); jest.spyOn(logRotator, '_sendReloadLogConfigSignal').mockImplementation(() => {}); + await logRotator.start(); expect(logRotator.running).toBe(true); await logRotator.stop(); - const testLogFileDir = dirname(testFilePath); - expect(existsSync(join(testLogFileDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy(); + expect(existsSync(join(tempDir, 'log_rotator_test_log_file.log.0'))).toBeTruthy(); }); it('rotates log file when equal than set limit over time', async () => { diff --git a/src/legacy/server/logging/rotate/log_rotator.ts b/packages/kbn-legacy-logging/src/rotate/log_rotator.ts similarity index 94% rename from src/legacy/server/logging/rotate/log_rotator.ts rename to packages/kbn-legacy-logging/src/rotate/log_rotator.ts index c4054b2daed456..54181e30d6007b 100644 --- a/src/legacy/server/logging/rotate/log_rotator.ts +++ b/packages/kbn-legacy-logging/src/rotate/log_rotator.ts @@ -27,7 +27,7 @@ import { basename, dirname, join, sep } from 'path'; import { Observable } from 'rxjs'; import { first } from 'rxjs/operators'; import { promisify } from 'util'; -import { KibanaConfig } from '../../kbn_server'; +import { LegacyLoggingConfig } from '../schema'; const mkdirAsync = promisify(fs.mkdir); const readdirAsync = promisify(fs.readdir); @@ -37,7 +37,7 @@ const unlinkAsync = promisify(fs.unlink); const writeFileAsync = promisify(fs.writeFile); export class LogRotator { - private readonly config: KibanaConfig; + 
private readonly config: LegacyLoggingConfig; private readonly log: Server['log']; public logFilePath: string; public everyBytes: number; @@ -52,19 +52,19 @@ export class LogRotator { private stalkerUsePollingPolicyTestTimeout: NodeJS.Timeout | null; public shouldUsePolling: boolean; - constructor(config: KibanaConfig, server: Server) { + constructor(config: LegacyLoggingConfig, server: Server) { this.config = config; this.log = server.log.bind(server); - this.logFilePath = config.get('logging.dest'); - this.everyBytes = config.get('logging.rotate.everyBytes'); - this.keepFiles = config.get('logging.rotate.keepFiles'); + this.logFilePath = config.dest; + this.everyBytes = config.rotate.everyBytes; + this.keepFiles = config.rotate.keepFiles; this.running = false; this.logFileSize = 0; this.isRotating = false; this.throttledRotate = throttle(async () => await this._rotate(), 5000); this.stalker = null; - this.usePolling = config.get('logging.rotate.usePolling'); - this.pollingInterval = config.get('logging.rotate.pollingInterval'); + this.usePolling = config.rotate.usePolling; + this.pollingInterval = config.rotate.pollingInterval; this.shouldUsePolling = false; this.stalkerUsePollingPolicyTestTimeout = null; } @@ -128,7 +128,10 @@ export class LogRotator { }; // setup conditions that would fire the observable - this.stalkerUsePollingPolicyTestTimeout = setTimeout(() => completeFn(true), 15000); + this.stalkerUsePollingPolicyTestTimeout = setTimeout( + () => completeFn(true), + this.config.rotate.pollingPolicyTestTimeout || 15000 + ); testWatcher.on('change', () => completeFn(false)); testWatcher.on('error', () => completeFn(true)); @@ -152,7 +155,7 @@ export class LogRotator { } async _startLogFileSizeMonitor() { - this.usePolling = this.config.get('logging.rotate.usePolling'); + this.usePolling = this.config.rotate.usePolling; this.shouldUsePolling = await this._shouldUsePolling(); if (this.usePolling && !this.shouldUsePolling) { diff --git a/packages/kbn-legacy-logging/src/schema.ts b/packages/kbn-legacy-logging/src/schema.ts new file mode 100644 index 00000000000000..5f0e4fe89422b8 --- /dev/null +++ b/packages/kbn-legacy-logging/src/schema.ts @@ -0,0 +1,89 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import Joi from 'joi'; + +const HANDLED_IN_KIBANA_PLATFORM = Joi.any().description( + 'This key is handled in the new platform ONLY' +); + +export interface LegacyLoggingConfig { + silent: boolean; + quiet: boolean; + verbose: boolean; + events: Record; + dest: string; + filter: Record; + json: boolean; + timezone?: string; + rotate: { + enabled: boolean; + everyBytes: number; + keepFiles: number; + pollingInterval: number; + usePolling: boolean; + pollingPolicyTestTimeout?: number; + }; +} + +export const legacyLoggingConfigSchema = Joi.object() + .keys({ + appenders: HANDLED_IN_KIBANA_PLATFORM, + loggers: HANDLED_IN_KIBANA_PLATFORM, + root: HANDLED_IN_KIBANA_PLATFORM, + + silent: Joi.boolean().default(false), + + quiet: Joi.boolean().when('silent', { + is: true, + then: Joi.boolean().default(true).valid(true), + otherwise: Joi.boolean().default(false), + }), + + verbose: Joi.boolean().when('quiet', { + is: true, + then: Joi.valid(false).default(false), + otherwise: Joi.boolean().default(false), + }), + events: Joi.any().default({}), + dest: Joi.string().default('stdout'), + filter: Joi.any().default({}), + json: Joi.boolean().when('dest', { + is: 'stdout', + then: Joi.boolean().default(!process.stdout.isTTY), + otherwise: Joi.boolean().default(true), + }), + timezone: Joi.string(), + rotate: Joi.object() + .keys({ + enabled: Joi.boolean().default(false), + everyBytes: Joi.number() + // > 1MB + .greater(1048576) + // < 1GB + .less(1073741825) + // 10MB + .default(10485760), + keepFiles: Joi.number().greater(2).less(1024).default(7), + pollingInterval: Joi.number().greater(5000).less(3600000).default(10000), + usePolling: Joi.boolean().default(false), + }) + .default(), + }) + .default(); diff --git a/packages/kbn-legacy-logging/src/setup_logging.ts b/packages/kbn-legacy-logging/src/setup_logging.ts new file mode 100644 index 00000000000000..103e81249a136d --- /dev/null +++ b/packages/kbn-legacy-logging/src/setup_logging.ts @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// @ts-expect-error missing typedef +import good from '@elastic/good'; +import { Server } from '@hapi/hapi'; +import { LegacyLoggingConfig } from './schema'; +import { getLoggingConfiguration } from './get_logging_config'; + +export async function setupLogging( + server: Server, + config: LegacyLoggingConfig, + opsInterval: number +) { + // NOTE: legacy logger creates a new stream for each new access + // In https://github.com/elastic/kibana/pull/55937 we reach the max listeners + // default limit of 10 for process.stdout which starts a long warning/error + // thrown every time we start the server. + // In order to keep using the legacy logger until we remove it I'm just adding + // a new hard limit here. 
+ process.stdout.setMaxListeners(25); + + return await server.register({ + plugin: good, + options: getLoggingConfiguration(config, opsInterval), + }); +} + +export function reconfigureLogging( + server: Server, + config: LegacyLoggingConfig, + opsInterval: number +) { + const loggingOptions = getLoggingConfiguration(config, opsInterval); + (server.plugins as any)['@elastic/good'].reconfigure(loggingOptions); +} diff --git a/packages/kbn-legacy-logging/src/test_utils/index.ts b/packages/kbn-legacy-logging/src/test_utils/index.ts new file mode 100644 index 00000000000000..f13c869b563a29 --- /dev/null +++ b/packages/kbn-legacy-logging/src/test_utils/index.ts @@ -0,0 +1,20 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export { createListStream, createPromiseFromStreams } from './streams'; diff --git a/packages/kbn-legacy-logging/src/test_utils/streams.ts b/packages/kbn-legacy-logging/src/test_utils/streams.ts new file mode 100644 index 00000000000000..0f37a13f8a478b --- /dev/null +++ b/packages/kbn-legacy-logging/src/test_utils/streams.ts @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { pipeline, Writable, Readable } from 'stream'; + +/** + * Create a Readable stream that provides the items + * from a list as objects to subscribers + * + * @param {Array} items - the list of items to provide + * @return {Readable} + */ +export function createListStream(items: T | T[] = []) { + const queue = Array.isArray(items) ? [...items] : [items]; + + return new Readable({ + objectMode: true, + read(size) { + queue.splice(0, size).forEach((item) => { + this.push(item); + }); + + if (!queue.length) { + this.push(null); + } + }, + }); +} + +/** + * Take an array of streams, pipe the output + * from each one into the next, listening for + * errors from any of the streams, and then resolve + * the promise once the final stream has finished + * writing/reading. + * + * If the last stream is readable, it's final value + * will be provided as the promise value. 
+ *
+ * Errors emitted from any stream will cause
+ * the promise to be rejected with that error.
+ *
+ * @param {Array} streams
+ * @return {Promise}
+ */
+
+function isReadable(stream: Readable | Writable): stream is Readable {
+  return 'read' in stream && typeof stream.read === 'function';
+}
+
+export async function createPromiseFromStreams<T>(streams: [Readable, ...Writable[]]): Promise<T> {
+  let finalChunk: any;
+  const last = streams[streams.length - 1];
+  if (!isReadable(last) && streams.length === 1) {
+    // For a nicer error than what stream.pipeline throws
+    throw new Error('A minimum of 2 streams is required when a non-readable stream is given');
+  }
+  if (isReadable(last)) {
+    // We are pushing a writable stream to capture the last chunk
+    streams.push(
+      new Writable({
+        // Use object mode even when the "last" stream isn't. This allows us to
+        // capture the last chunk as-is.
+        objectMode: true,
+        write(chunk, enc, done) {
+          finalChunk = chunk;
+          done();
+        },
+      })
+    );
+  }
+
+  return new Promise((resolve, reject) => {
+    // @ts-expect-error 'pipeline' doesn't support variable length of arguments
+    pipeline(...streams, (err) => {
+      if (err) return reject(err);
+      resolve(finalChunk);
+    });
+  });
+}
diff --git a/src/legacy/server/logging/apply_filters_to_keys.test.js b/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.test.ts
similarity index 96%
rename from src/legacy/server/logging/apply_filters_to_keys.test.js
rename to packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.test.ts
index e007157e9488b6..bfcc7b1c908d4a 100644
--- a/src/legacy/server/logging/apply_filters_to_keys.test.js
+++ b/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.test.ts
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-import applyFiltersToKeys from './apply_filters_to_keys';
+import { applyFiltersToKeys } from './apply_filters_to_keys';
 
 describe('applyFiltersToKeys(obj, actionsByKey)', function () {
   it('applies for each key+prop in actionsByKey', function () {
diff --git a/src/legacy/server/logging/apply_filters_to_keys.js b/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.ts
similarity index 83%
rename from src/legacy/server/logging/apply_filters_to_keys.js
rename to packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.ts
index 63e5ab4c62f298..8fd7eac57fc32e 100644
--- a/src/legacy/server/logging/apply_filters_to_keys.js
+++ b/packages/kbn-legacy-logging/src/utils/apply_filters_to_keys.ts
@@ -17,15 +17,15 @@
  * under the License.
  */
 
-function toPojo(obj) {
+function toPojo(obj: Record<string, unknown>) {
   return JSON.parse(JSON.stringify(obj));
 }
 
-function replacer(match, group) {
+function replacer(match: string, group: any[]) {
   return new Array(group.length + 1).join('X');
 }
 
-function apply(obj, key, action) {
+function apply(obj: Record<string, any>, key: string, action: string) {
   for (const k in obj) {
     if (obj.hasOwnProperty(k)) {
       let val = obj[k];
@@ -44,14 +44,17 @@ function apply(obj, key, action) {
             }
           }
         }
       } else if (typeof val === 'object') {
-        val = apply(val, key, action);
+        val = apply(val as Record<string, any>, key, action);
       }
     }
   }
   return obj;
 }
-export default function (obj, actionsByKey) {
+export function applyFiltersToKeys(
+  obj: Record<string, unknown>,
+  actionsByKey: Record<string, string>
+) {
   return Object.keys(actionsByKey).reduce((output, key) => {
     return apply(output, key, actionsByKey[key]);
   }, toPojo(obj));
diff --git a/packages/kbn-legacy-logging/src/utils/index.ts b/packages/kbn-legacy-logging/src/utils/index.ts
new file mode 100644
index 00000000000000..5841e7b6082843
--- /dev/null
+++ b/packages/kbn-legacy-logging/src/utils/index.ts
@@ -0,0 +1,20 @@
+/*
+ * Licensed to Elasticsearch B.V. under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch B.V. licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export { applyFiltersToKeys } from './apply_filters_to_keys';
diff --git a/packages/kbn-legacy-logging/tsconfig.json b/packages/kbn-legacy-logging/tsconfig.json
new file mode 100644
index 00000000000000..8fd202a2dce8ba
--- /dev/null
+++ b/packages/kbn-legacy-logging/tsconfig.json
@@ -0,0 +1,11 @@
+{
+  "extends": "../../tsconfig.base.json",
+  "compilerOptions": {
+    "outDir": "target",
+    "stripInternal": false,
+    "declaration": true,
+    "declarationMap": true,
+    "types": ["jest", "node"]
+  },
+  "include": ["./src/**/*"]
+}
diff --git a/packages/kbn-legacy-logging/yarn.lock b/packages/kbn-legacy-logging/yarn.lock
new file mode 120000
index 00000000000000..3f82ebc9cdbae3
--- /dev/null
+++ b/packages/kbn-legacy-logging/yarn.lock
@@ -0,0 +1 @@
+../../yarn.lock
\ No newline at end of file
diff --git a/src/core/server/legacy/logging/appenders/legacy_appender.test.ts b/src/core/server/legacy/logging/appenders/legacy_appender.test.ts
index 697e5bc37d6027..c4dca1b84f4eb8 100644
--- a/src/core/server/legacy/logging/appenders/legacy_appender.test.ts
+++ b/src/core/server/legacy/logging/appenders/legacy_appender.test.ts
@@ -17,10 +17,10 @@
  * under the License.
  */
 
-jest.mock('../legacy_logging_server');
+jest.mock('@kbn/legacy-logging');
 
 import { LogRecord, LogLevel } from '../../../logging';
-import { LegacyLoggingServer } from '../legacy_logging_server';
+import { LegacyLoggingServer } from '@kbn/legacy-logging';
 import { LegacyAppender } from './legacy_appender';
 
 afterEach(() => (LegacyLoggingServer as any).mockClear());
diff --git a/src/core/server/legacy/logging/appenders/legacy_appender.ts b/src/core/server/legacy/logging/appenders/legacy_appender.ts
index 67337c7d676297..286448231d23f1 100644
--- a/src/core/server/legacy/logging/appenders/legacy_appender.ts
+++ b/src/core/server/legacy/logging/appenders/legacy_appender.ts
@@ -18,8 +18,8 @@
  */
 
 import { schema } from '@kbn/config-schema';
-import { DisposableAppender, LogRecord } from '../../../logging';
-import { LegacyLoggingServer } from '../legacy_logging_server';
+import { LegacyLoggingServer } from '@kbn/legacy-logging';
+import { DisposableAppender, LogRecord } from '@kbn/logging';
 import { LegacyVars } from '../../types';
 
 export interface LegacyAppenderConfig {
diff --git a/src/core/server/logging/logging_system.test.ts b/src/core/server/logging/logging_system.test.ts
index afe58ddff92aa2..2fca2f35cb0323 100644
--- a/src/core/server/logging/logging_system.test.ts
+++ b/src/core/server/logging/logging_system.test.ts
@@ -25,7 +25,7 @@ jest.mock('fs', () => ({
 
 const dynamicProps = { process: { pid: expect.any(Number) } };
 
-jest.mock('../../../legacy/server/logging/rotate', () => ({
+jest.mock('@kbn/legacy-logging', () => ({
   setupLoggingRotate: jest.fn().mockImplementation(() => Promise.resolve({})),
 }));
 
diff --git a/src/legacy/server/config/schema.js b/src/legacy/server/config/schema.js
index 39df3990ff2ff2..a9b5eec45a75bd 100644
--- a/src/legacy/server/config/schema.js
+++ b/src/legacy/server/config/schema.js
@@ -19,6 +19,7 @@
 import Joi from 'joi';
 import os from 'os';
+import { legacyLoggingConfigSchema } from '@kbn/legacy-logging';
 
 const HANDLED_IN_NEW_PLATFORM = Joi.any().description(
   'This key is handled in the new platform ONLY'
 );
@@ -77,51 +78,7 @@ export default () =>
 
     uiSettings: HANDLED_IN_NEW_PLATFORM,
 
-    logging: Joi.object()
-      .keys({
-        appenders: HANDLED_IN_NEW_PLATFORM,
-        loggers: HANDLED_IN_NEW_PLATFORM,
-        root: HANDLED_IN_NEW_PLATFORM,
-
-        silent: Joi.boolean().default(false),
-
-        quiet: Joi.boolean().when('silent', {
-          is: true,
-          then: Joi.default(true).valid(true),
-          otherwise: Joi.default(false),
-        }),
-
-        verbose: Joi.boolean().when('quiet', {
-          is: true,
-          then: Joi.valid(false).default(false),
-          otherwise: Joi.default(false),
-        }),
-        events: Joi.any().default({}),
-        dest: Joi.string().default('stdout'),
-        filter: Joi.any().default({}),
-        json: Joi.boolean().when('dest', {
-          is: 'stdout',
-          then: Joi.default(!process.stdout.isTTY),
-          otherwise: Joi.default(true),
-        }),
-        timezone: Joi.string(),
-        rotate: Joi.object()
-          .keys({
-            enabled: Joi.boolean().default(false),
-            everyBytes: Joi.number()
-              // > 1MB
-              .greater(1048576)
-              // < 1GB
-              .less(1073741825)
-              // 10MB
-              .default(10485760),
-            keepFiles: Joi.number().greater(2).less(1024).default(7),
-            pollingInterval: Joi.number().greater(5000).less(3600000).default(10000),
-            usePolling: Joi.boolean().default(false),
-          })
-          .default(),
-      })
-      .default(),
+    logging: legacyLoggingConfigSchema,
 
     ops: Joi.object({
       interval: Joi.number().default(5000),
diff --git a/src/legacy/server/kbn_server.js b/src/legacy/server/kbn_server.js
index 013da35d2acb7e..b61a86326ca1a5 100644
--- a/src/legacy/server/kbn_server.js
+++ b/src/legacy/server/kbn_server.js
@@ -18,12 +18,12 @@
  */
 
 import { constant, once, compact, flatten } from 'lodash';
+import { reconfigureLogging } from '@kbn/legacy-logging';
 import { isWorker } from 'cluster';
 // eslint-disable-next-line @kbn/eslint/no-restricted-paths
 import { fromRoot, pkg } from '../../core/server/utils';
 
 import { Config } from './config';
-import loggingConfiguration from './logging/configuration';
 import httpMixin from './http';
 import { coreMixin } from './core';
 import { loggingMixin } from './logging';
@@ -154,13 +154,17 @@ export default class KbnServer {
 
   applyLoggingConfiguration(settings) {
     const config = Config.withDefaultSchema(settings);
-    const loggingOptions = loggingConfiguration(config);
+
+    const loggingConfig = config.get('logging');
+    const opsConfig = config.get('ops');
+
     const subset = {
-      ops: config.get('ops'),
-      logging: config.get('logging'),
+      ops: opsConfig,
+      logging: loggingConfig,
     };
     const plain = JSON.stringify(subset, null, 2);
     this.server.log(['info', 'config'], 'New logging configuration:\n' + plain);
-    this.server.plugins['@elastic/good'].reconfigure(loggingOptions);
+
+    reconfigureLogging(this.server, loggingConfig, opsConfig.interval);
   }
 }
diff --git a/src/legacy/server/logging/index.js b/src/legacy/server/logging/index.js
index 5182de0b7f6130..cb252ba37dc4ef 100644
--- a/src/legacy/server/logging/index.js
+++ b/src/legacy/server/logging/index.js
@@ -17,21 +17,16 @@
  * under the License.
  */
 
-import good from '@elastic/good';
-import loggingConfiguration from './configuration';
-import { logWithMetadata } from './log_with_metadata';
-import { setupLoggingRotate } from './rotate';
+import { setupLogging, setupLoggingRotate, attachMetaData } from '@kbn/legacy-logging';
 
-export async function setupLogging(server, config) {
-  return await server.register({
-    plugin: good,
-    options: loggingConfiguration(config),
+export async function loggingMixin(kbnServer, server, config) {
+  server.decorate('server', 'logWithMetadata', (tags, message, metadata = {}) => {
+    server.log(tags, attachMetaData(message, metadata));
   });
-}
 
-export async function loggingMixin(kbnServer, server, config) {
-  logWithMetadata.decorateServer(server);
+  const loggingConfig = config.get('logging');
+  const opsInterval = config.get('ops.interval');
 
-  await setupLogging(server, config);
-  await setupLoggingRotate(server, config);
+  await setupLogging(server, loggingConfig, opsInterval);
+  await setupLoggingRotate(server, loggingConfig);
 }
diff --git a/yarn.lock b/yarn.lock
index e75fb58216415c..0cf4328669472b 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2657,6 +2657,10 @@
   version "0.0.0"
   uid ""
 
+"@kbn/legacy-logging@link:packages/kbn-legacy-logging":
+  version "0.0.0"
+  uid ""
+
 "@kbn/logging@link:packages/kbn-logging":
   version "0.0.0"
   uid ""
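For reviewers who want to exercise the extracted package outside of Kibana, here is a minimal, illustrative sketch of how the exports wired up above (`legacyLoggingConfigSchema`, `setupLogging`, `setupLoggingRotate`, `reconfigureLogging`, `attachMetaData`) fit together. The standalone hapi server, the `rawLoggingSettings` object, and the port are assumptions made only for the example; inside Kibana these values come from the legacy config service, as the `loggingMixin` and `kbn_server.js` hunks show.

```ts
import { Server } from '@hapi/hapi';
import {
  legacyLoggingConfigSchema,
  setupLogging,
  setupLoggingRotate,
  reconfigureLogging,
  attachMetaData,
} from '@kbn/legacy-logging';

async function main() {
  // Hypothetical raw settings; Kibana normally reads these from kibana.yml / CLI flags.
  const rawLoggingSettings = { dest: 'stdout', json: false, verbose: true };
  const opsInterval = 5000;

  // The exported Joi schema validates the settings and fills in the defaults.
  const { value: loggingConfig, error } = legacyLoggingConfigSchema.validate(rawLoggingSettings);
  if (error) throw error;

  const server = new Server({ port: 5601 });

  // Registers the @elastic/good plugin with the generated configuration,
  // then starts log rotation (a no-op unless `logging.rotate.enabled` is set).
  await setupLogging(server, loggingConfig, opsInterval);
  await setupLoggingRotate(server, loggingConfig);

  await server.start();

  // attachMetaData wraps the message so structured metadata survives the legacy pipeline.
  server.log(['info', 'config'], attachMetaData('server started', { port: 5601 }));

  // If the logging settings change at runtime, the already-registered plugin
  // is reconfigured in place instead of being registered again.
  reconfigureLogging(server, loggingConfig, opsInterval);
}

main().catch((err) => {
  // eslint-disable-next-line no-console
  console.error(err);
  process.exit(1);
});
```

The final `reconfigureLogging` call mirrors what `applyLoggingConfiguration` now does in `kbn_server.js`: it rebuilds the `@elastic/good` options from the validated config and hands them to the plugin's `reconfigure` hook rather than re-registering it.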