diff --git a/.changeset/lazy-donuts-judge.md b/.changeset/lazy-donuts-judge.md new file mode 100644 index 00000000..7501a6ca --- /dev/null +++ b/.changeset/lazy-donuts-judge.md @@ -0,0 +1,6 @@ +--- +'@hyperdx/node-logger': minor +'@hyperdx/node-opentelemetry': minor +--- + +feat + migration: Use Otel logs module diff --git a/packages/node-logger/dummy.js b/packages/node-logger/dummy.js index 8ce3a1aa..768504f4 100644 --- a/packages/node-logger/dummy.js +++ b/packages/node-logger/dummy.js @@ -1,5 +1,5 @@ const express = require('express'); -const PORT = parseInt(process.env.PORT || '7777'); +const PORT = parseInt(process.env.PORT || '7788'); const winston = require('winston'); const pino = require('pino'); const app = express(); @@ -10,12 +10,17 @@ const { HyperDXWinston } = require('./build/src'); // RANDOM API KEY const HDX_API_KEY = ''; const HDX_API_KEY2 = ''; -const HDX_API_URL = 'http://localhost:8002'; +const HDX_API_URL = 'http://localhost:4318/v1/logs'; +const DETECT_RESOURCES = false; const logger = new Logger({ - apiKey: HDX_API_KEY, + headers: { + Authorization: HDX_API_KEY, + }, baseUrl: HDX_API_URL, service: 'native', + bufferSize: 100, + queueSize: 10, }); const winstonLogger = winston.createLogger({ @@ -24,6 +29,7 @@ const winstonLogger = winston.createLogger({ transports: [ new winston.transports.Console(), new HyperDXWinston({ + detectResources: DETECT_RESOURCES, apiKey: HDX_API_KEY, maxLevel: 'info', service: 'winston', @@ -38,6 +44,7 @@ const winstonLogger2 = winston.createLogger({ transports: [ new winston.transports.Console(), new HyperDXWinston({ + detectResources: DETECT_RESOURCES, apiKey: HDX_API_KEY2, maxLevel: 'info', service: 'winston', @@ -52,6 +59,7 @@ const pinoLogger = pino( { target: './build/src/pino', options: { + detectResources: DETECT_RESOURCES, apiKey: HDX_API_KEY, service: 'pino', baseUrl: HDX_API_URL, @@ -67,15 +75,35 @@ app.get('/', (req, res) => { headers: req.headers, method: req.method, url: req.url, - query: req.query, + query: { + foo: { + bar: { + baz: 'qux', + }, + }, + }, }); - winstonLogger.info('🍕'); - winstonLogger.error({ + winstonLogger.info('🍕', { message: 'BANG !!!', - headers: req.headers, + foo: 'bar', + }); + winstonLogger.info('🍕', { + foo: 'bar', + }); + winstonLogger.info( + { + foo: 'bar', + }, + { + foo1: 'bar1', + }, + ); + pinoLogger.error('🍕'); + pinoLogger.error({ + pizza: '🍕', + message: 'BANG !!!', + foo: 'bar', }); - winstonLogger2.info('🤯🤯🤯'); - pinoLogger.info('🍕'); res.send('Hello World'); }); diff --git a/packages/node-logger/package.json b/packages/node-logger/package.json index d150a319..4003f105 100644 --- a/packages/node-logger/package.json +++ b/packages/node-logger/package.json @@ -31,22 +31,25 @@ }, "dependencies": { "@nestjs/common": "^9.4.2", - "axios": "^1.6.8", + "@opentelemetry/api-logs": "~0.51.0", + "@opentelemetry/core": "~1.24.0", + "@opentelemetry/exporter-logs-otlp-http": "~0.51.0", + "@opentelemetry/resources": "~1.24.0", + "@opentelemetry/sdk-logs": "~0.51.0", + "@opentelemetry/semantic-conventions": "~1.24.0", "debug": "^4.3.4", "json-stringify-safe": "^5.0.1", - "lodash": "^4.17.21", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", "nest-winston": "^1.9.2", "pino-abstract-transport": "^1.0.0", "reflect-metadata": "^0.1.13", "rxjs": "^7.8.1", - "strip-ansi": "^6.0.1", "tslib": "^2.5.3", "winston": "^3.9.0", - "winston-transport": "^4.5.0" - }, - "devDependencies": { - "pino": "^8.14.1" + "winston-transport": "^4.7.0" }, + "devDependencies": {}, "peerDependencies": {}, 
"keywords": [ "winston", diff --git a/packages/node-logger/src/_logger/index.d.ts b/packages/node-logger/src/_logger/index.d.ts deleted file mode 100644 index d641eb67..00000000 --- a/packages/node-logger/src/_logger/index.d.ts +++ /dev/null @@ -1,32 +0,0 @@ -interface ILoggerOptions { - token: string; - host?: string; - type?: string; - sendIntervalMs?: number; - bufferSize?: number; - numberOfRetries?: number; - supressErrors?: boolean; - addTimestampWithNanoSecs?: boolean; - compress?: boolean; - internalLogger?: { log(message: string, ...args: any[]): any } & Record< - string, - any - >; - protocol?: string; - setUserAgent?: boolean; - port?: string; - timeout?: number; - sleepUntilNextRetry?: number; - callback?: (err: Error, bulk: object) => void; - extraFields?: {}; -} - -interface ILogger extends ILoggerOptions { - jsonToString(json: string): string; - log(msg: any, obj?: object): void; - close(): void; - sendAndClose(callback?: (error: Error, bulk: object) => void): void; -} - -export function createLogger(options: ILoggerOptions): ILogger; -export function jsonToString(json: any): string; diff --git a/packages/node-logger/src/_logger/index.js b/packages/node-logger/src/_logger/index.js deleted file mode 100644 index 0281438c..00000000 --- a/packages/node-logger/src/_logger/index.js +++ /dev/null @@ -1,405 +0,0 @@ -const { networkInterfaces } = require('os'); -const dgram = require('dgram'); -const zlib = require('zlib'); - -const axios = require('axios'); // WARNING: axios v1 might break due to esm module import -const hdx = require('debug')('hyperdx'); -const stringifySafe = require('json-stringify-safe'); -const { assign } = require('lodash'); - -const nanoSecDigits = 9; - -exports.version = require('../../package.json').version; - -const jsonToString = (json) => { - try { - return JSON.stringify(json); - } catch (ex) { - hdx(`Failed to stringify json. 
e = ${ex}`); - return stringifySafe(json, null, null, () => {}); - } -}; - -const messagesToBody = (messages) => messages.map(jsonToString).join(`\n`); - -const UNAVAILABLE_CODES = [ - 'ETIMEDOUT', - 'ECONNRESET', - 'ESOCKETTIMEDOUT', - 'ECONNABORTED', -]; - -const zlibPromised = (body) => - new Promise((resolve, reject) => { - zlib.gzip(body, (err, res) => { - if (err) return reject(err); - return resolve(res); - }); - }); - -const protocolToPortMap = { - udp: 5050, - http: 8002, // dev - https: 443, -}; - -const USER_AGENT = 'HyperDX NodeJS'; - -class HyperdxLogger { - constructor({ - token, - host = 'in.hyperdx.io', - type = 'nodejs', - sendIntervalMs = 2 * 1000, - bufferSize = 100, - numberOfRetries = 3, - supressErrors = false, - addTimestampWithNanoSecs = false, - compress = true, - protocol = 'https', - port, - timeout, - sleepUntilNextRetry = 2 * 1000, - callback = this._defaultCallback, - setUserAgent = true, - extraFields = {}, - }) { - if (!token) { - throw new Error('You are required to supply a token for logging.'); - } - - this.token = token; - this.host = host; - this.type = type; - this.sendIntervalMs = sendIntervalMs; - this.bufferSize = bufferSize; - this.numberOfRetries = numberOfRetries; - this.supressErrors = supressErrors; - this.addTimestampWithNanoSecs = addTimestampWithNanoSecs; - this.compress = compress; - this.sleepUntilNextRetry = sleepUntilNextRetry; - this.setUserAgent = setUserAgent; - this.timer = null; - this.closed = false; - - this.protocol = protocol; - this._setProtocol(port); - this.url = `${this.protocol}://${this.host}:${this.port}`; - - this.axiosInstance = axios.create(); - this.axiosInstance.defaults.headers.post = { - Host: this.host, - Accept: '*/*', - Authorization: `Bearer ${this.token}`, - 'Content-Type': 'application/json', - ...(this.setUserAgent ? { 'user-agent': USER_AGENT } : {}), - ...(this.compress ? { 'content-encoding': 'gzip' } : {}), - }; - - /* - Callback method executed on each bulk of messages sent to hyperdx. - If the bulk failed, it will be called: callback(exception), otherwise upon - success it will called as callback() - */ - this.callback = callback; - - /* - * the read/write/connection timeout in milliseconds of the outgoing HTTP request - */ - this.timeout = timeout; - - // build the url for logging - - this.messages = []; - this.bulkId = 1; - this.extraFields = extraFields; - this.typeOfIP = 'IPv4'; - } - - _setProtocol(port) { - if (!protocolToPortMap[this.protocol]) { - throw new Error( - `Invalid protocol defined. Valid options are : ${JSON.stringify( - Object.keys(protocolToPortMap), - )}`, - ); - } - this.port = port || protocolToPortMap[this.protocol]; - - if (this.protocol === 'udp') { - this.udpClient = dgram.createSocket('udp4'); - } - } - - _defaultCallback(err) { - if (err && !this.supressErrors) { - hdx(`[hyperdx-log-sender] error: ${err}`, err); - } - } - - sendAndClose(callback) { - this.callback = callback || this._defaultCallback; - this._debug('Sending last messages and closing...'); - this._popMsgsAndSend(); - clearTimeout(this.timer); - - if (this.protocol === 'udp') { - this.udpClient.close(); - } - } - - _timerSend() { - this._debug('Timer fired. Trying to wake up and send messages...'); - if (this.messages.length > 0) { - this._debug( - `Woke up and saw ${this.messages.length} messages to send. 
Sending now...`, - ); - this._popMsgsAndSend(); - } - - this.timer = setTimeout(() => { - this._timerSend(); - }, this.sendIntervalMs); - } - - _sendMessagesUDP() { - const udpSentCallback = (err) => { - if (err) { - this._debug(`Error while sending udp packets. err = ${err}`); - this.callback( - new Error(`Failed to send udp log message. err = ${err}`), - ); - } - }; - - this.messages.forEach((message) => { - const msg = message; - msg.token = this.token; - const buff = Buffer.from(stringifySafe(msg)); - - this._debug('Starting to send messages via udp.'); - this.udpClient.send( - buff, - 0, - buff.length, - this.port, - this.host, - udpSentCallback, - ); - }); - } - - close() { - // clearing the timer allows the node event loop to quit when needed - clearTimeout(this.timer); - - // send pending messages, if any - if (this.messages.length > 0) { - this._debug('Closing, purging messages.'); - this._popMsgsAndSend(); - } - - if (this.protocol === 'udp') { - this.udpClient.close(); - } - - // no more logging allowed - this.closed = true; - } - - /** - * Attach a timestamp to the log record. - * If @timestamp already exists, use it. Else, use current time. - * The same goes for @timestamp_nano - * @param msg - The message (Object) to append the timestamp to. - * @private - */ - _addTimestamp(msg) { - const now = new Date().toISOString(); - msg['@timestamp'] = msg['@timestamp'] || now; - - if (this.addTimestampWithNanoSecs) { - const time = process.hrtime(); - msg['@timestamp_nano'] = - msg['@timestamp_nano'] || - [now, time[1].toString().padStart(nanoSecDigits, '0')].join('-'); - } - } - - /** - * Attach a Source IP to the log record. - * @param msg - The message (Object) to append the timestamp to. - * @private - */ - _addSourceIP(msg) { - const { en0 } = networkInterfaces(); - if (en0 && en0.length > 0) { - const relevantIPs = []; - en0.forEach((ip) => { - // Skip over non-IPv4 and internal (i.e. 127.0.0.1) addresses - // 'IPv4' is in Node <= 17, from 18 it's a number 4 or 6 - const familyV4Value = typeof ip.family === 'string' ? this.typeOfIP : 4; - if (ip.family === familyV4Value && !ip.internal) { - relevantIPs.push(ip.address); - // msg.sourceIP = ip.address; - } - }); - - if (relevantIPs.length > 1) { - relevantIPs.forEach((ip, idx) => { - msg[`sourceIP_${idx}`] = ip; - }); - } else if (relevantIPs.length === 1) { - const [sourceIP] = relevantIPs; - msg.sourceIP = sourceIP; - } - } - } - - log(msg, obj) { - if (this.closed === true) { - throw new Error('Logging into a logger that has been closed!'); - } - if (![null, undefined].includes(obj)) { - msg += JSON.stringify(obj); - } - if (typeof msg === 'string') { - msg = { - message: msg, - }; - } - this._addSourceIP(msg); - msg = assign(msg, this.extraFields); - - // FIXME: no need to attach type (only used for api param) - // if (!msg.type) { - // msg.type = this.type; - // } - - // FIXME: no need to attach timestamp - // this._addTimestamp(msg); - - this.messages.push(msg); - if (this.messages.length >= this.bufferSize) { - this._debug('Buffer is full - sending bulk'); - this._popMsgsAndSend(); - } - } - - _popMsgsAndSend() { - if (this.protocol === 'udp') { - this._debug('Sending messages via udp'); - this._sendMessagesUDP(); - } else { - const bulk = this._createBulk(this.messages); - this._debug(`Sending bulk #${bulk.id}`); - this._send(bulk); - } - - this.messages = []; - } - - _createBulk(msgs) { - const bulk = {}; - // creates a new copy of the array. 
Objects references are copied (no deep copy) - bulk.msgs = msgs.slice(); - bulk.attemptNumber = 1; - bulk.sleepUntilNextRetry = this.sleepUntilNextRetry; - bulk.id = this.bulkId; // TODO test - this.bulkId += 1; - - return bulk; - } - - _debug(msg) { - hdx(`[hyperdx-log-sender] ${msg}`); - } - - _tryAgainIn(sleepTimeMs, bulk) { - this._debug( - `Bulk #${bulk.id} - Trying again in ${sleepTimeMs}[ms], attempt no. ${bulk.attemptNumber}`, - ); - setTimeout(() => { - this._send(bulk); - }, sleepTimeMs); - } - - _send(bulk) { - const body = messagesToBody(bulk.msgs); - - if (typeof this.timeout !== 'undefined') { - this.axiosInstance.defaults.timeout = this.timeout; - } - - return Promise.resolve() - .then(() => { - if (this.compress) { - return zlibPromised(body); - } - return body; - }) - .then((finalBody) => { - this._tryToSend(finalBody, bulk); - }) - .catch((err) => { - this._debug(`Error while compressing message body. err = ${err}`); - this.callback( - new Error(`Failed to compress message body. err = ${err}`), - ); - }); - } - - _tryToSend(body, bulk) { - this._debug(`Sending bulk of ${bulk.msgs.length} logs`); - return this.axiosInstance - .post(this.url, body, { - params: { - hdx_platform: this.type, - }, - }) - .then(() => { - this._debug(`Bulk #${bulk.id} - sent successfully`); - this.callback(); - }) - .catch((err) => { - // In rare cases server is busy - const errorCode = err.code; - if (UNAVAILABLE_CODES.includes(errorCode)) { - if (bulk.attemptNumber >= this.numberOfRetries) { - return this.callback( - new Error( - `Failed after ${bulk.attemptNumber} retries on error = ${err}`, - ), - bulk, - ); - } - this._debug(`Bulk #${bulk.id} - failed on error: ${err}`); - const sleepTimeMs = bulk.sleepUntilNextRetry; - bulk.sleepUntilNextRetry *= 2; - bulk.attemptNumber += 1; - - return this._tryAgainIn(sleepTimeMs, bulk); - } - if (err.statusCode !== 200) { - return this.callback( - new Error( - `There was a problem with the request.\nResponse: ${err.statusCode}: ${err.message}`, - ), - bulk, - ); - } - return this.callback(err, bulk); - }); - } -} - -const createLogger = (options) => { - const l = new HyperdxLogger(options); - l._timerSend(); - return l; -}; - -module.exports = { - jsonToString, - createLogger, -}; diff --git a/packages/node-logger/src/logger.ts b/packages/node-logger/src/logger.ts index 9f4be17f..ef99aa53 100644 --- a/packages/node-logger/src/logger.ts +++ b/packages/node-logger/src/logger.ts @@ -1,170 +1,172 @@ -import os from 'os'; -import { URL } from 'url'; - -import stripAnsi from 'strip-ansi'; -import { isPlainObject, isString } from 'lodash'; +import stringifySafe from 'json-stringify-safe'; +import { Attributes, diag, DiagConsoleLogger } from '@opentelemetry/api'; +import { getEnvWithoutDefaults } from '@opentelemetry/core'; +import { + BatchLogRecordProcessor, + BufferConfig, + LoggerProvider, +} from '@opentelemetry/sdk-logs'; +import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; +import { Logger as OtelLogger, SeverityNumber } from '@opentelemetry/api-logs'; +import { + Resource, + defaultServiceName, + detectResourcesSync, + envDetectorSync, + hostDetectorSync, + osDetectorSync, + processDetector, +} from '@opentelemetry/resources'; +import { SEMRESATTRS_SERVICE_NAME } from '@opentelemetry/semantic-conventions'; + +import hdx, { LOG_PREFIX as _LOG_PREFIX } from './debug'; +import { version as PKG_VERSION } from '../package.json'; + +const otelEnv = getEnvWithoutDefaults(); + +// DEBUG otel modules +if (otelEnv.OTEL_LOG_LEVEL) { + 
diag.setLogger(new DiagConsoleLogger(), {
+    logLevel: otelEnv.OTEL_LOG_LEVEL,
+  });
+}
-import { ILogger, createLogger, jsonToString } from './_logger';
-import { LOG_PREFIX as _LOG_PREFIX } from './debug';
+// TO EXTRACT ENV VARS [https://github.com/open-telemetry/opentelemetry-js/blob/3ab4f765d8d696327b7d139ae6a45e7bd7edd924/experimental/packages/sdk-logs/src/export/BatchLogRecordProcessorBase.ts#L50]
+// TO EXTRACT DEFAULTS [https://github.com/open-telemetry/opentelemetry-js/blob/3ab4f765d8d696327b7d139ae6a45e7bd7edd924/experimental/packages/sdk-logs/src/types.ts#L49]
+const DEFAULT_EXPORTER_BATCH_SIZE =
+  otelEnv.OTEL_BLRP_MAX_EXPORT_BATCH_SIZE ?? 512;
+const DEFAULT_EXPORTER_TIMEOUT_MS = otelEnv.OTEL_BLRP_EXPORT_TIMEOUT ?? 30000;
+const DEFAULT_MAX_QUEUE_SIZE = otelEnv.OTEL_BLRP_MAX_QUEUE_SIZE ?? 2048;
+const DEFAULT_OTEL_LOGS_EXPORTER_URL =
+  otelEnv.OTEL_EXPORTER_OTLP_LOGS_ENDPOINT ??
+  (otelEnv.OTEL_EXPORTER_OTLP_ENDPOINT
+    ? `${otelEnv.OTEL_EXPORTER_OTLP_ENDPOINT}/v1/logs`
+    : 'https://in-otel.hyperdx.io/v1/logs');
+const DEFAULT_SEND_INTERVAL_MS = otelEnv.OTEL_BLRP_SCHEDULE_DELAY ?? 5000;
+const DEFAULT_SERVICE_NAME = otelEnv.OTEL_SERVICE_NAME ?? defaultServiceName();
 const LOG_PREFIX = `⚠️ ${_LOG_PREFIX}`;
-// internal types
-export type HdxLog = {
-  b: string; // message body
-  h: string; // hostname
-  sn?: number;
-  st: string; // level in text
-  sv: string; // service name
-  ts: Date; // timestamp
-};
-
-export type PinoLogLine = {
-  level: number;
-  time: number;
-  pid: number;
-  hostname: string;
-  msg: string;
-};
-
-export const parsePinoLog = (log: PinoLogLine) => {
-  const { level, msg, ...meta } = log;
-  const bodyMsg = isString(msg) ? msg : jsonToString(log);
-  return {
-    level,
-    message: bodyMsg,
-    meta: log,
-  };
-};
-
-export const parseWinstonLog = (log: {
-  message: string | Record<string, any>;
-  level: string;
-}) => {
-  const level = log.level;
-  const bodyMsg = isString(log.message)
-    ? log.message
-    : jsonToString(log.message);
-
-  const meta = {
-    ...log,
-    ...(isPlainObject(log.message) && (log.message as Record<string, any>)), // spread the message if its object type
-  };
-
-  return {
-    level,
-    message: bodyMsg,
-    meta,
-  };
+export const jsonToString = (json) => {
+  try {
+    return JSON.stringify(json);
+  } catch (ex) {
+    hdx(`Failed to stringify json. e = ${ex}`);
+    return stringifySafe(json);
+  }
 };
-const DEFAULT_TIMEOUT = 30000;
-
 export type LoggerOptions = {
-  apiKey: string;
   baseUrl?: string;
   bufferSize?: number;
+  detectResources?: boolean;
+  headers?: Record<string, string>;
+  queueSize?: number;
+  resourceAttributes?: Attributes;
   sendIntervalMs?: number;
   service?: string;
   timeout?: number; // The read/write/connection timeout in milliseconds
 };
 export class Logger {
-  private readonly service: string;
+  private readonly logger: OtelLogger;
-  private readonly client: ILogger | null;
+  private readonly processor: BatchLogRecordProcessor;
   constructor({
-    apiKey,
     baseUrl,
     bufferSize,
+    detectResources,
+    headers,
+    queueSize,
+    resourceAttributes,
     sendIntervalMs,
     service,
     timeout,
-  }: {
-    apiKey: string;
-    baseUrl?: string;
-    bufferSize?: number;
-    sendIntervalMs?: number;
-    service?: string;
-    timeout?: number;
-  }) {
-    if (!apiKey) {
-      console.error(`${LOG_PREFIX} API key not found`);
-    }
+  }: LoggerOptions) {
     if (!service) {
-      console.warn(`${LOG_PREFIX} Service name not found. Use "default app"`);
-    }
-    this.service = service ?? 'default app';
-    let protocol;
-    let host;
-    let port;
-    if (baseUrl) {
-      const url = new URL(baseUrl);
-      protocol = url.protocol.replace(':', '');
-      host = url.hostname;
-      port = url.port;
       console.warn(
-        `${LOG_PREFIX} Sending logs to ${protocol}://${host}:${port} `,
+        `${LOG_PREFIX} Service name not found. Using "${DEFAULT_SERVICE_NAME}"`,
       );
     }
-    this.client = apiKey
-      ? createLogger({
-          bufferSize,
-          host,
-          port,
-          protocol,
-          sendIntervalMs,
-          timeout: timeout ?? DEFAULT_TIMEOUT,
-          token: apiKey,
-        })
-      : null;
-    if (this.client) {
-      console.log(`${LOG_PREFIX} started!`);
-    } else {
+    // sanity check bufferSize and queueSize
+    const maxExportBatchSize = bufferSize ?? DEFAULT_EXPORTER_BATCH_SIZE;
+    let maxQueueSize = queueSize ?? DEFAULT_MAX_QUEUE_SIZE;
+    if (maxExportBatchSize > maxQueueSize) {
       console.error(
-        `${LOG_PREFIX} failed to start! Please check your API key.`,
+        `${LOG_PREFIX} bufferSize must be smaller than or equal to queueSize. Setting queueSize to ${maxExportBatchSize}`,
       );
+      maxQueueSize = maxExportBatchSize;
     }
+
+    const detectedResource = detectResourcesSync({
+      detectors: detectResources
+        ? [envDetectorSync, hostDetectorSync, osDetectorSync, processDetector]
+        : [],
+    });
+
+    const _url = baseUrl ?? DEFAULT_OTEL_LOGS_EXPORTER_URL;
+
+    console.warn(`${LOG_PREFIX} Sending logs to ${_url}`);
+
+    const exporter = new OTLPLogExporter({
+      url: _url,
+      ...(headers && { headers }),
+    });
+    this.processor = new BatchLogRecordProcessor(exporter, {
+      /** The maximum batch size of every export. It must be smaller than or equal to
+       * maxQueueSize. The default value is 512. */
+      maxExportBatchSize,
+      scheduledDelayMillis: sendIntervalMs ?? DEFAULT_SEND_INTERVAL_MS,
+      exportTimeoutMillis: timeout ?? DEFAULT_EXPORTER_TIMEOUT_MS,
+      maxQueueSize,
+    });
+    const loggerProvider = new LoggerProvider({
+      resource: detectedResource.merge(
+        new Resource({
+          // TODO: should use otel semantic conventions
+          'hyperdx.distro.version': PKG_VERSION,
+          [SEMRESATTRS_SERVICE_NAME]: service ?? DEFAULT_SERVICE_NAME,
+          ...resourceAttributes,
+        }),
+      ),
+    });
+    loggerProvider.addLogRecordProcessor(this.processor);
+
+    this.logger = loggerProvider.getLogger('node-logger');
+    console.log(`${LOG_PREFIX} started!`);
   }
-  private parseTimestamp(meta: Record<string, any>): Date {
+  private parseTimestamp(meta: Attributes): Date {
     // pino
-    if (meta.time) {
-      return new Date(meta.time);
+    if (Number.isInteger(meta.time)) {
+      return new Date(meta.time as number);
     }
     // set to current time if not provided
     return new Date();
   }
-  private buildHdxLog(
-    level: string,
-    body: string,
-    meta: Record<string, any>,
-  ): HdxLog {
-    return {
-      b: stripAnsi(body),
-      h: os.hostname(),
-      sn: 0, // TODO: set up the correct number
-      st: stripAnsi(level),
-      sv: stripAnsi(this.service),
-      ts: this.parseTimestamp(meta),
-    };
+  shutdown() {
+    hdx('Shutting down HyperDX node logger...');
+    return this.processor.shutdown();
   }
-  sendAndClose(callback?: (error: Error, bulk: object) => void): void {
-    this.client?.sendAndClose(callback);
+  forceFlush() {
+    hdx('Forcing flush of HyperDX node logger...');
+    return this.processor.forceFlush();
   }
-  postMessage(
-    level: string,
-    body: string,
-    meta: Record<string, any> = {},
-  ): void {
-    this.client?.log({
-      ...meta,
-      __hdx: this.buildHdxLog(level, body, meta),
+  postMessage(level: string, body: string, attributes: Attributes = {}): void {
+    hdx('Emitting log from HyperDX node logger...');
+    this.logger.emit({
+      // TODO: should map to otel severity number
+      severityNumber: 0,
+      // TODO: set up the mapping between different downstream log levels
+      severityText: level,
+      body,
+      attributes,
+      timestamp: this.parseTimestamp(attributes),
     });
   }
 }
diff --git a/packages/node-logger/src/pino.ts b/packages/node-logger/src/pino.ts
index b352822f..373a5b59 100644
--- a/packages/node-logger/src/pino.ts
+++ b/packages/node-logger/src/pino.ts
@@ -1,10 +1,38 @@
 import build from 'pino-abstract-transport';
+import isString from 'lodash.isstring';
+import { Attributes } from '@opentelemetry/api';
 import hdx from './debug';
-import { Logger, parsePinoLog } from './logger';
+import { Logger, jsonToString } from './logger';
 import type { LoggerOptions } from './logger';
+export type PinoLogLine = {
+  level: number;
+  time: number;
+  pid: number;
+  hostname: string;
+  msg: string;
+} & Attributes;
+
+export const parsePinoLog = (log: PinoLogLine) => {
+  const { level, msg, message, ...meta } = log;
+  const targetMessage = msg || message;
+  let bodyMsg = '';
+  if (targetMessage) {
+    bodyMsg = isString(targetMessage)
+      ? targetMessage
+      : jsonToString(targetMessage);
+  } else {
+    bodyMsg = jsonToString(log);
+  }
+  return {
+    level,
+    message: bodyMsg,
+    meta,
+  };
+};
+
 // map pino level to text
 const PINO_LEVELS = {
   10: 'trace',
@@ -16,13 +44,21 @@
 };
 export type HyperDXPinoOptions = LoggerOptions & {
-  getCustomMeta?: () => Record<string, any>;
+  apiKey?: string;
+  getCustomMeta?: () => Attributes;
 };
-export default (opts: HyperDXPinoOptions) => {
+export default ({ apiKey, getCustomMeta, ...options }: HyperDXPinoOptions) => {
   try {
     hdx('Initializing HyperDX pino transport...');
-    const logger = new Logger(opts);
+    const logger = new Logger({
+      ...(apiKey && {
+        headers: {
+          Authorization: apiKey,
+        },
+      }),
+      ...options,
+    });
     hdx(`HyperDX pino transport initialized!`);
     return build(
       async function (source) {
@@ -30,7 +66,7 @@
         const { level, message, meta } = parsePinoLog(obj);
         hdx('Sending log to HyperDX');
         logger.postMessage(PINO_LEVELS[level], message, {
-          ...opts.getCustomMeta?.(),
+          ...getCustomMeta?.(),
           ...meta,
         });
         hdx('Log sent to HyperDX');
@@ -39,16 +75,8 @@
       {
         async close(err) {
           hdx('Sending and closing HyperDX pino transport...');
-          await new Promise((resolve, reject) =>
-            logger.sendAndClose((_err) => {
-              if (_err) {
-                reject(_err);
-                return;
-              }
-              hdx('HyperDX pino transport closed!');
-              resolve();
-            }),
-          );
+          await logger.shutdown();
+          hdx('HyperDX pino transport closed!');
         },
       },
     );
diff --git a/packages/node-logger/src/winston.ts b/packages/node-logger/src/winston.ts
index 1b506bc9..26ef9d16 100644
--- a/packages/node-logger/src/winston.ts
+++ b/packages/node-logger/src/winston.ts
@@ -1,53 +1,80 @@
 import Transport from 'winston-transport';
+import isPlainObject from 'lodash.isplainobject';
+import isString from 'lodash.isstring';
+import { Attributes } from '@opentelemetry/api';
 import hdx from './debug';
-import { Logger, parseWinstonLog } from './logger';
+import { Logger, jsonToString } from './logger';
 import type { LoggerOptions } from './logger';
+export const parseWinstonLog = (
+  log: {
+    message: string | Attributes;
+    level: string;
+  } & Attributes,
+) => {
+  const { level, message, ...attributes } = log;
+  const bodyMsg = isString(message) ? message : jsonToString(message);
+
+  let meta = attributes;
+
+  if (isPlainObject(message)) {
+    // FIXME: attributes conflict ??
+    meta = {
+      ...attributes,
+      ...(message as Attributes),
+    };
+  }
+
+  return {
+    level,
+    message: bodyMsg,
+    meta,
+  };
+};
+
 export type HyperDXWinstonOptions = LoggerOptions & {
+  apiKey?: string;
   maxLevel?: string;
-  getCustomMeta?: () => Record<string, any>;
+  getCustomMeta?: () => Attributes;
 };
 export default class HyperDXWinston extends Transport {
   private readonly logger: Logger;
-  private readonly getCustomMeta: () => Record<string, any>;
+  private readonly getCustomMeta: () => Attributes;
   constructor({
-    apiKey,
-    baseUrl,
-    bufferSize,
     maxLevel,
-    sendIntervalMs,
-    service,
-    timeout,
     getCustomMeta,
+    apiKey,
+    ...options
   }: HyperDXWinstonOptions) {
     hdx('Initializing HyperDX winston transport...');
     super({ level: maxLevel ??
'info' }); this.getCustomMeta = getCustomMeta; this.logger = new Logger({ - apiKey, - baseUrl, - bufferSize, - sendIntervalMs, - service, - timeout, + ...(apiKey && { + headers: { + Authorization: apiKey, + }, + }), + ...options, }); hdx(`HyperDX winston transport initialized!`); } log( - info: { message: string | Record; level: string }, + info: { message: string | Attributes; level: string } & Attributes, callback: () => void, ) { - hdx('Received log from winston'); setImmediate(() => { this.emit('logged', info); }); + hdx('Received log from winston'); + const { level, message, meta } = parseWinstonLog(info); hdx('Sending log to HyperDX'); this.logger.postMessage(level, message, { @@ -55,20 +82,21 @@ export default class HyperDXWinston extends Transport { ...meta, }); hdx('Log sent to HyperDX'); - callback(); - } - finish(callback) { - hdx('Sending and closing HyperDX winston transport...'); - this.logger.sendAndClose(callback); + callback(); } close() { hdx('Closing HyperDX winston transport...'); - this.finish(() => { - hdx('HyperDX winston transport closed!'); - this.emit('finish'); - this.emit('close'); - }); + this.logger + .shutdown() + .then(() => { + hdx('HyperDX winston transport closed!'); + this.emit('finish'); + this.emit('close'); + }) + .catch((err) => { + console.error('Error closing HyperDX winston transport:', err); + }); } } diff --git a/packages/node-opentelemetry/examples/dummy.js b/packages/node-opentelemetry/examples/dummy.js index 6a22ed48..315a013e 100644 --- a/packages/node-opentelemetry/examples/dummy.js +++ b/packages/node-opentelemetry/examples/dummy.js @@ -25,7 +25,9 @@ const logger = winston.createLogger({ level: 'info', format: winston.format.json(), transports: [ - getWinstonTransport('info'), // append this to the existing transports + getWinstonTransport('info', { + detectResources: true, + }), // append this to the existing transports ], }); diff --git a/packages/node-opentelemetry/package.json b/packages/node-opentelemetry/package.json index 345316a9..51cee510 100644 --- a/packages/node-opentelemetry/package.json +++ b/packages/node-opentelemetry/package.json @@ -45,7 +45,8 @@ "@opentelemetry/sdk-trace-base": "~1.21.0", "@opentelemetry/semantic-conventions": "~1.21.0", "debug": "^4.3.4", - "lodash": "^4.17.21", + "lodash.isobject": "^3.0.2", + "lodash.isplainobject": "^4.0.6", "shimmer": "^1.2.1", "tslib": "^2.5.3" }, diff --git a/packages/node-opentelemetry/src/instrumentations/console.ts b/packages/node-opentelemetry/src/instrumentations/console.ts index 648140a4..7d6e5481 100644 --- a/packages/node-opentelemetry/src/instrumentations/console.ts +++ b/packages/node-opentelemetry/src/instrumentations/console.ts @@ -1,11 +1,9 @@ import * as shimmer from 'shimmer'; -import _ from 'lodash'; -import opentelemetry from '@opentelemetry/api'; -import { - Logger, - LoggerOptions, - parseWinstonLog, -} from '@hyperdx/node-logger/build/src/logger'; +import isObject from 'lodash.isobject'; +import isPlainObject from 'lodash.isplainobject'; +import opentelemetry, { Attributes } from '@opentelemetry/api'; +import { Logger, LoggerOptions } from '@hyperdx/node-logger/build/src/logger'; +import { parseWinstonLog } from '@hyperdx/node-logger/build/src/winston'; import hdx from '../debug'; import { hyperDXGlobalContext } from '../context'; @@ -14,8 +12,8 @@ export const _parseConsoleArgs = (args: any[]) => { const stringifiedArgs = []; let firstJson; for (const arg of args) { - if (_.isObject(arg)) { - if (firstJson == null && _.isPlainObject(arg)) { + if 
(isObject(arg)) { + if (firstJson == null && isPlainObject(arg)) { firstJson = arg; } try { @@ -59,7 +57,7 @@ export default class HyperDXConsoleInstrumentation { const currentActiveSpan = opentelemetry.trace.getActiveSpan(); const traceId = currentActiveSpan?.spanContext().traceId; - let meta: Record = { + let meta: Attributes = { ...parsedLog.meta, // attached traceId and spanId, trace_id: traceId, diff --git a/packages/node-opentelemetry/src/logger.ts b/packages/node-opentelemetry/src/logger.ts index c0d23517..2c638a36 100644 --- a/packages/node-opentelemetry/src/logger.ts +++ b/packages/node-opentelemetry/src/logger.ts @@ -2,27 +2,28 @@ import opentelemetry from '@opentelemetry/api'; import HyperDXWinston from '@hyperdx/node-logger/build/src/winston'; +import type { HyperDXPinoOptions } from '@hyperdx/node-logger/build/src/pino'; +import type { HyperDXWinstonOptions } from '@hyperdx/node-logger/build/src/winston'; + import hdx from './debug'; import { hyperDXGlobalContext } from './context'; import { stringToBoolean } from './utils'; const env = process.env; -const HYPERDX_API_KEY = (env.HYPERDX_API_KEY ?? - env.OTEL_EXPORTER_OTLP_HEADERS?.split('=')[1]) as string; - const SERVICE_NAME = env.OTEL_SERVICE_NAME as string; const BETA_MODE = stringToBoolean(env.HDX_NODE_BETA_MODE); -type WinstonTransportOptions = { - baseUrl?: string; - bufferSize?: number; - sendIntervalMs?: number; - timeout?: number; // The read/write/connection timeout in milliseconds -}; +type WinstonTransportOptions = Omit< + HyperDXWinstonOptions, + 'apiKey' | 'getCustomMeta' | 'resourceAttributes' +>; -type PinotTransportOptions = WinstonTransportOptions; +type PinotTransportOptions = Omit< + HyperDXPinoOptions, + 'apiKey' | 'getCustomMeta' | 'resourceAttributes' +>; const getCustomMeta = () => { const currentActiveSpan = opentelemetry.trace.getActiveSpan(); @@ -36,7 +37,6 @@ export const getWinstonTransport = ( ) => { hdx('Initializing winston transport'); return new HyperDXWinston({ - apiKey: HYPERDX_API_KEY, maxLevel, service: SERVICE_NAME, getCustomMeta: BETA_MODE ? getCustomMeta : () => ({}), @@ -53,7 +53,6 @@ export const getPinoTransport = ( ) => ({ target: '@hyperdx/node-logger/build/src/pino', options: { - apiKey: HYPERDX_API_KEY, service: SERVICE_NAME, // getCustomMeta, // FIXME: DOMException [DataCloneError] ...options, diff --git a/packages/node-opentelemetry/src/otel.ts b/packages/node-opentelemetry/src/otel.ts index 3fb098e9..37597f69 100644 --- a/packages/node-opentelemetry/src/otel.ts +++ b/packages/node-opentelemetry/src/otel.ts @@ -60,16 +60,14 @@ export const initSDK = (config: SDKConfig) => { hdx('Initializing OpenTelemetry SDK'); const consoleInstrumentationEnabled = config.consoleCapture ?? true; - const apiKey = - env.HYPERDX_API_KEY ?? 
env.OTEL_EXPORTER_OTLP_HEADERS?.split('=')[1]; hdxConsoleInstrumentation = new HyperDXConsoleInstrumentation({ - apiKey, betaMode: config.betaMode, service: env.OTEL_SERVICE_NAME, }); sdk = new NodeSDK({ resource: new Resource({ + // TODO: should use otel semantic conventions 'hyperdx.distro.version': PKG_VERSION, 'hyperdx.distro.runtime_version': process.versions.node, }), diff --git a/yarn.lock b/yarn.lock index a03738e4..a1d0b5e3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1192,6 +1192,13 @@ dependencies: "@opentelemetry/api" "^1.0.0" +"@opentelemetry/api-logs@0.51.0", "@opentelemetry/api-logs@~0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.51.0.tgz#71f296661d2215167c748ca044ff184a65d9426b" + integrity sha512-m/jtfBPEIXS1asltl8fPQtO3Sb1qMpuL61unQajUmM8zIxeMF1AlqzWXM3QedcYgTTFiJCew5uJjyhpmqhc0+g== + dependencies: + "@opentelemetry/api" "^1.0.0" + "@opentelemetry/api@^1.0.0": version "1.4.1" resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.4.1.tgz#ff22eb2e5d476fbc2450a196e40dd243cc20c28f" @@ -1286,6 +1293,24 @@ dependencies: "@opentelemetry/semantic-conventions" "1.21.0" +"@opentelemetry/core@1.24.0", "@opentelemetry/core@~1.24.0": + version "1.24.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.24.0.tgz#5568b6c1328a6b9c94a77f9b2c7f872b852bba40" + integrity sha512-FP2oN7mVPqcdxJDTTnKExj4mi91EH+DNuArKfHTjPuJWe2K1JfMIVXNfahw1h3onJxQnxS8K0stKkogX05s+Aw== + dependencies: + "@opentelemetry/semantic-conventions" "1.24.0" + +"@opentelemetry/exporter-logs-otlp-http@~0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-logs-otlp-http/-/exporter-logs-otlp-http-0.51.0.tgz#2a6737d43c32918746cd25046bf84af73133c4ef" + integrity sha512-7G+RUQ+HzLaQw5sl2hKLYmZ/pWuaZ3IR7+gLIZNS9RpLeEnG/mbNENM2YTWQYPwX/7YpSJUC1NAaiMYxQco0ow== + dependencies: + "@opentelemetry/api-logs" "0.51.0" + "@opentelemetry/core" "1.24.0" + "@opentelemetry/otlp-exporter-base" "0.51.0" + "@opentelemetry/otlp-transformer" "0.51.0" + "@opentelemetry/sdk-logs" "0.51.0" + "@opentelemetry/exporter-metrics-otlp-http@0.48.0": version "0.48.0" resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-metrics-otlp-http/-/exporter-metrics-otlp-http-0.48.0.tgz#845654d9331fbc255f62852fd478a4bbf8066eb0" @@ -1738,6 +1763,13 @@ dependencies: "@opentelemetry/core" "1.21.0" +"@opentelemetry/otlp-exporter-base@0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.51.0.tgz#5a94b477e41df39369f987539429530cb9c14460" + integrity sha512-hR4c9vWVz1QgzCBSyy9zSDkvfTgaK96E6/tfVP6O4dzdZW9HqWimA3lXV/KXadEGqShvM4GToz9EHp2A5RU5bQ== + dependencies: + "@opentelemetry/core" "1.24.0" + "@opentelemetry/otlp-grpc-exporter-base@0.48.0": version "0.48.0" resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-grpc-exporter-base/-/otlp-grpc-exporter-base-0.48.0.tgz#11c5adb4fadf2665fa72a7e4ccda6e96354c4e40" @@ -1769,6 +1801,18 @@ "@opentelemetry/sdk-metrics" "1.21.0" "@opentelemetry/sdk-trace-base" "1.21.0" +"@opentelemetry/otlp-transformer@0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-transformer/-/otlp-transformer-0.51.0.tgz#32ba761226375c27c2a593dffb233b7eaa2dc155" + integrity sha512-ylLgx2xumVoSefDHP9GMAU/LG+TU3+8eacVDXV5o1RqWxsdVOaQmCTY0XyDgeRTn6hIOVAq/HHQbRq3iWOrt2A== + dependencies: + "@opentelemetry/api-logs" "0.51.0" + "@opentelemetry/core" "1.24.0" + "@opentelemetry/resources" "1.24.0" + 
"@opentelemetry/sdk-logs" "0.51.0" + "@opentelemetry/sdk-metrics" "1.24.0" + "@opentelemetry/sdk-trace-base" "1.24.0" + "@opentelemetry/propagation-utils@^0.30.6": version "0.30.6" resolved "https://registry.yarnpkg.com/@opentelemetry/propagation-utils/-/propagation-utils-0.30.6.tgz#fe8769d61f6b4fa83153ec46c5f7269b6ec14d6d" @@ -1860,6 +1904,14 @@ "@opentelemetry/core" "1.21.0" "@opentelemetry/semantic-conventions" "1.21.0" +"@opentelemetry/resources@1.24.0", "@opentelemetry/resources@~1.24.0": + version "1.24.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.24.0.tgz#f27911af0917986da5716775021eae0a872ba98e" + integrity sha512-mxC7E7ocUS1tLzepnA7O9/G8G6ZTdjCH2pXme1DDDuCuk6n2/53GADX+GWBuyX0dfIxeMInIbJAdjlfN9GNr6A== + dependencies: + "@opentelemetry/core" "1.24.0" + "@opentelemetry/semantic-conventions" "1.24.0" + "@opentelemetry/resources@^1.0.0", "@opentelemetry/resources@^1.12.0": version "1.13.0" resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.13.0.tgz#436b33ea950004e66fce6575f2776a05faca7f8e" @@ -1876,6 +1928,14 @@ "@opentelemetry/core" "1.21.0" "@opentelemetry/resources" "1.21.0" +"@opentelemetry/sdk-logs@0.51.0", "@opentelemetry/sdk-logs@~0.51.0": + version "0.51.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-logs/-/sdk-logs-0.51.0.tgz#9a3c31284c0a9f2bc57fbfd3735a990a97dfed63" + integrity sha512-K4fMBRFD8hQ6khk0rvYFuo6L9ymeGgByir6BcuFIgQuQ00OhYwBi9AruZz5V733Ejq7P8ObR3YyubkOUIbeVAw== + dependencies: + "@opentelemetry/core" "1.24.0" + "@opentelemetry/resources" "1.24.0" + "@opentelemetry/sdk-metrics@1.21.0", "@opentelemetry/sdk-metrics@~1.21.0": version "1.21.0" resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-metrics/-/sdk-metrics-1.21.0.tgz#40d71aaec5b696e58743889ce6d5bf2593f9a23d" @@ -1885,6 +1945,15 @@ "@opentelemetry/resources" "1.21.0" lodash.merge "^4.6.2" +"@opentelemetry/sdk-metrics@1.24.0": + version "1.24.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-metrics/-/sdk-metrics-1.24.0.tgz#205c19b6d18e385039d0a261c784a203c644fc28" + integrity sha512-4tJ+E6N019OZVB/nUW/LoK9xHxfeh88TCoaTqHeLBE9wLYfi6irWW6J9cphMav7J8Qk0D5b7/RM4VEY4dArWOA== + dependencies: + "@opentelemetry/core" "1.24.0" + "@opentelemetry/resources" "1.24.0" + lodash.merge "^4.6.2" + "@opentelemetry/sdk-metrics@^1.9.1": version "1.15.0" resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-metrics/-/sdk-metrics-1.15.0.tgz#e47ad688882fc2daedcbbe3db16a5c110feb23e8" @@ -1932,6 +2001,15 @@ "@opentelemetry/resources" "1.21.0" "@opentelemetry/semantic-conventions" "1.21.0" +"@opentelemetry/sdk-trace-base@1.24.0": + version "1.24.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.24.0.tgz#e2de869e33fd224f6d9f39bafa4172074d1086c8" + integrity sha512-H9sLETZ4jw9UJ3totV8oM5R0m4CW0ZIOLfp4NV3g0CM8HD5zGZcaW88xqzWDgiYRpctFxd+WmHtGX/Upoa2vRg== + dependencies: + "@opentelemetry/core" "1.24.0" + "@opentelemetry/resources" "1.24.0" + "@opentelemetry/semantic-conventions" "1.24.0" + "@opentelemetry/sdk-trace-node@1.21.0": version "1.21.0" resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-node/-/sdk-trace-node-1.21.0.tgz#20599f42a6b59bf71c64ef8630d28464e6e18f2a" @@ -1984,6 +2062,11 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.21.0.tgz#83f7479c524ab523ac2df702ade30b9724476c72" integrity sha512-lkC8kZYntxVKr7b8xmjCVUgE0a8xgDakPyDo9uSWavXPyYqLgYYGdEd2j8NxihRyb6UwpX3G/hFUF4/9q2V+/g== 
+"@opentelemetry/semantic-conventions@1.24.0", "@opentelemetry/semantic-conventions@~1.24.0": + version "1.24.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.24.0.tgz#f074db930a7feb4d64103a9a576c5fbad046fcac" + integrity sha512-yL0jI6Ltuz8R+Opj7jClGrul6pOoYrdfVmzQS4SITXRPH7I5IRZbrwe/6/v8v4WYMa6MYZG480S1+uc/IGfqsA== + "@opentelemetry/sql-common@^0.40.0": version "0.40.0" resolved "https://registry.yarnpkg.com/@opentelemetry/sql-common/-/sql-common-0.40.0.tgz#8cbed0722354d62997c3b9e1adf0e16257be6b15" @@ -2955,7 +3038,7 @@ available-typed-arrays@^1.0.5: resolved "https://registry.yarnpkg.com/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz#92f95616501069d07d10edb2fc37d3e1c65123b7" integrity sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw== -axios@^1.0.0, axios@^1.6.8: +axios@^1.0.0: version "1.6.8" resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.8.tgz#66d294951f5d988a00e87a0ffb955316a619ea66" integrity sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ== @@ -5578,6 +5661,21 @@ lodash.camelcase@^4.3.0: resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== +lodash.isobject@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/lodash.isobject/-/lodash.isobject-3.0.2.tgz#3c8fb8d5b5bf4bf90ae06e14f2a530a4ed935e1d" + integrity sha512-3/Qptq2vr7WeJbB4KHUSKlq8Pl7ASXi3UG6CMbBm8WRtXi8+GHm7mKaU3urfpSEzWe2wCIChs6/sdocUsTKJiA== + +lodash.isplainobject@^4.0.6: + version "4.0.6" + resolved "https://registry.yarnpkg.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz#7c526a52d89b45c45cc690b88163be0497f550cb" + integrity sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA== + +lodash.isstring@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" + integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== + lodash.memoize@4.x: version "4.1.2" resolved "https://registry.yarnpkg.com/lodash.memoize/-/lodash.memoize-4.1.2.tgz#bcc6c49a42a2840ed997f323eada5ecd182e0bfe" @@ -7629,6 +7727,15 @@ winston-transport@^4.5.0: readable-stream "^3.6.0" triple-beam "^1.3.0" +winston-transport@^4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.7.0.tgz#e302e6889e6ccb7f383b926df6936a5b781bd1f0" + integrity sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg== + dependencies: + logform "^2.3.2" + readable-stream "^3.6.0" + triple-beam "^1.3.0" + winston@^3.9.0: version "3.9.0" resolved "https://registry.yarnpkg.com/winston/-/winston-3.9.0.tgz#2bbdeb8167a75fac6d9a0c6d002890cd908016c2"