[Reporting/New Platform] Use the logger service from core (elastic#55442)

* More Typescript Fixes

* [Reporting/New Platform] Use the logger service from core

* Add log tag

* fix jest tests

* ts fixes

* fix mocha test

* convert to jest
tsullivan committed Jan 23, 2020
1 parent 0fb7ad7 commit ee5fa28
Showing 31 changed files with 404 additions and 362 deletions.
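The diffs below all follow one pattern: each factory now receives a parentLogger from the caller instead of building its own LevelLogger from the server facade, and derives a scoped child logger with .clone([...tags]). As a reading aid, here is a minimal sketch of the logger shape those call sites assume; the method names are inferred from this diff, and the real LevelLogger (which wraps the core logging service) may differ in detail.

// Sketch only: the Logger shape implied by the call sites in this commit.
// The concrete class lives in server/lib/level_logger; anything beyond the
// methods actually used in the diff is an assumption.
type LogMethod = (message: string, tags?: string[]) => void;

interface Logger {
  info: LogMethod;
  debug: LogMethod;
  warn: LogMethod;
  error: LogMethod;
  // Returns a child logger with the extra tags appended, e.g.
  // parentLogger.clone([CSV_JOB_TYPE, 'execute-job']).
  clone(tags: string[]): Logger;
}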


@@ -7,25 +7,26 @@
import { i18n } from '@kbn/i18n';
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { KibanaRequest } from '../../../../../../../src/core/server';
import { CSV_JOB_TYPE } from '../../../common/constants';
import { cryptoFactory } from '../../../server/lib';
import {
ExecuteJobFactory,
ESQueueWorkerExecuteFn,
ExecuteJobFactory,
FieldFormats,
Logger,
ServerFacade,
} from '../../../types';
import { CSV_JOB_TYPE, PLUGIN_ID } from '../../../common/constants';
import { cryptoFactory, LevelLogger } from '../../../server/lib';
import { JobDocPayloadDiscoverCsv } from '../types';
import { createGenerateCsv } from './lib/generate_csv';
import { fieldFormatMapFactory } from './lib/field_format_map';
import { createGenerateCsv } from './lib/generate_csv';

export const executeJobFactory: ExecuteJobFactory<ESQueueWorkerExecuteFn<
JobDocPayloadDiscoverCsv
>> = function executeJobFactoryFn(server: ServerFacade) {
>> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
const crypto = cryptoFactory(server);
const config = server.config();
const logger = LevelLogger.createForServer(server, [PLUGIN_ID, CSV_JOB_TYPE, 'execute-job']);
const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']);
const serverBasePath = config.get('server.basePath');

return async function executeJob(
@@ -6,13 +6,14 @@

import { notFound, notImplemented } from 'boom';
import { get } from 'lodash';
import { PLUGIN_ID, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants';
import { cryptoFactory, LevelLogger } from '../../../../server/lib';
import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants';
import { cryptoFactory } from '../../../../server/lib';
import {
CreateJobFactory,
ImmediateCreateJobFn,
ServerFacade,
RequestFacade,
Logger,
} from '../../../../types';
import {
SavedObject,
@@ -34,13 +35,9 @@ interface VisData {

export const createJobFactory: CreateJobFactory<ImmediateCreateJobFn<
JobParamsPanelCsv
>> = function createJobFactoryFn(server: ServerFacade) {
>> = function createJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
const crypto = cryptoFactory(server);
const logger = LevelLogger.createForServer(server, [
PLUGIN_ID,
CSV_FROM_SAVEDOBJECT_JOB_TYPE,
'create-job',
]);
const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']);

return async function createJob(
jobParams: JobParamsPanelCsv,
@@ -5,38 +5,31 @@
*/

import { i18n } from '@kbn/i18n';
import { cryptoFactory, LevelLogger } from '../../../server/lib';
import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants';
import { cryptoFactory } from '../../../server/lib';
import {
ExecuteJobFactory,
ImmediateExecuteFn,
JobDocOutputExecuted,
ServerFacade,
JobDocOutput,
Logger,
RequestFacade,
ServerFacade,
} from '../../../types';
import {
CONTENT_TYPE_CSV,
CSV_FROM_SAVEDOBJECT_JOB_TYPE,
PLUGIN_ID,
} from '../../../common/constants';
import { CsvResultFromSearch } from '../../csv/types';
import { JobParamsPanelCsv, SearchPanel, JobDocPayloadPanelCsv, FakeRequest } from '../types';
import { FakeRequest, JobDocPayloadPanelCsv, JobParamsPanelCsv, SearchPanel } from '../types';
import { createGenerateCsv } from './lib';

export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
JobParamsPanelCsv
>> = function executeJobFactoryFn(server: ServerFacade) {
>> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
const crypto = cryptoFactory(server);
const logger = LevelLogger.createForServer(server, [
PLUGIN_ID,
CSV_FROM_SAVEDOBJECT_JOB_TYPE,
'execute-job',
]);
const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']);

return async function executeJob(
jobId: string | null,
job: JobDocPayloadPanelCsv,
realRequest?: RequestFacade
): Promise<JobDocOutputExecuted> {
): Promise<JobDocOutput> {
// There will not be a jobID for "immediate" generation.
// jobID is only for "queued" jobs
// Use the jobID as a logging tag or "immediate"
@@ -57,6 +57,8 @@ beforeEach(() => {

afterEach(() => generatePngObservableFactory.mockReset());

const getMockLogger = () => new LevelLogger();

const encryptHeaders = async headers => {
const crypto = cryptoFactory(mockServer);
return await crypto.encrypt(headers);
@@ -68,7 +70,7 @@ test(`passes browserTimezone to generatePng`, async () => {
const generatePngObservable = generatePngObservableFactory();
generatePngObservable.mockReturnValue(Rx.of(Buffer.from('')));

const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} });
const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
const browserTimezone = 'UTC';
await executeJob(
'pngJobId',
@@ -86,7 +88,7 @@ test(`passes browserTimezone to generatePng`, async () => {
});

test(`returns content_type of application/png`, async () => {
const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} });
const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
const encryptedHeaders = await encryptHeaders({});

const generatePngObservable = generatePngObservableFactory();
@@ -106,7 +108,7 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => {
const generatePngObservable = generatePngObservableFactory();
generatePngObservable.mockReturnValue(Rx.of(Buffer.from(testContent)));

const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} });
const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
const encryptedHeaders = await encryptHeaders({});
const { content } = await executeJob(
'pngJobId',
@@ -6,14 +6,14 @@

import * as Rx from 'rxjs';
import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators';
import { PLUGIN_ID, PNG_JOB_TYPE } from '../../../../common/constants';
import { PNG_JOB_TYPE } from '../../../../common/constants';
import {
ServerFacade,
ExecuteJobFactory,
ESQueueWorkerExecuteFn,
HeadlessChromiumDriverFactory,
Logger,
} from '../../../../types';
import { LevelLogger } from '../../../../server/lib';
import {
decryptJobHeaders,
omitBlacklistedHeaders,
@@ -27,10 +28,11 @@ type QueuedPngExecutorFactory = ExecuteJobFactory<ESQueueWorkerExecuteFn<JobDocP

export const executeJobFactory: QueuedPngExecutorFactory = function executeJobFactoryFn(
server: ServerFacade,
parentLogger: Logger,
{ browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory }
) {
const generatePngObservable = generatePngObservableFactory(server, browserDriverFactory);
const logger = LevelLogger.createForServer(server, [PLUGIN_ID, PNG_JOB_TYPE, 'execute']);
const logger = parentLogger.clone([PNG_JOB_TYPE, 'execute']);

return function executeJob(jobId: string, job: JobDocPayloadPNG, cancellationToken: any) {
const jobLogger = logger.clone([jobId]);
@@ -56,6 +56,8 @@ beforeEach(() => {

afterEach(() => generatePdfObservableFactory.mockReset());

const getMockLogger = () => new LevelLogger();

const encryptHeaders = async headers => {
const crypto = cryptoFactory(mockServer);
return await crypto.encrypt(headers);
@@ -67,7 +69,7 @@ test(`passes browserTimezone to generatePdf`, async () => {
const generatePdfObservable = generatePdfObservableFactory();
generatePdfObservable.mockReturnValue(Rx.of(Buffer.from('')));

const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} });
const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
const browserTimezone = 'UTC';
await executeJob(
'pdfJobId',
@@ -88,7 +90,7 @@ test(`passes browserTimezone to generatePdf`, async () => {
});

test(`returns content_type of application/pdf`, async () => {
const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} });
const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
const encryptedHeaders = await encryptHeaders({});

const generatePdfObservable = generatePdfObservableFactory();
@@ -108,7 +110,7 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
const generatePdfObservable = generatePdfObservableFactory();
generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(testContent)));

const executeJob = executeJobFactory(mockServer, { browserDriverFactory: {} });
const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
const encryptedHeaders = await encryptHeaders({});
const { content } = await executeJob(
'pdfJobId',
@@ -11,10 +11,10 @@ import {
ExecuteJobFactory,
ESQueueWorkerExecuteFn,
HeadlessChromiumDriverFactory,
Logger,
} from '../../../../types';
import { JobDocPayloadPDF } from '../../types';
import { PLUGIN_ID, PDF_JOB_TYPE } from '../../../../common/constants';
import { LevelLogger } from '../../../../server/lib';
import { PDF_JOB_TYPE } from '../../../../common/constants';
import { generatePdfObservableFactory } from '../lib/generate_pdf';
import {
decryptJobHeaders,
@@ -28,10 +28,11 @@ type QueuedPdfExecutorFactory = ExecuteJobFactory<ESQueueWorkerExecuteFn<JobDocP

export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFactoryFn(
server: ServerFacade,
parentLogger: Logger,
{ browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory }
) {
const generatePdfObservable = generatePdfObservableFactory(server, browserDriverFactory);
const logger = LevelLogger.createForServer(server, [PLUGIN_ID, PDF_JOB_TYPE, 'execute']);
const logger = parentLogger.clone([PDF_JOB_TYPE, 'execute']);

return function executeJob(jobId: string, job: JobDocPayloadPDF, cancellationToken: any) {
const jobLogger = logger.clone([jobId]);
4 changes: 2 additions & 2 deletions x-pack/legacy/plugins/reporting/index.ts
@@ -83,10 +83,10 @@ export const reporting = (kibana: any) => {
uiSettingsServiceFactory: server.uiSettingsServiceFactory,
// @ts-ignore Property 'fieldFormatServiceFactory' does not exist on type 'Server'.
fieldFormatServiceFactory: server.fieldFormatServiceFactory,
log: server.log.bind(server),
};

const plugin: ReportingPlugin = reportingPluginFactory(__LEGACY, this);
const initializerContext = server.newPlatform.coreContext;
const plugin: ReportingPlugin = reportingPluginFactory(initializerContext, __LEGACY, this);
await plugin.setup(coreSetup, pluginsSetup);
},

@@ -6,17 +6,16 @@

import { ensureBrowserDownloaded } from './download';
import { installBrowser } from './install';
import { LevelLogger } from '../lib/level_logger';
import { ServerFacade, CaptureConfig } from '../../types';
import { PLUGIN_ID, BROWSER_TYPE } from '../../common/constants';
import { ServerFacade, CaptureConfig, Logger } from '../../types';
import { BROWSER_TYPE } from '../../common/constants';
import { chromium } from './index';
import { HeadlessChromiumDriverFactory } from './chromium/driver_factory';

export async function createBrowserDriverFactory(
server: ServerFacade
server: ServerFacade,
logger: Logger
): Promise<HeadlessChromiumDriverFactory> {
const config = server.config();
const logger = LevelLogger.createForServer(server, [PLUGIN_ID, 'browser-driver']);

const dataDir: string = config.get('path.data');
const captureConfig: CaptureConfig = config.get('xpack.reporting.capture');
14 changes: 8 additions & 6 deletions x-pack/legacy/plugins/reporting/server/lib/create_queue.ts
@@ -4,17 +4,16 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { PLUGIN_ID } from '../../common/constants';
import {
ServerFacade,
ExportTypesRegistry,
HeadlessChromiumDriverFactory,
QueueConfig,
Logger,
} from '../../types';
// @ts-ignore
import { Esqueue } from './esqueue';
import { createWorkerFactory } from './create_worker';
import { LevelLogger } from './level_logger';
import { createTaggedLogger } from './create_tagged_logger'; // TODO remove createTaggedLogger once esqueue is removed

interface CreateQueueFactoryOpts {
@@ -24,6 +23,7 @@ interface CreateQueueFactoryOpts {

export function createQueueFactory(
server: ServerFacade,
logger: Logger,
{ exportTypesRegistry, browserDriverFactory }: CreateQueueFactoryOpts
): Esqueue {
const queueConfig: QueueConfig = server.config().get('xpack.reporting.queue');
@@ -34,23 +34,25 @@
timeout: queueConfig.timeout,
dateSeparator: '.',
client: server.plugins.elasticsearch.getCluster('admin'),
logger: createTaggedLogger(server, [PLUGIN_ID, 'esqueue', 'queue-worker']),
logger: createTaggedLogger(logger, ['esqueue', 'queue-worker']),
};

const queue: Esqueue = new Esqueue(index, queueOptions);

if (queueConfig.pollEnabled) {
// create workers to poll the index for idle jobs waiting to be claimed and executed
const createWorker = createWorkerFactory(server, { exportTypesRegistry, browserDriverFactory });
const createWorker = createWorkerFactory(server, logger, {
exportTypesRegistry,
browserDriverFactory,
});
createWorker(queue);
} else {
const logger = LevelLogger.createForServer(server, [PLUGIN_ID, 'create_queue']);
logger.info(
'xpack.reporting.queue.pollEnabled is set to false. This Kibana instance ' +
'will not poll for idle jobs to claim and execute. Make sure another ' +
'Kibana instance with polling enabled is running in this cluster so ' +
'reporting jobs can complete.',
['info']
['create_queue']
);
}
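Callers of createQueueFactory now supply the logger as the second argument. A sketch of such a call site follows; only the signature comes from the diff above, and the surrounding setup code and variable names are assumed.

// Assumed call site: exportTypesRegistry, browserDriverFactory and logger are
// created earlier in the plugin's setup.
const esqueue = createQueueFactory(server, logger, {
  exportTypesRegistry,
  browserDriverFactory,
});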

32 changes: 16 additions & 16 deletions x-pack/legacy/plugins/reporting/server/lib/create_tagged_logger.ts
@@ -4,23 +4,23 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { ServerFacade } from '../../types';
import { Logger } from '../../types';

/**
* @function taggedLogger
* @param {string} message
* @param {string[]} [additionalTags]
*/

/**
* Creates a taggedLogger function with tags, allows the consumer to optionally provide additional tags
*
* @param {Server} server
* @param {string[]} tags - tags to always be passed into the `logger` function
* @returns taggedLogger
*/
export function createTaggedLogger(server: ServerFacade, tags: string[]) {
export function createTaggedLogger(logger: Logger, tags: string[]) {
return (msg: string, additionalTags = []) => {
server.log([...tags, ...additionalTags], msg);
const allTags = [...tags, ...additionalTags];

if (allTags.includes('info')) {
const newTags = allTags.filter(t => t !== 'info'); // Ensure 'info' is not included twice
logger.info(msg, newTags);
} else if (allTags.includes('debug')) {
const newTags = allTags.filter(t => t !== 'debug');
logger.debug(msg, newTags);
} else if (allTags.includes('warn') || allTags.includes('warning')) {
const newTags = allTags.filter(t => t !== 'warn' && t !== 'warning');
logger.warn(msg, newTags);
} else {
logger.error(msg, allTags);
}
};
}
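With the new implementation, a level tag in either the fixed or the per-call tags selects the logging method and is stripped from the forwarded tags; a message without any level tag falls through to error. For example (the logger instance here is assumed):

const queueLogger = createTaggedLogger(logger, ['esqueue', 'queue-worker']);

// Calls logger.info('worker started', ['esqueue', 'queue-worker'])
queueLogger('worker started', ['info']);

// Calls logger.warn('job timed out', ['esqueue', 'queue-worker'])
queueLogger('job timed out', ['warning']);

// No level tag: falls through to logger.error('unexpected failure', ['esqueue', 'queue-worker'])
queueLogger('unexpected failure');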