[Reporting] Use ES plugin from NP (#56209)
* [Reporting] Use ES plugin from NP

* fix elasticsearchErrors reference

* fix mocha test

* convert to jest

* fix the code and tests

* cosmetics

* fix mocha tests

* fix imports

* fix mocha tests

* fix jest

* simplify

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
tsullivan and elasticmachine committed Feb 3, 2020
1 parent dd41917 commit d9c6eb1
Showing 35 changed files with 434 additions and 379 deletions.

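Taken together, the diffs below apply one pattern across Reporting: drop the legacy `server.plugins.elasticsearch.getCluster('data')` / `callWithRequest` access and instead accept the New Platform's `ElasticsearchServiceSetup`, scoping its data client to a request. A minimal before/after sketch of that pattern (the function names and the `.reporting-*` index are illustrative, not from this commit):

```ts
import { ElasticsearchServiceSetup, KibanaRequest } from 'kibana/server';

// Before (legacy platform): look up the 'data' cluster, pass the request on every call.
function legacySearch(server: any, request: any) {
  const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
  return callWithRequest(request, 'search', { index: '.reporting-*' }); // index is illustrative
}

// After (new platform): scope the data client to the request once,
// then call Elasticsearch as the requesting user.
function npSearch(elasticsearch: ElasticsearchServiceSetup, request: KibanaRequest) {
  const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(request);
  return callAsCurrentUser('search', { index: '.reporting-*' }); // index is illustrative
}
```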

@@ -4,8 +4,9 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
+import Hapi from 'hapi';
 import { i18n } from '@kbn/i18n';
-import { KibanaRequest } from '../../../../../../../src/core/server';
+import { ElasticsearchServiceSetup, KibanaRequest } from '../../../../../../../src/core/server';
 import { CSV_JOB_TYPE } from '../../../common/constants';
 import { cryptoFactory } from '../../../server/lib';
 import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types';
@@ -15,8 +16,11 @@ import { createGenerateCsv } from './lib/generate_csv';
 
 export const executeJobFactory: ExecuteJobFactory<ESQueueWorkerExecuteFn<
   JobDocPayloadDiscoverCsv
->> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
-  const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
+>> = function executeJobFactoryFn(
+  server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
+  parentLogger: Logger
+) {
   const crypto = cryptoFactory(server);
   const config = server.config();
   const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']);
@@ -74,8 +78,11 @@
       },
     };
 
+    const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
+      KibanaRequest.from(fakeRequest as Hapi.Request)
+    );
     const callEndpoint = (endpoint: string, clientParams = {}, options = {}) => {
-      return callWithRequest(fakeRequest, endpoint, clientParams, options);
+      return callAsCurrentUser(endpoint, clientParams, options);
     };
     const savedObjects = server.savedObjects;
     const savedObjectsClient = savedObjects.getScopedSavedObjectsClient(
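Worth noting: this CSV worker executes queued jobs with no live HTTP request in hand, so it reconstructs one from the job's decrypted headers before scoping. A sketch of that shape, assuming a `decryptedHeaders` object recovered from the job payload (the helper name and the minimal `fakeRequest` fields are illustrative):

```ts
import Hapi from 'hapi';
import { ElasticsearchServiceSetup, KibanaRequest } from 'kibana/server';

// Sketch: build a request-like object from the decrypted job headers so the
// scoped data client authenticates as the user who originally queued the report.
function makeCallEndpoint(
  elasticsearch: ElasticsearchServiceSetup,
  decryptedHeaders: Record<string, string>
) {
  const fakeRequest: any = { headers: decryptedHeaders }; // real code carries more request fields
  const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
    KibanaRequest.from(fakeRequest as Hapi.Request)
  );
  return (endpoint: string, clientParams = {}, options = {}) =>
    callAsCurrentUser(endpoint, clientParams, options);
}
```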
@@ -6,24 +6,25 @@
 
 import { notFound, notImplemented } from 'boom';
 import { get } from 'lodash';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants';
 import { cryptoFactory } from '../../../../server/lib';
 import {
   CreateJobFactory,
   ImmediateCreateJobFn,
-  ServerFacade,
-  RequestFacade,
   Logger,
+  RequestFacade,
+  ServerFacade,
 } from '../../../../types';
 import {
+  JobDocPayloadPanelCsv,
+  JobParamsPanelCsv,
   SavedObject,
   SavedObjectServiceError,
   SavedSearchObjectAttributesJSON,
   SearchPanel,
   TimeRangeParams,
   VisObjectAttributesJSON,
-  JobDocPayloadPanelCsv,
-  JobParamsPanelCsv,
 } from '../../types';
 import { createJobSearch } from './create_job_search';
 
@@ -35,7 +36,11 @@ interface VisData {
 
 export const createJobFactory: CreateJobFactory<ImmediateCreateJobFn<
   JobParamsPanelCsv
->> = function createJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
+>> = function createJobFactoryFn(
+  server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
+  parentLogger: Logger
+) {
   const crypto = cryptoFactory(server);
   const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']);
@@ -5,6 +5,7 @@
  */
 
 import { i18n } from '@kbn/i18n';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants';
 import { cryptoFactory } from '../../../server/lib';
 import {
@@ -21,7 +22,11 @@ import { createGenerateCsv } from './lib';
 
 export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
   JobParamsPanelCsv
->> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
+>> = function executeJobFactoryFn(
+  server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
+  parentLogger: Logger
+) {
   const crypto = cryptoFactory(server);
   const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']);
 
@@ -85,6 +90,7 @@ export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
       const generateResults: CsvResultFromSearch = await generateCsv(
         requestObject,
         server,
+        elasticsearch,
         visType as string,
         panel,
         jobParams
@@ -5,6 +5,7 @@
  */
 
 import { badRequest } from 'boom';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { ServerFacade, RequestFacade, Logger } from '../../../../types';
 import { SearchPanel, VisPanel, JobParamsPanelCsv, FakeRequest } from '../../types';
 import { generateCsvSearch } from './generate_csv_search';
@@ -13,6 +14,7 @@ export function createGenerateCsv(logger: Logger) {
   return async function generateCsv(
     request: RequestFacade | FakeRequest,
     server: ServerFacade,
+    elasticsearch: ElasticsearchServiceSetup,
     visType: string,
     panel: VisPanel | SearchPanel,
     jobParams: JobParamsPanelCsv
@@ -27,6 +29,7 @@
       return await generateCsvSearch(
         request as RequestFacade,
         server,
+        elasticsearch,
         logger,
         panel as SearchPanel,
         jobParams
@@ -4,8 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { KibanaRequest } from '../../../../../../../../src/core/server';
+import { ElasticsearchServiceSetup, KibanaRequest } from '../../../../../../../../src/core/server';
 import { createGenerateCsv } from '../../../csv/server/lib/generate_csv';
 import { CancellationToken } from '../../../../common/cancellation_token';
 import { ServerFacade, RequestFacade, Logger } from '../../../../types';
@@ -58,6 +57,7 @@ const getUiSettings = async (config: any) => {
 export async function generateCsvSearch(
   req: RequestFacade,
   server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
   logger: Logger,
   searchPanel: SearchPanel,
   jobParams: JobParamsDiscoverCsv
@@ -152,8 +152,11 @@
       sort: sortConfig,
     },
   };
-  const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
-  const callCluster = (...params: [string, object]) => callWithRequest(req, ...params);
+
+  const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
+    KibanaRequest.from(req.getRawRequest())
+  );
+  const callCluster = (...params: [string, object]) => callAsCurrentUser(...params);
   const config = server.config();
   const uiSettings = await getUiSettings(uiConfig);
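For a live request the scoping is symmetrical: the raw Hapi request is unwrapped from the facade and handed to `KibanaRequest.from`. A short sketch of the adapter this file builds (the factory name is illustrative; `getRawRequest()` and the tuple signature are as used in the diff):

```ts
import { ElasticsearchServiceSetup, KibanaRequest } from 'kibana/server';

// Sketch: the request facade wraps the legacy request; getRawRequest() exposes
// the underlying Hapi request that KibanaRequest.from understands.
function makeCallCluster(
  elasticsearch: ElasticsearchServiceSetup,
  req: { getRawRequest(): any } // structural stand-in for RequestFacade
) {
  const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
    KibanaRequest.from(req.getRawRequest())
  );
  // Adapt to the (endpoint, params) tuple signature the CSV generation expects.
  return (...params: [string, object]) => callAsCurrentUser(...params);
}
```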
@@ -32,15 +32,6 @@ beforeEach(() => {
     info: {
       protocol: 'http',
     },
-    plugins: {
-      elasticsearch: {
-        getCluster: memoize(() => {
-          return {
-            callWithRequest: jest.fn(),
-          };
-        }),
-      },
-    },
     savedObjects: {
       getScopedSavedObjectsClient: jest.fn(),
     },
@@ -57,6 +48,12 @@
 
 afterEach(() => generatePngObservableFactory.mockReset());
 
+const mockElasticsearch = {
+  dataClient: {
+    asScoped: () => ({ callAsCurrentUser: jest.fn() }),
+  },
+};
+
 const getMockLogger = () => new LevelLogger();
 
 const encryptHeaders = async headers => {
@@ -70,7 +67,9 @@
   const generatePngObservable = generatePngObservableFactory();
   generatePngObservable.mockReturnValue(Rx.of(Buffer.from('')));
 
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const browserTimezone = 'UTC';
   await executeJob(
     'pngJobId',
@@ -88,7 +87,9 @@
 });
 
 test(`returns content_type of application/png`, async () => {
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});
 
   const generatePngObservable = generatePngObservableFactory();
@@ -108,7 +109,9 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => {
   const generatePngObservable = generatePngObservableFactory();
   generatePngObservable.mockReturnValue(Rx.of(Buffer.from(testContent)));
 
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});
   const { content } = await executeJob(
     'pngJobId',
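Since the tests no longer stub `getCluster`, they pass a hand-rolled NP service instead. A sketch of how a test could additionally capture the scoped client to assert on what a job sent to Elasticsearch (the shared spy and the commented assertions are illustrative additions, not part of this commit):

```ts
// Sketch: share one spy across asScoped() calls so assertions can see ES traffic.
const callAsCurrentUser = jest.fn().mockResolvedValue({ hits: { hits: [] } });
const mockElasticsearch = {
  dataClient: {
    asScoped: jest.fn(() => ({ callAsCurrentUser })),
  },
};

// After running a job via executeJobFactory(mockServer, mockElasticsearch, ...),
// a test could verify the client was scoped and inspect its calls:
//   expect(mockElasticsearch.dataClient.asScoped).toHaveBeenCalled();
//   expect(callAsCurrentUser).toHaveBeenCalledWith('search', expect.any(Object));
```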
@@ -5,6 +5,7 @@
  */
 
 import * as Rx from 'rxjs';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators';
 import { PNG_JOB_TYPE } from '../../../../common/constants';
 import {
@@ -27,6 +28,7 @@ type QueuedPngExecutorFactory = ExecuteJobFactory<ESQueueWorkerExecuteFn<JobDocP
 
 export const executeJobFactory: QueuedPngExecutorFactory = function executeJobFactoryFn(
   server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
   parentLogger: Logger,
   { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory }
 ) {
@@ -32,15 +32,6 @@ beforeEach(() => {
     info: {
       protocol: 'http',
     },
-    plugins: {
-      elasticsearch: {
-        getCluster: memoize(() => {
-          return {
-            callWithRequest: jest.fn(),
-          };
-        }),
-      },
-    },
     savedObjects: {
       getScopedSavedObjectsClient: jest.fn(),
     },
@@ -57,6 +48,11 @@
 afterEach(() => generatePdfObservableFactory.mockReset());
 
 const getMockLogger = () => new LevelLogger();
+const mockElasticsearch = {
+  dataClient: {
+    asScoped: () => ({ callAsCurrentUser: jest.fn() }),
+  },
+};
 
 const encryptHeaders = async headers => {
   const crypto = cryptoFactory(mockServer);
@@ -69,7 +65,9 @@
   const generatePdfObservable = generatePdfObservableFactory();
   generatePdfObservable.mockReturnValue(Rx.of(Buffer.from('')));
 
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const browserTimezone = 'UTC';
   await executeJob(
     'pdfJobId',
@@ -90,7 +88,9 @@
 });
 
 test(`returns content_type of application/pdf`, async () => {
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});
 
   const generatePdfObservable = generatePdfObservableFactory();
@@ -110,7 +110,9 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
   const generatePdfObservable = generatePdfObservableFactory();
   generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(testContent)));
 
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});
   const { content } = await executeJob(
     'pdfJobId',
@@ -5,6 +5,7 @@
  */
 
 import * as Rx from 'rxjs';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators';
 import {
   ServerFacade,
@@ -28,6 +29,7 @@ type QueuedPdfExecutorFactory = ExecuteJobFactory<ESQueueWorkerExecuteFn<JobDocP
 
 export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFactoryFn(
   server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
   parentLogger: Logger,
   { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory }
 ) {
29 changes: 12 additions & 17 deletions x-pack/legacy/plugins/reporting/index.ts
@@ -12,12 +12,7 @@ import { PluginStart as DataPluginStart } from '../../../../src/plugins/data/ser
 import { SecurityPluginSetup } from '../../../plugins/security/server';
 import { PLUGIN_ID, UI_SETTINGS_CUSTOM_PDF_LOGO } from './common/constants';
 import { config as reportingConfig } from './config';
-import {
-  LegacySetup,
-  ReportingPlugin,
-  reportingPluginFactory,
-  ReportingSetupDeps,
-} from './server/plugin';
+import { LegacySetup, ReportingPlugin, reportingPluginFactory } from './server/plugin';
 import { ReportingConfigOptions, ReportingPluginSpecOptions } from './types.d';
 
 const kbToBase64Length = (kb: number) => {
@@ -74,10 +69,6 @@ export const reporting = (kibana: any) => {
 
   async init(server: Legacy.Server) {
     const coreSetup = server.newPlatform.setup.core;
-    const pluginsSetup: ReportingSetupDeps = {
-      security: server.newPlatform.setup.plugins.security as SecurityPluginSetup,
-      usageCollection: server.newPlatform.setup.plugins.usageCollection,
-    };
 
     const fieldFormatServiceFactory = async (uiSettings: IUiSettingsClient) => {
       const [, plugins] = await coreSetup.getStartServices();
@@ -90,18 +81,22 @@
       config: server.config,
       info: server.info,
       route: server.route.bind(server),
-      plugins: {
-        elasticsearch: server.plugins.elasticsearch,
-        xpack_main: server.plugins.xpack_main,
-      },
+      plugins: { xpack_main: server.plugins.xpack_main },
       savedObjects: server.savedObjects,
       fieldFormatServiceFactory,
       uiSettingsServiceFactory: server.uiSettingsServiceFactory,
     };
 
-    const initializerContext = server.newPlatform.coreContext;
-    const plugin: ReportingPlugin = reportingPluginFactory(initializerContext, __LEGACY, this);
-    await plugin.setup(coreSetup, pluginsSetup);
+    const plugin: ReportingPlugin = reportingPluginFactory(
+      server.newPlatform.coreContext,
+      __LEGACY,
+      this
+    );
+    await plugin.setup(coreSetup, {
+      elasticsearch: coreSetup.elasticsearch,
+      security: server.newPlatform.setup.plugins.security as SecurityPluginSetup,
+      usageCollection: server.newPlatform.setup.plugins.usageCollection,
+    });
   },
 
   deprecations({ unused }: any) {
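On the receiving end, `plugin.setup` now gets the NP `elasticsearch` service alongside `security` and `usageCollection`. A sketch of the assumed dependency shape (this commit's `server/plugin.ts` changes are not rendered above, so the interface below is inferred from the call site):

```ts
import { CoreSetup, ElasticsearchServiceSetup } from 'kibana/server';

// Inferred from the call site in index.ts: the NP ES service now travels with
// the other setup dependencies instead of being read off the legacy server object.
interface ReportingSetupDeps {
  elasticsearch: ElasticsearchServiceSetup;
  security: unknown; // SecurityPluginSetup in the real code
  usageCollection: unknown; // UsageCollectionSetup in the real code
}

class ReportingPlugin {
  async setup(core: CoreSetup, plugins: ReportingSetupDeps) {
    const { elasticsearch } = plugins;
    // elasticsearch is threaded down into createJobFactory / executeJobFactory,
    // as the export-type diffs above show.
  }
}
```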