[7.x] [Reporting] Use ES plugin from NP (#56209) (#56693)
* [Reporting] Use ES plugin from NP (#56209)

* [Reporting] Use ES plugin from NP

* fix elasticsearchErrors reference

* fix mocha test

* convert to jest

* fix the code and tests

* cosmetics

* fix mocha tests

* fix imports

* fix mocha tests

* fix jest

* simplify

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>

* fix eslint

* fix missing import

* fix the backport

* fix mocha test

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
tsullivan and elasticmachine committed on Feb 5, 2020
1 parent 1575f0f · commit 8bf7368
Showing 36 changed files with 447 additions and 378 deletions.
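
At a glance, the pattern this backport applies across the rendered hunks below: the reporting job factories stop reaching into the legacy `server.plugins.elasticsearch` for a `callWithRequest` handle and instead take the New Platform `ElasticsearchServiceSetup` as an explicit argument, scoping its data client to a request once and then calling as the current user. A condensed sketch of the before/after, using the identifiers that appear in the hunks below (`scopedCallEndpoint` itself is a hypothetical wrapper name, not an identifier from this commit):

    import Hapi from 'hapi';
    import { ElasticsearchServiceSetup, KibanaRequest } from 'kibana/server';

    // Before: every call re-scoped itself by threading the request through:
    //   const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
    //   callWithRequest(fakeRequest, endpoint, clientParams, options);

    // After: scope the NP data client to the request once, then call as that user.
    function scopedCallEndpoint(
      elasticsearch: ElasticsearchServiceSetup,
      fakeRequest: Hapi.Request
    ) {
      const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
        KibanaRequest.from(fakeRequest)
      );
      return (endpoint: string, clientParams = {}, options = {}) =>
        callAsCurrentUser(endpoint, clientParams, options);
    }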

@@ -4,8 +4,9 @@
  * you may not use this file except in compliance with the Elastic License.
  */

+import Hapi from 'hapi';
 import { i18n } from '@kbn/i18n';
-import { KibanaRequest } from '../../../../../../../src/core/server';
+import { ElasticsearchServiceSetup, KibanaRequest } from '../../../../../../../src/core/server';
 import { CSV_JOB_TYPE } from '../../../common/constants';
 import { cryptoFactory } from '../../../server/lib';
 import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types';
@@ -15,8 +16,11 @@ import { createGenerateCsv } from './lib/generate_csv';

 export const executeJobFactory: ExecuteJobFactory<ESQueueWorkerExecuteFn<
   JobDocPayloadDiscoverCsv
->> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
-  const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
+>> = function executeJobFactoryFn(
+  server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
+  parentLogger: Logger
+) {
   const crypto = cryptoFactory(server);
   const config = server.config();
   const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']);
@@ -74,8 +78,11 @@ export const executeJobFactory: ExecuteJobFactory<ESQueueWorkerExecuteFn<
     },
   };

+  const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
+    KibanaRequest.from(fakeRequest as Hapi.Request)
+  );
   const callEndpoint = (endpoint: string, clientParams = {}, options = {}) => {
-    return callWithRequest(fakeRequest, endpoint, clientParams, options);
+    return callAsCurrentUser(endpoint, clientParams, options);
   };
   const savedObjects = server.savedObjects;
   const savedObjectsClient = savedObjects.getScopedSavedObjectsClient(
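
A note on the hunk above: the CSV worker runs as a background job with decrypted headers rather than inside a live HTTP request, so the scoped client has to be built from a synthetic request. The exact shape of `fakeRequest` sits outside the rendered hunks, so the sketch below is an assumption about it, not code from this commit:

    import Hapi from 'hapi';
    import { ElasticsearchServiceSetup, KibanaRequest } from 'kibana/server';

    declare const decryptedHeaders: Record<string, string>; // restored from the job payload
    declare const elasticsearch: ElasticsearchServiceSetup; // passed into the factory

    // Assumed minimal shape; the real fakeRequest carries whatever hapi fields
    // the scoped client needs in addition to the job's decrypted headers.
    const fakeRequest = ({
      headers: decryptedHeaders,
    } as unknown) as Hapi.Request;

    const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
      KibanaRequest.from(fakeRequest)
    );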
========== next file ==========
@@ -6,24 +6,25 @@

 import { notFound, notImplemented } from 'boom';
 import { get } from 'lodash';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants';
 import { cryptoFactory } from '../../../../server/lib';
 import {
   CreateJobFactory,
   ImmediateCreateJobFn,
-  ServerFacade,
-  RequestFacade,
   Logger,
+  RequestFacade,
+  ServerFacade,
 } from '../../../../types';
 import {
+  JobDocPayloadPanelCsv,
+  JobParamsPanelCsv,
   SavedObject,
   SavedObjectServiceError,
   SavedSearchObjectAttributesJSON,
   SearchPanel,
   TimeRangeParams,
   VisObjectAttributesJSON,
-  JobDocPayloadPanelCsv,
-  JobParamsPanelCsv,
 } from '../../types';
 import { createJobSearch } from './create_job_search';

@@ -35,7 +36,11 @@ interface VisData {

 export const createJobFactory: CreateJobFactory<ImmediateCreateJobFn<
   JobParamsPanelCsv
->> = function createJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
+>> = function createJobFactoryFn(
+  server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
+  parentLogger: Logger
+) {
   const crypto = cryptoFactory(server);
   const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']);

========== next file ==========
@@ -5,6 +5,7 @@
  */

 import { i18n } from '@kbn/i18n';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../common/constants';
 import { cryptoFactory } from '../../../server/lib';
 import {
@@ -21,7 +22,11 @@ import { createGenerateCsv } from './lib';

 export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
   JobParamsPanelCsv
->> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) {
+>> = function executeJobFactoryFn(
+  server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
+  parentLogger: Logger
+) {
   const crypto = cryptoFactory(server);
   const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']);

@@ -85,6 +90,7 @@ export const executeJobFactory: ExecuteJobFactory<ImmediateExecuteFn<
       const generateResults: CsvResultFromSearch = await generateCsv(
         requestObject,
         server,
+        elasticsearch,
         visType as string,
         panel,
         jobParams
========== next file ==========
@@ -5,6 +5,7 @@
  */

 import { badRequest } from 'boom';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { ServerFacade, RequestFacade, Logger } from '../../../../types';
 import { SearchPanel, VisPanel, JobParamsPanelCsv, FakeRequest } from '../../types';
 import { generateCsvSearch } from './generate_csv_search';
@@ -13,6 +14,7 @@ export function createGenerateCsv(logger: Logger) {
   return async function generateCsv(
     request: RequestFacade | FakeRequest,
     server: ServerFacade,
+    elasticsearch: ElasticsearchServiceSetup,
     visType: string,
     panel: VisPanel | SearchPanel,
     jobParams: JobParamsPanelCsv
@@ -27,6 +29,7 @@
       return await generateCsvSearch(
         request as RequestFacade,
         server,
+        elasticsearch,
         logger,
         panel as SearchPanel,
         jobParams
========== next file ==========
@@ -4,8 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-// eslint-disable-next-line @kbn/eslint/no-restricted-paths
-import { KibanaRequest } from '../../../../../../../../src/core/server';
+import { ElasticsearchServiceSetup, KibanaRequest } from '../../../../../../../../src/core/server';
 import { createGenerateCsv } from '../../../csv/server/lib/generate_csv';
 import { CancellationToken } from '../../../../common/cancellation_token';
 import { ServerFacade, RequestFacade, Logger } from '../../../../types';
@@ -58,6 +57,7 @@ const getUiSettings = async (config: any) => {
 export async function generateCsvSearch(
   req: RequestFacade,
   server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
   logger: Logger,
   searchPanel: SearchPanel,
   jobParams: JobParamsDiscoverCsv
@@ -152,8 +152,11 @@
       sort: sortConfig,
     },
   };
-  const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
-  const callCluster = (...params: [string, object]) => callWithRequest(req, ...params);
+
+  const { callAsCurrentUser } = elasticsearch.dataClient.asScoped(
+    KibanaRequest.from(req.getRawRequest())
+  );
+  const callCluster = (...params: [string, object]) => callAsCurrentUser(...params);
   const config = server.config();
   const uiSettings = await getUiSettings(uiConfig);

========== next file ==========
@@ -32,15 +32,6 @@ beforeEach(() => {
     info: {
       protocol: 'http',
     },
-    plugins: {
-      elasticsearch: {
-        getCluster: memoize(() => {
-          return {
-            callWithRequest: jest.fn(),
-          };
-        }),
-      },
-    },
     savedObjects: {
       getScopedSavedObjectsClient: jest.fn(),
     },
@@ -57,6 +48,12 @@

 afterEach(() => generatePngObservableFactory.mockReset());

+const mockElasticsearch = {
+  dataClient: {
+    asScoped: () => ({ callAsCurrentUser: jest.fn() }),
+  },
+};
+
 const getMockLogger = () => new LevelLogger();

 const encryptHeaders = async headers => {
@@ -70,7 +67,9 @@ test(`passes browserTimezone to generatePng`, async () => {
   const generatePngObservable = generatePngObservableFactory();
   generatePngObservable.mockReturnValue(Rx.of(Buffer.from('')));

-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const browserTimezone = 'UTC';
   await executeJob(
     'pngJobId',
@@ -88,7 +87,9 @@
 });

 test(`returns content_type of application/png`, async () => {
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});

   const generatePngObservable = generatePngObservableFactory();
@@ -108,7 +109,9 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => {
   const generatePngObservable = generatePngObservableFactory();
   generatePngObservable.mockReturnValue(Rx.of(Buffer.from(testContent)));

-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});
   const { content } = await executeJob(
     'pngJobId',
========== next file ==========
@@ -5,6 +5,7 @@
  */

 import * as Rx from 'rxjs';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators';
 import { PNG_JOB_TYPE } from '../../../../common/constants';
 import {
@@ -27,6 +28,7 @@ type QueuedPngExecutorFactory = ExecuteJobFactory<ESQueueWorkerExecuteFn<JobDocP

 export const executeJobFactory: QueuedPngExecutorFactory = function executeJobFactoryFn(
   server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
   parentLogger: Logger,
   { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory }
 ) {
========== next file ==========
@@ -28,7 +28,7 @@ interface CreateJobFnOpts {

 export const createJobFactory: CreateJobFactory<ESQueueCreateJobFn<
   JobParamsPDF
->> = function createJobFactoryFn(server: ServerFacade, logger: Logger) {
+>> = function createJobFactoryFn(server: ServerFacade, elasticsearch: unknown, logger: Logger) {
   const compatibilityShim = compatibilityShimFactory(server, logger);
   const crypto = cryptoFactory(server);

@@ -42,14 +42,14 @@ export const createJobFactory: CreateJobFactory<ESQueueCreateJobFn<
     validateUrls(relativeUrls);

     return {
-      type: objectType, // Note: this changes the shape of the job params object
-      title,
-      objects: relativeUrls.map(u => ({ relativeUrl: u })),
-      headers: serializedEncryptedHeaders,
-      browserTimezone,
-      layout,
       basePath: request.getBasePath(),
+      browserTimezone,
       forceNow: new Date().toISOString(),
+      headers: serializedEncryptedHeaders,
+      layout,
+      objects: relativeUrls.map(u => ({ relativeUrl: u })),
+      title,
+      type: objectType, // Note: this changes the shape of the job params object
     };
   });
 };
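
Note the `elasticsearch: unknown` above: the PDF create job never touches Elasticsearch, but the parameter is threaded through so every factory keeps the same call signature, and since a function may declare a wider parameter type than its contract requires, `unknown` documents that the argument is accepted but unused. A sketch of what the shared contract plausibly looks like after this commit (an assumption; the literal `CreateJobFactory` definition lives in the plugin's types file, which is not rendered here):

    import { ElasticsearchServiceSetup } from 'kibana/server';
    import { Logger, ServerFacade } from '../../../types'; // reporting's own types

    // Assumed shape: factories receive the NP service whether or not they use it.
    type CreateJobFactory<CreateJobFnType> = (
      server: ServerFacade,
      elasticsearch: ElasticsearchServiceSetup,
      logger: Logger
    ) => CreateJobFnType;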
========== next file ==========
@@ -32,15 +32,6 @@ beforeEach(() => {
     info: {
       protocol: 'http',
     },
-    plugins: {
-      elasticsearch: {
-        getCluster: memoize(() => {
-          return {
-            callWithRequest: jest.fn(),
-          };
-        }),
-      },
-    },
     savedObjects: {
       getScopedSavedObjectsClient: jest.fn(),
     },
@@ -57,6 +48,11 @@
 afterEach(() => generatePdfObservableFactory.mockReset());

 const getMockLogger = () => new LevelLogger();
+const mockElasticsearch = {
+  dataClient: {
+    asScoped: () => ({ callAsCurrentUser: jest.fn() }),
+  },
+};

 const encryptHeaders = async headers => {
   const crypto = cryptoFactory(mockServer);
@@ -69,7 +65,9 @@ test(`passes browserTimezone to generatePdf`, async () => {
   const generatePdfObservable = generatePdfObservableFactory();
   generatePdfObservable.mockReturnValue(Rx.of(Buffer.from('')));

-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const browserTimezone = 'UTC';
   await executeJob(
     'pdfJobId',
@@ -90,7 +88,9 @@
 });

 test(`returns content_type of application/pdf`, async () => {
-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});

   const generatePdfObservable = generatePdfObservableFactory();
@@ -110,7 +110,9 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
   const generatePdfObservable = generatePdfObservableFactory();
   generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(testContent)));

-  const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} });
+  const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), {
+    browserDriverFactory: {},
+  });
   const encryptedHeaders = await encryptHeaders({});
   const { content } = await executeJob(
     'pdfJobId',
========== next file ==========
@@ -5,6 +5,7 @@
  */

 import * as Rx from 'rxjs';
+import { ElasticsearchServiceSetup } from 'kibana/server';
 import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators';
 import {
   ServerFacade,
@@ -28,6 +29,7 @@ type QueuedPdfExecutorFactory = ExecuteJobFactory<ESQueueWorkerExecuteFn<JobDocP

 export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFactoryFn(
   server: ServerFacade,
+  elasticsearch: ElasticsearchServiceSetup,
   parentLogger: Logger,
   { browserDriverFactory }: { browserDriverFactory: HeadlessChromiumDriverFactory }
 ) {