From 8bf73685b5b1a00ebdcb88dc99c7451e72434854 Mon Sep 17 00:00:00 2001 From: Tim Sullivan Date: Wed, 5 Feb 2020 09:31:50 -0700 Subject: [PATCH] [7.x] [Reporting] Use ES plugin from NP (#56209) (#56693) * [Reporting] Use ES plugin from NP (#56209) * [Reporting] Use ES plugin from NP * fix elasticsearchErrors reference * fix mocha test * convert to jest * fix the code and tests * cosmetics * fix mocha tests * fix imports * fix mocha tests * fix jest * simplify Co-authored-by: Elastic Machine * fix eslint * fix missing import * fix the backport * fix mocha test Co-authored-by: Elastic Machine --- .../csv/server/execute_job.test.js | 272 +++++++++--------- .../export_types/csv/server/execute_job.ts | 15 +- .../server/create_job/create_job.ts | 15 +- .../server/execute_job.ts | 8 +- .../server/lib/generate_csv.ts | 3 + .../server/lib/generate_csv_search.ts | 11 +- .../png/server/execute_job/index.test.js | 27 +- .../png/server/execute_job/index.ts | 2 + .../printable_pdf/server/create_job/index.ts | 14 +- .../server/execute_job/index.test.js | 26 +- .../printable_pdf/server/execute_job/index.ts | 2 + x-pack/legacy/plugins/reporting/index.ts | 29 +- .../reporting/server/lib/create_queue.ts | 6 +- .../server/lib/create_worker.test.ts | 31 +- .../reporting/server/lib/create_worker.ts | 6 +- .../reporting/server/lib/enqueue_job.ts | 4 +- .../fixtures/legacy_elasticsearch.js | 34 ++- .../esqueue/__tests__/helpers/create_index.js | 6 +- .../server/lib/esqueue/__tests__/index.js | 2 +- .../server/lib/esqueue/__tests__/job.js | 16 +- .../server/lib/esqueue/__tests__/worker.js | 57 ++-- .../lib/esqueue/helpers/create_index.js | 4 +- .../reporting/server/lib/esqueue/index.js | 2 +- .../reporting/server/lib/esqueue/job.js | 6 +- .../reporting/server/lib/esqueue/worker.js | 8 +- .../reporting/server/lib/jobs_query.ts | 14 +- .../reporting/server/lib/validate/index.ts | 6 +- ...js => validate_max_content_length.test.js} | 57 ++-- .../validate/validate_max_content_length.ts | 12 +- .../legacy/plugins/reporting/server/plugin.ts | 32 +-- .../generate_from_savedobject_immediate.ts | 5 +- .../reporting/server/routes/generation.ts | 17 +- .../reporting/server/routes/jobs.test.js | 50 ++-- .../plugins/reporting/server/routes/jobs.ts | 5 +- .../server/routes/lib/job_response_handler.ts | 8 +- x-pack/legacy/plugins/reporting/types.d.ts | 13 +- 36 files changed, 447 insertions(+), 378 deletions(-) rename x-pack/legacy/plugins/reporting/server/lib/validate/{__tests__/validate_max_content_length.js => validate_max_content_length.test.js} (65%) diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js index a9ede658c4e1fb..5042389f2bf6b0 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.test.js @@ -37,9 +37,15 @@ describe('CSV Execute Job', function() { let cancellationToken; let mockServer; let clusterStub; - let callWithRequestStub; + let callAsCurrentUserStub; let uiSettingsGetStub; + const mockElasticsearch = { + dataClient: { + asScoped: () => clusterStub, + }, + }; + beforeAll(async function() { const crypto = nodeCrypto({ encryptionKey }); encryptedHeaders = await crypto.encrypt(headers); @@ -55,11 +61,11 @@ describe('CSV Execute Job', function() { _scroll_id: 'defaultScrollId', }; clusterStub = { - callWithRequest: function() {}, + callAsCurrentUser: function() {}, }; - callWithRequestStub = 
sinon - .stub(clusterStub, 'callWithRequest') + callAsCurrentUserStub = sinon + .stub(clusterStub, 'callAsCurrentUser') .resolves(defaultElasticsearchResponse); const configGetStub = sinon.stub(); @@ -68,7 +74,6 @@ describe('CSV Execute Job', function() { uiSettingsGetStub.withArgs('csv:quoteValues').returns(true); mockServer = { - expose: function() {}, fieldFormatServiceFactory: function() { const uiConfigMock = {}; uiConfigMock['format:defaultTypeMap'] = { @@ -81,13 +86,6 @@ describe('CSV Execute Job', function() { return fieldFormatsRegistry; }, - plugins: { - elasticsearch: { - getCluster: function() { - return clusterStub; - }, - }, - }, config: function() { return { get: configGetStub, @@ -117,7 +115,7 @@ describe('CSV Execute Job', function() { describe('calls getScopedSavedObjectsClient with request', function() { it('containing decrypted headers', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -135,7 +133,7 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('server.basePath') .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -153,7 +151,7 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('server.basePath') .returns(serverBasePath); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobBasePath = 'foo-job/basePath/'; await executeJob( 'job789', @@ -176,7 +174,7 @@ describe('CSV Execute Job', function() { it('passed scoped SavedObjectsClient to uiSettingsServiceFactory', async function() { const returnValue = Symbol(); mockServer.savedObjects.getScopedSavedObjectsClient.returns(returnValue); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -190,15 +188,15 @@ describe('CSV Execute Job', function() { }); describe('basic Elasticsearch call behavior', function() { - it('should decrypt encrypted headers and pass to callWithRequest', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); + it('should decrypt encrypted headers and pass to callAsCurrentUser', async function() { + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - expect(callWithRequestStub.called).toBe(true); - expect(callWithRequestStub.firstCall.args[0].headers).toEqual(headers); + expect(callAsCurrentUserStub.called).toBe(true); + expect(callAsCurrentUserStub.firstCall.args[0]).toEqual('search'); }); it('should pass the index and body to execute the initial search', async function() { @@ -207,7 +205,7 @@ describe('CSV Execute Job', function() { testBody: true, }; - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const 
job = { headers: encryptedHeaders, fields: [], @@ -219,115 +217,115 @@ describe('CSV Execute Job', function() { await executeJob('job777', job, cancellationToken); - const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).toBe('search'); - expect(searchCall.args[2].index).toBe(index); - expect(searchCall.args[2].body).toBe(body); + const searchCall = callAsCurrentUserStub.firstCall; + expect(searchCall.args[0]).toBe('search'); + expect(searchCall.args[1].index).toBe(index); + expect(searchCall.args[1].body).toBe(body); }); it('should pass the scrollId from the initial search to the subsequent scroll', async function() { const scrollId = getRandomScrollId(); - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{}], }, _scroll_id: scrollId, }); - callWithRequestStub.onSecondCall().resolves(defaultElasticsearchResponse); - const executeJob = executeJobFactory(mockServer, mockLogger); + callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - const scrollCall = callWithRequestStub.secondCall; + const scrollCall = callAsCurrentUserStub.secondCall; - expect(scrollCall.args[1]).toBe('scroll'); - expect(scrollCall.args[2].scrollId).toBe(scrollId); + expect(scrollCall.args[0]).toBe('scroll'); + expect(scrollCall.args[1].scrollId).toBe(scrollId); }); it('should not execute scroll if there are no hits from the search', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - expect(callWithRequestStub.callCount).toBe(2); + expect(callAsCurrentUserStub.callCount).toBe(2); - const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).toBe('search'); + const searchCall = callAsCurrentUserStub.firstCall; + expect(searchCall.args[0]).toBe('search'); - const clearScrollCall = callWithRequestStub.secondCall; - expect(clearScrollCall.args[1]).toBe('clearScroll'); + const clearScrollCall = callAsCurrentUserStub.secondCall; + expect(clearScrollCall.args[0]).toBe('clearScroll'); }); it('should stop executing scroll if there are no hits', async function() { - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{}], }, _scroll_id: 'scrollId', }); - callWithRequestStub.onSecondCall().resolves({ + callAsCurrentUserStub.onSecondCall().resolves({ hits: { hits: [], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - expect(callWithRequestStub.callCount).toBe(3); + expect(callAsCurrentUserStub.callCount).toBe(3); - const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).toBe('search'); + const searchCall = callAsCurrentUserStub.firstCall; + expect(searchCall.args[0]).toBe('search'); - const scrollCall = callWithRequestStub.secondCall; - expect(scrollCall.args[1]).toBe('scroll'); + const scrollCall = 
callAsCurrentUserStub.secondCall; + expect(scrollCall.args[0]).toBe('scroll'); - const clearScroll = callWithRequestStub.thirdCall; - expect(clearScroll.args[1]).toBe('clearScroll'); + const clearScroll = callAsCurrentUserStub.thirdCall; + expect(clearScroll.args[0]).toBe('clearScroll'); }); it('should call clearScroll with scrollId when there are no more hits', async function() { const lastScrollId = getRandomScrollId(); - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{}], }, _scroll_id: 'scrollId', }); - callWithRequestStub.onSecondCall().resolves({ + callAsCurrentUserStub.onSecondCall().resolves({ hits: { hits: [], }, _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); await executeJob( 'job456', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, cancellationToken ); - const lastCall = callWithRequestStub.getCall(callWithRequestStub.callCount - 1); - expect(lastCall.args[1]).toBe('clearScroll'); - expect(lastCall.args[2].scrollId).toEqual([lastScrollId]); + const lastCall = callAsCurrentUserStub.getCall(callAsCurrentUserStub.callCount - 1); + expect(lastCall.args[0]).toBe('clearScroll'); + expect(lastCall.args[1].scrollId).toEqual([lastScrollId]); }); it('calls clearScroll when there is an error iterating the hits', async function() { const lastScrollId = getRandomScrollId(); - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [ { @@ -341,7 +339,7 @@ describe('CSV Execute Job', function() { _scroll_id: lastScrollId, }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -352,9 +350,9 @@ describe('CSV Execute Job', function() { executeJob('job123', jobParams, cancellationToken) ).rejects.toMatchInlineSnapshot(`[TypeError: Cannot read property 'indexOf' of undefined]`); - const lastCall = callWithRequestStub.getCall(callWithRequestStub.callCount - 1); - expect(lastCall.args[1]).toBe('clearScroll'); - expect(lastCall.args[2].scrollId).toEqual([lastScrollId]); + const lastCall = callAsCurrentUserStub.getCall(callAsCurrentUserStub.callCount - 1); + expect(lastCall.args[0]).toBe('clearScroll'); + expect(lastCall.args[1].scrollId).toEqual([lastScrollId]); }); }); @@ -364,14 +362,14 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('xpack.reporting.csv.checkForFormulas') .returns(true); - callWithRequestStub.onFirstCall().returns({ + callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -392,14 +390,14 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('xpack.reporting.csv.checkForFormulas') .returns(true); - callWithRequestStub.onFirstCall().returns({ + callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { '=SUM(A1:A2)': 'foo', two: 'bar' } }], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, 
mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['=SUM(A1:A2)', 'two'], @@ -420,14 +418,14 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('xpack.reporting.csv.checkForFormulas') .returns(true); - callWithRequestStub.onFirstCall().returns({ + callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -448,14 +446,14 @@ describe('CSV Execute Job', function() { .config() .get.withArgs('xpack.reporting.csv.checkForFormulas') .returns(false); - callWithRequestStub.onFirstCall().returns({ + callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: '=SUM(A1:A2)', two: 'bar' } }], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -474,8 +472,8 @@ describe('CSV Execute Job', function() { describe('Elasticsearch call errors', function() { it('should reject Promise if search call errors out', async function() { - callWithRequestStub.rejects(new Error()); - const executeJob = executeJobFactory(mockServer, mockLogger); + callAsCurrentUserStub.rejects(new Error()); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -487,14 +485,14 @@ describe('CSV Execute Job', function() { }); it('should reject Promise if scroll call errors out', async function() { - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{}], }, _scroll_id: 'scrollId', }); - callWithRequestStub.onSecondCall().rejects(new Error()); - const executeJob = executeJobFactory(mockServer, mockLogger); + callAsCurrentUserStub.onSecondCall().rejects(new Error()); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -508,14 +506,14 @@ describe('CSV Execute Job', function() { describe('invalid responses', function() { it('should reject Promise if search returns hits but no _scroll_id', async function() { - callWithRequestStub.resolves({ + callAsCurrentUserStub.resolves({ hits: { hits: [{}], }, _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -529,14 +527,14 @@ describe('CSV Execute Job', function() { }); it('should reject Promise if search returns no hits and no _scroll_id', async function() { - callWithRequestStub.resolves({ + callAsCurrentUserStub.resolves({ hits: { hits: [], }, _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -550,21 +548,21 @@ describe('CSV Execute Job', function() { }); it('should reject Promise if scroll returns hits but no _scroll_id', async function() { - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: 
{ hits: [{}], }, _scroll_id: 'scrollId', }); - callWithRequestStub.onSecondCall().resolves({ + callAsCurrentUserStub.onSecondCall().resolves({ hits: { hits: [{}], }, _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -578,21 +576,21 @@ describe('CSV Execute Job', function() { }); it('should reject Promise if scroll returns no hits and no _scroll_id', async function() { - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{}], }, _scroll_id: 'scrollId', }); - callWithRequestStub.onSecondCall().resolves({ + callAsCurrentUserStub.onSecondCall().resolves({ hits: { hits: [], }, _scroll_id: undefined, }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: [], @@ -611,23 +609,25 @@ describe('CSV Execute Job', function() { const scrollId = getRandomScrollId(); beforeEach(function() { - // We have to "re-stub" the callWithRequest stub here so that we can use the fakeFunction + // We have to "re-stub" the callAsCurrentUser stub here so that we can use the fakeFunction // that delays the Promise resolution so we have a chance to call cancellationToken.cancel(). // Otherwise, we get into an endless loop, and don't have a chance to call cancel - callWithRequestStub.restore(); - callWithRequestStub = sinon.stub(clusterStub, 'callWithRequest').callsFake(async function() { - await delay(1); - return { - hits: { - hits: [{}], - }, - _scroll_id: scrollId, - }; - }); + callAsCurrentUserStub.restore(); + callAsCurrentUserStub = sinon + .stub(clusterStub, 'callAsCurrentUser') + .callsFake(async function() { + await delay(1); + return { + hits: { + hits: [{}], + }, + _scroll_id: scrollId, + }; + }); }); it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -635,14 +635,14 @@ describe('CSV Execute Job', function() { ); await delay(100); - const callCount = callWithRequestStub.callCount; + const callCount = callAsCurrentUserStub.callCount; cancellationToken.cancel(); await delay(250); - expect(callWithRequestStub.callCount).toBe(callCount + 1); // last call is to clear the scroll + expect(callAsCurrentUserStub.callCount).toBe(callCount + 1); // last call is to clear the scroll }); it(`shouldn't call clearScroll if it never got a scrollId`, async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -650,13 +650,13 @@ describe('CSV Execute Job', function() { ); cancellationToken.cancel(); - for (let i = 0; i < callWithRequestStub.callCount; ++i) { - expect(callWithRequestStub.getCall(i).args[1]).to.not.be('clearScroll'); + for (let i = 0; i < callAsCurrentUserStub.callCount; ++i) { + expect(callAsCurrentUserStub.getCall(i).args[1]).to.not.be('clearScroll'); } }); it('should call clearScroll if it got a scrollId', 
async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); executeJob( 'job345', { headers: encryptedHeaders, fields: [], searchRequest: { index: null, body: null } }, @@ -666,15 +666,15 @@ describe('CSV Execute Job', function() { cancellationToken.cancel(); await delay(100); - const lastCall = callWithRequestStub.getCall(callWithRequestStub.callCount - 1); - expect(lastCall.args[1]).toBe('clearScroll'); - expect(lastCall.args[2].scrollId).toEqual([scrollId]); + const lastCall = callAsCurrentUserStub.getCall(callAsCurrentUserStub.callCount - 1); + expect(lastCall.args[0]).toBe('clearScroll'); + expect(lastCall.args[1].scrollId).toEqual([scrollId]); }); }); describe('csv content', function() { it('should write column headers to output, even if there are no results', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -686,7 +686,7 @@ describe('CSV Execute Job', function() { it('should use custom uiSettings csv:separator for header', async function() { uiSettingsGetStub.withArgs('csv:separator').returns(';'); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -698,7 +698,7 @@ describe('CSV Execute Job', function() { it('should escape column headers if uiSettings csv:quoteValues is true', async function() { uiSettingsGetStub.withArgs('csv:quoteValues').returns(true); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -710,7 +710,7 @@ describe('CSV Execute Job', function() { it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function() { uiSettingsGetStub.withArgs('csv:quoteValues').returns(false); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one and a half', 'two', 'three-and-four', 'five & six'], @@ -721,8 +721,8 @@ describe('CSV Execute Job', function() { }); it('should write column headers to output, when there are results', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); - callWithRequestStub.onFirstCall().resolves({ + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ one: '1', two: '2' }], }, @@ -741,8 +741,8 @@ describe('CSV Execute Job', function() { }); it('should use comma separated values of non-nested fields from _source', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); - callWithRequestStub.onFirstCall().resolves({ + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], }, @@ -762,14 +762,14 @@ describe('CSV Execute Job', function() { }); it('should concatenate the hits from multiple responses', async function() { - const executeJob = 
executeJobFactory(mockServer, mockLogger); - callWithRequestStub.onFirstCall().resolves({ + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], }, _scroll_id: 'scrollId', }); - callWithRequestStub.onSecondCall().resolves({ + callAsCurrentUserStub.onSecondCall().resolves({ hits: { hits: [{ _source: { one: 'baz', two: 'qux' } }], }, @@ -790,8 +790,8 @@ describe('CSV Execute Job', function() { }); it('should use field formatters to format fields', async function() { - const executeJob = executeJobFactory(mockServer, mockLogger); - callWithRequestStub.onFirstCall().resolves({ + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], }, @@ -835,7 +835,7 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(1); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -868,7 +868,7 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(9); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -901,14 +901,14 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(9); - callWithRequestStub.onFirstCall().returns({ + callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -942,14 +942,14 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.maxSizeBytes') .returns(18); - callWithRequestStub.onFirstCall().returns({ + callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{ _source: { one: 'foo', two: 'bar' } }], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -982,14 +982,14 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.scroll') .returns({ duration: scrollDuration }); - callWithRequestStub.onFirstCall().returns({ + callAsCurrentUserStub.onFirstCall().returns({ hits: { hits: [{}], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -999,9 +999,9 @@ describe('CSV Execute Job', function() { await executeJob('job123', jobParams, cancellationToken); - const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).toBe('search'); - expect(searchCall.args[2].scroll).toBe(scrollDuration); + const searchCall = callAsCurrentUserStub.firstCall; + expect(searchCall.args[0]).toBe('search'); + 
expect(searchCall.args[1].scroll).toBe(scrollDuration); }); it('passes scroll size to initial search call', async function() { @@ -1011,14 +1011,14 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.scroll') .returns({ size: scrollSize }); - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{}], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1028,9 +1028,9 @@ describe('CSV Execute Job', function() { await executeJob('job123', jobParams, cancellationToken); - const searchCall = callWithRequestStub.firstCall; - expect(searchCall.args[1]).toBe('search'); - expect(searchCall.args[2].size).toBe(scrollSize); + const searchCall = callAsCurrentUserStub.firstCall; + expect(searchCall.args[0]).toBe('search'); + expect(searchCall.args[1].size).toBe(scrollSize); }); it('passes scroll duration to subsequent scroll call', async function() { @@ -1040,14 +1040,14 @@ describe('CSV Execute Job', function() { .get.withArgs('xpack.reporting.csv.scroll') .returns({ duration: scrollDuration }); - callWithRequestStub.onFirstCall().resolves({ + callAsCurrentUserStub.onFirstCall().resolves({ hits: { hits: [{}], }, _scroll_id: 'scrollId', }); - const executeJob = executeJobFactory(mockServer, mockLogger); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, mockLogger); const jobParams = { headers: encryptedHeaders, fields: ['one', 'two'], @@ -1057,9 +1057,9 @@ describe('CSV Execute Job', function() { await executeJob('job123', jobParams, cancellationToken); - const scrollCall = callWithRequestStub.secondCall; - expect(scrollCall.args[1]).toBe('scroll'); - expect(scrollCall.args[2].scroll).toBe(scrollDuration); + const scrollCall = callAsCurrentUserStub.secondCall; + expect(scrollCall.args[0]).toBe('scroll'); + expect(scrollCall.args[1].scroll).toBe(scrollDuration); }); }); }); diff --git a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts index fe64fdc96d9043..280bbf13fa9928 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv/server/execute_job.ts @@ -4,8 +4,9 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import Hapi from 'hapi'; import { i18n } from '@kbn/i18n'; -import { KibanaRequest } from '../../../../../../../src/core/server'; +import { ElasticsearchServiceSetup, KibanaRequest } from '../../../../../../../src/core/server'; import { CSV_JOB_TYPE } from '../../../common/constants'; import { cryptoFactory } from '../../../server/lib'; import { ESQueueWorkerExecuteFn, ExecuteJobFactory, Logger, ServerFacade } from '../../../types'; @@ -15,8 +16,11 @@ import { createGenerateCsv } from './lib/generate_csv'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) { - const { callWithRequest } = server.plugins.elasticsearch.getCluster('data'); +>> = function executeJobFactoryFn( + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, + parentLogger: Logger +) { const crypto = cryptoFactory(server); const config = server.config(); const logger = parentLogger.clone([CSV_JOB_TYPE, 'execute-job']); @@ -74,8 +78,11 @@ export const executeJobFactory: ExecuteJobFactory { - return callWithRequest(fakeRequest, endpoint, clientParams, options); + return callAsCurrentUser(endpoint, clientParams, options); }; const savedObjects = server.savedObjects; const savedObjectsClient = savedObjects.getScopedSavedObjectsClient( diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts index a270e3e0329fe8..ddef2aa0a62688 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/create_job/create_job.ts @@ -6,24 +6,25 @@ import { notFound, notImplemented } from 'boom'; import { get } from 'lodash'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { CSV_FROM_SAVEDOBJECT_JOB_TYPE } from '../../../../common/constants'; import { cryptoFactory } from '../../../../server/lib'; import { CreateJobFactory, ImmediateCreateJobFn, - ServerFacade, - RequestFacade, Logger, + RequestFacade, + ServerFacade, } from '../../../../types'; import { + JobDocPayloadPanelCsv, + JobParamsPanelCsv, SavedObject, SavedObjectServiceError, SavedSearchObjectAttributesJSON, SearchPanel, TimeRangeParams, VisObjectAttributesJSON, - JobDocPayloadPanelCsv, - JobParamsPanelCsv, } from '../../types'; import { createJobSearch } from './create_job_search'; @@ -35,7 +36,11 @@ interface VisData { export const createJobFactory: CreateJobFactory> = function createJobFactoryFn(server: ServerFacade, parentLogger: Logger) { +>> = function createJobFactoryFn( + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, + parentLogger: Logger +) { const crypto = cryptoFactory(server); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'create-job']); diff --git a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts index 03f491deaa43d6..b1b7b7d818200e 100644 --- a/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts +++ b/x-pack/legacy/plugins/reporting/export_types/csv_from_savedobject/server/execute_job.ts @@ -5,6 +5,7 @@ */ import { i18n } from '@kbn/i18n'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { CONTENT_TYPE_CSV, CSV_FROM_SAVEDOBJECT_JOB_TYPE } from 
'../../../common/constants'; import { cryptoFactory } from '../../../server/lib'; import { @@ -21,7 +22,11 @@ import { createGenerateCsv } from './lib'; export const executeJobFactory: ExecuteJobFactory> = function executeJobFactoryFn(server: ServerFacade, parentLogger: Logger) { +>> = function executeJobFactoryFn( + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, + parentLogger: Logger +) { const crypto = cryptoFactory(server); const logger = parentLogger.clone([CSV_FROM_SAVEDOBJECT_JOB_TYPE, 'execute-job']); @@ -85,6 +90,7 @@ export const executeJobFactory: ExecuteJobFactory { export async function generateCsvSearch( req: RequestFacade, server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: Logger, searchPanel: SearchPanel, jobParams: JobParamsDiscoverCsv @@ -152,8 +152,11 @@ export async function generateCsvSearch( sort: sortConfig, }, }; - const { callWithRequest } = server.plugins.elasticsearch.getCluster('data'); - const callCluster = (...params: [string, object]) => callWithRequest(req, ...params); + + const { callAsCurrentUser } = elasticsearch.dataClient.asScoped( + KibanaRequest.from(req.getRawRequest()) + ); + const callCluster = (...params: [string, object]) => callAsCurrentUser(...params); const config = server.config(); const uiSettings = await getUiSettings(uiConfig); diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js index 4f02ab5d4c077e..bb33ef9c19a1dd 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.test.js @@ -32,15 +32,6 @@ beforeEach(() => { info: { protocol: 'http', }, - plugins: { - elasticsearch: { - getCluster: memoize(() => { - return { - callWithRequest: jest.fn(), - }; - }), - }, - }, savedObjects: { getScopedSavedObjectsClient: jest.fn(), }, @@ -57,6 +48,12 @@ beforeEach(() => { afterEach(() => generatePngObservableFactory.mockReset()); +const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), + }, +}; + const getMockLogger = () => new LevelLogger(); const encryptHeaders = async headers => { @@ -70,7 +67,9 @@ test(`passes browserTimezone to generatePng`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { + browserDriverFactory: {}, + }); const browserTimezone = 'UTC'; await executeJob( 'pngJobId', @@ -88,7 +87,9 @@ test(`passes browserTimezone to generatePng`, async () => { }); test(`returns content_type of application/png`, async () => { - const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { + browserDriverFactory: {}, + }); const encryptedHeaders = await encryptHeaders({}); const generatePngObservable = generatePngObservableFactory(); @@ -108,7 +109,9 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => { const generatePngObservable = generatePngObservableFactory(); generatePngObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const executeJob = executeJobFactory(mockServer, getMockLogger(), { 
browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { + browserDriverFactory: {}, + }); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pngJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts index 7d5c69655c362f..c9f370197da662 100644 --- a/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/png/server/execute_job/index.ts @@ -5,6 +5,7 @@ */ import * as Rx from 'rxjs'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { PNG_JOB_TYPE } from '../../../../common/constants'; import { @@ -27,6 +28,7 @@ type QueuedPngExecutorFactory = ExecuteJobFactory> = function createJobFactoryFn(server: ServerFacade, logger: Logger) { +>> = function createJobFactoryFn(server: ServerFacade, elasticsearch: unknown, logger: Logger) { const compatibilityShim = compatibilityShimFactory(server, logger); const crypto = cryptoFactory(server); @@ -42,14 +42,14 @@ export const createJobFactory: CreateJobFactory ({ relativeUrl: u })), - headers: serializedEncryptedHeaders, - browserTimezone, - layout, basePath: request.getBasePath(), + browserTimezone, forceNow: new Date().toISOString(), + headers: serializedEncryptedHeaders, + layout, + objects: relativeUrls.map(u => ({ relativeUrl: u })), + title, + type: objectType, // Note: this changes the shape of the job params object }; }); }; diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js index 61408dcaf75f42..e917b5ce991a66 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.test.js @@ -32,15 +32,6 @@ beforeEach(() => { info: { protocol: 'http', }, - plugins: { - elasticsearch: { - getCluster: memoize(() => { - return { - callWithRequest: jest.fn(), - }; - }), - }, - }, savedObjects: { getScopedSavedObjectsClient: jest.fn(), }, @@ -57,6 +48,11 @@ beforeEach(() => { afterEach(() => generatePdfObservableFactory.mockReset()); const getMockLogger = () => new LevelLogger(); +const mockElasticsearch = { + dataClient: { + asScoped: () => ({ callAsCurrentUser: jest.fn() }), + }, +}; const encryptHeaders = async headers => { const crypto = cryptoFactory(mockServer); @@ -69,7 +65,9 @@ test(`passes browserTimezone to generatePdf`, async () => { const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(''))); - const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { + browserDriverFactory: {}, + }); const browserTimezone = 'UTC'; await executeJob( 'pdfJobId', @@ -90,7 +88,9 @@ test(`passes browserTimezone to generatePdf`, async () => { }); test(`returns content_type of application/pdf`, async () => { - const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { + browserDriverFactory: {}, + }); const 
encryptedHeaders = await encryptHeaders({}); const generatePdfObservable = generatePdfObservableFactory(); @@ -110,7 +110,9 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => { const generatePdfObservable = generatePdfObservableFactory(); generatePdfObservable.mockReturnValue(Rx.of(Buffer.from(testContent))); - const executeJob = executeJobFactory(mockServer, getMockLogger(), { browserDriverFactory: {} }); + const executeJob = executeJobFactory(mockServer, mockElasticsearch, getMockLogger(), { + browserDriverFactory: {}, + }); const encryptedHeaders = await encryptHeaders({}); const { content } = await executeJob( 'pdfJobId', diff --git a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts index dee53697c6681d..162376e31216e0 100644 --- a/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts +++ b/x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/execute_job/index.ts @@ -5,6 +5,7 @@ */ import * as Rx from 'rxjs'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators'; import { ServerFacade, @@ -28,6 +29,7 @@ type QueuedPdfExecutorFactory = ExecuteJobFactory { @@ -74,10 +69,6 @@ export const reporting = (kibana: any) => { async init(server: Legacy.Server) { const coreSetup = server.newPlatform.setup.core; - const pluginsSetup: ReportingSetupDeps = { - security: server.newPlatform.setup.plugins.security as SecurityPluginSetup, - usageCollection: server.newPlatform.setup.plugins.usageCollection, - }; const fieldFormatServiceFactory = async (uiSettings: IUiSettingsClient) => { const [, plugins] = await coreSetup.getStartServices(); @@ -90,18 +81,22 @@ export const reporting = (kibana: any) => { config: server.config, info: server.info, route: server.route.bind(server), - plugins: { - elasticsearch: server.plugins.elasticsearch, - xpack_main: server.plugins.xpack_main, - }, + plugins: { xpack_main: server.plugins.xpack_main }, savedObjects: server.savedObjects, fieldFormatServiceFactory, uiSettingsServiceFactory: server.uiSettingsServiceFactory, }; - const initializerContext = server.newPlatform.coreContext; - const plugin: ReportingPlugin = reportingPluginFactory(initializerContext, __LEGACY, this); - await plugin.setup(coreSetup, pluginsSetup); + const plugin: ReportingPlugin = reportingPluginFactory( + server.newPlatform.coreContext, + __LEGACY, + this + ); + await plugin.setup(coreSetup, { + elasticsearch: coreSetup.elasticsearch, + security: server.newPlatform.setup.plugins.security as SecurityPluginSetup, + usageCollection: server.newPlatform.setup.plugins.usageCollection, + }); }, deprecations({ unused }: any) { diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts index 05b760c0c3bd6c..c4e32b3ebcd99e 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_queue.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ +import { ElasticsearchServiceSetup } from 'kibana/server'; import { ServerFacade, ExportTypesRegistry, @@ -23,6 +24,7 @@ interface CreateQueueFactoryOpts { export function createQueueFactory( server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: Logger, { exportTypesRegistry, browserDriverFactory }: CreateQueueFactoryOpts ): Esqueue { @@ -33,7 +35,7 @@ export function createQueueFactory( interval: queueConfig.indexInterval, timeout: queueConfig.timeout, dateSeparator: '.', - client: server.plugins.elasticsearch.getCluster('admin'), + client: elasticsearch.dataClient, logger: createTaggedLogger(logger, ['esqueue', 'queue-worker']), }; @@ -41,7 +43,7 @@ export function createQueueFactory( if (queueConfig.pollEnabled) { // create workers to poll the index for idle jobs waiting to be claimed and executed - const createWorker = createWorkerFactory(server, logger, { + const createWorker = createWorkerFactory(server, elasticsearch, logger, { exportTypesRegistry, browserDriverFactory, }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts index 6a5c93db32376a..f5c42e5505cd1d 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.test.ts @@ -5,13 +5,14 @@ */ import * as sinon from 'sinon'; -import { ServerFacade, HeadlessChromiumDriverFactory } from '../../types'; -import { ExportTypesRegistry } from './export_types_registry'; +import { ElasticsearchServiceSetup } from 'kibana/server'; +import { HeadlessChromiumDriverFactory, ServerFacade } from '../../types'; import { createWorkerFactory } from './create_worker'; // @ts-ignore import { Esqueue } from './esqueue'; // @ts-ignore import { ClientMock } from './esqueue/__tests__/fixtures/legacy_elasticsearch'; +import { ExportTypesRegistry } from './export_types_registry'; const configGetStub = sinon.stub(); configGetStub.withArgs('xpack.reporting.queue').returns({ @@ -48,10 +49,15 @@ describe('Create Worker', () => { test('Creates a single Esqueue worker for Reporting', async () => { const exportTypesRegistry = getMockExportTypesRegistry(); - const createWorker = createWorkerFactory(getMockServer(), getMockLogger(), { - exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - }); + const createWorker = createWorkerFactory( + getMockServer(), + {} as ElasticsearchServiceSetup, + getMockLogger(), + { + exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, + browserDriverFactory: {} as HeadlessChromiumDriverFactory, + } + ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); createWorker(queue); @@ -82,10 +88,15 @@ Object { { executeJobFactory: executeJobFactoryStub }, { executeJobFactory: executeJobFactoryStub }, ]); - const createWorker = createWorkerFactory(getMockServer(), getMockLogger(), { - exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, - browserDriverFactory: {} as HeadlessChromiumDriverFactory, - }); + const createWorker = createWorkerFactory( + getMockServer(), + {} as ElasticsearchServiceSetup, + getMockLogger(), + { + exportTypesRegistry: exportTypesRegistry as ExportTypesRegistry, + browserDriverFactory: {} as HeadlessChromiumDriverFactory, + } + ); const registerWorkerSpy = sinon.spy(queue, 'registerWorker'); createWorker(queue); diff --git a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts 
b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts index 67869016a250be..2ca638f641291d 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/create_worker.ts @@ -4,6 +4,7 @@ * you may not use this file except in compliance with the Elastic License. */ +import { ElasticsearchServiceSetup } from 'kibana/server'; import { PLUGIN_ID } from '../../common/constants'; import { ExportTypesRegistry, HeadlessChromiumDriverFactory } from '../../types'; import { CancellationToken } from '../../common/cancellation_token'; @@ -29,6 +30,7 @@ interface CreateWorkerFactoryOpts { export function createWorkerFactory( server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: Logger, { exportTypesRegistry, browserDriverFactory }: CreateWorkerFactoryOpts ) { @@ -50,7 +52,9 @@ export function createWorkerFactory( ExportTypeDefinition >) { // TODO: the executeJobFn should be unwrapped in the register method of the export types registry - const jobExecutor = exportType.executeJobFactory(server, logger, { browserDriverFactory }); + const jobExecutor = exportType.executeJobFactory(server, elasticsearch, logger, { + browserDriverFactory, + }); jobExecutors.set(exportType.jobType, jobExecutor); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts index 14c57fa35dcf4e..1da8a3795aacc5 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/enqueue_job.ts @@ -5,6 +5,7 @@ */ import { get } from 'lodash'; +import { ElasticsearchServiceSetup } from 'kibana/server'; // @ts-ignore import { events as esqueueEvents } from './esqueue'; import { @@ -35,6 +36,7 @@ interface EnqueueJobFactoryOpts { export function enqueueJobFactory( server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, parentLogger: Logger, { exportTypesRegistry, esqueue }: EnqueueJobFactoryOpts ): EnqueueJobFn { @@ -61,7 +63,7 @@ export function enqueueJobFactory( } // TODO: the createJobFn should be unwrapped in the register method of the export types registry - const createJob = exportType.createJobFactory(server, logger) as CreateJobFn; + const createJob = exportType.createJobFactory(server, elasticsearch, logger) as CreateJobFn; const payload = await createJob(jobParams, headers, request); const options = { diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/fixtures/legacy_elasticsearch.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/fixtures/legacy_elasticsearch.js index 31bdf7767983dd..ebda7ff955b11c 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/fixtures/legacy_elasticsearch.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/fixtures/legacy_elasticsearch.js @@ -1,10 +1,14 @@ -import { uniqueId, times, random } from 'lodash'; -import * as legacyElasticsearch from 'elasticsearch'; +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ -import { constants } from '../../constants'; +import { uniqueId, times, random } from 'lodash'; +import { errors as esErrors } from 'elasticsearch'; export function ClientMock() { - this.callWithInternalUser = (endpoint, params = {}, ...rest) => { + this.callAsInternalUser = (endpoint, params = {}, ...rest) => { if (endpoint === 'indices.create') { return Promise.resolve({ acknowledged: true }); } @@ -21,12 +25,12 @@ export function ClientMock() { _seq_no: 1, _primary_term: 1, _shards: { total: shardCount, successful: shardCount, failed: 0 }, - created: true + created: true, }); } if (endpoint === 'get') { - if (params === legacyElasticsearch.errors.NotFound) return legacyElasticsearch.errors.NotFound; + if (params === esErrors.NotFound) return esErrors.NotFound; const _source = { jobtype: 'jobtype', @@ -34,7 +38,7 @@ export function ClientMock() { payload: { id: 'sample-job-1', - now: 'Mon Apr 25 2016 14:13:04 GMT-0700 (MST)' + now: 'Mon Apr 25 2016 14:13:04 GMT-0700 (MST)', }, priority: 10, @@ -43,7 +47,7 @@ export function ClientMock() { attempts: 0, max_attempts: 3, status: 'pending', - ...(rest[0] || {}) + ...(rest[0] || {}), }; return Promise.resolve({ @@ -52,7 +56,7 @@ export function ClientMock() { _seq_no: params._seq_no || 1, _primary_term: params._primary_term || 1, found: true, - _source: _source + _source: _source, }); } @@ -68,8 +72,8 @@ export function ClientMock() { _source: { created_at: new Date().toString(), number: random(0, count, true), - ...source - } + ...source, + }, }; }); return Promise.resolve({ @@ -78,13 +82,13 @@ export function ClientMock() { _shards: { total: 5, successful: 5, - failed: 0 + failed: 0, }, hits: { total: count, max_score: null, - hits: hits - } + hits: hits, + }, }); } @@ -96,7 +100,7 @@ export function ClientMock() { _seq_no: params.if_seq_no + 1 || 2, _primary_term: params.if_primary_term + 1 || 2, _shards: { total: shardCount, successful: shardCount, failed: 0 }, - created: true + created: true, }); } diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/helpers/create_index.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/helpers/create_index.js index 23e9aab5bad115..2944574534a827 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/helpers/create_index.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/helpers/create_index.js @@ -17,7 +17,7 @@ describe('Create Index', function() { beforeEach(function() { client = new ClientMock(); - createSpy = sinon.spy(client, 'callWithInternalUser').withArgs('indices.create'); + createSpy = sinon.spy(client, 'callAsInternalUser').withArgs('indices.create'); }); it('should return true', function() { @@ -75,10 +75,10 @@ describe('Create Index', function() { beforeEach(function() { client = new ClientMock(); sinon - .stub(client, 'callWithInternalUser') + .stub(client, 'callAsInternalUser') .withArgs('indices.exists') .callsFake(() => Promise.resolve(true)); - createSpy = client.callWithInternalUser.withArgs('indices.create'); + createSpy = client.callAsInternalUser.withArgs('indices.create'); }); it('should return true', function() { diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/index.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/index.js index 8f1ed69de5e7f6..428c0f0bc0736c 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/index.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/index.js @@ -40,7 +40,7 @@ describe('Esqueue class', 
function() { describe('Queue construction', function() { it('should ping the ES server', function() { - const pingSpy = sinon.spy(client, 'callWithInternalUser').withArgs('ping'); + const pingSpy = sinon.spy(client, 'callAsInternalUser').withArgs('ping'); new Esqueue('esqueue', { client }); sinon.assert.calledOnce(pingSpy); }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/job.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/job.js index 2d8410c18ddeab..c7812ec151b005 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/job.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/job.js @@ -79,7 +79,7 @@ describe('Job Class', function() { beforeEach(function() { type = 'type1'; payload = { id: '123' }; - indexSpy = sinon.spy(client, 'callWithInternalUser').withArgs('index'); + indexSpy = sinon.spy(client, 'callAsInternalUser').withArgs('index'); }); it('should create the target index', function() { @@ -121,7 +121,7 @@ describe('Job Class', function() { }); it('should refresh the index', function() { - const refreshSpy = client.callWithInternalUser.withArgs('indices.refresh'); + const refreshSpy = client.callAsInternalUser.withArgs('indices.refresh'); const job = new Job(mockQueue, index, type, payload); return job.ready.then(() => { @@ -165,9 +165,9 @@ describe('Job Class', function() { it('should emit error on client index failure', function(done) { const errMsg = 'test document index failure'; - client.callWithInternalUser.restore(); + client.callAsInternalUser.restore(); sinon - .stub(client, 'callWithInternalUser') + .stub(client, 'callAsInternalUser') .withArgs('index') .callsFake(() => Promise.reject(new Error(errMsg))); const job = new Job(mockQueue, index, type, payload); @@ -215,7 +215,7 @@ describe('Job Class', function() { beforeEach(function() { type = 'type1'; payload = { id: '123' }; - indexSpy = sinon.spy(client, 'callWithInternalUser').withArgs('index'); + indexSpy = sinon.spy(client, 'callAsInternalUser').withArgs('index'); }); it('should set attempt count to 0', function() { @@ -281,7 +281,7 @@ describe('Job Class', function() { authorization: 'Basic cXdlcnR5', }, }; - indexSpy = sinon.spy(client, 'callWithInternalUser').withArgs('index'); + indexSpy = sinon.spy(client, 'callAsInternalUser').withArgs('index'); }); it('should index the created_by value', function() { @@ -367,10 +367,10 @@ describe('Job Class', function() { }; const job = new Job(mockQueue, index, type, payload, optionals); - return Promise.resolve(client.callWithInternalUser('get', {}, optionals)) + return Promise.resolve(client.callAsInternalUser('get', {}, optionals)) .then(doc => { sinon - .stub(client, 'callWithInternalUser') + .stub(client, 'callAsInternalUser') .withArgs('get') .returns(Promise.resolve(doc)); }) diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/worker.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/worker.js index 2dd90fbd1a71e5..5d6cb2af03a127 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/worker.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/__tests__/worker.js @@ -288,7 +288,7 @@ describe('Worker class', function() { describe('error handling', function() { it('should pass search errors', function(done) { searchStub = sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('search') .callsFake(() => Promise.reject()); worker = new Worker(mockQueue, 'test', noop, 
defaultWorkerOptions); @@ -303,7 +303,7 @@ describe('Worker class', function() { describe('missing index', function() { it('should swallow error', function(done) { searchStub = sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('search') .callsFake(() => Promise.reject({ status: 404 })); worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); @@ -317,7 +317,7 @@ describe('Worker class', function() { it('should return an empty array', function(done) { searchStub = sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('search') .callsFake(() => Promise.reject({ status: 404 })); worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); @@ -343,7 +343,7 @@ describe('Worker class', function() { beforeEach(() => { searchStub = sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('search') .callsFake(() => Promise.resolve({ hits: { hits: [] } })); anchorMoment = moment(anchor); @@ -417,10 +417,10 @@ describe('Worker class', function() { type: 'test', id: 12345, }; - return mockQueue.client.callWithInternalUser('get', params).then(jobDoc => { + return mockQueue.client.callAsInternalUser('get', params).then(jobDoc => { job = jobDoc; worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - updateSpy = sinon.spy(mockQueue.client, 'callWithInternalUser').withArgs('update'); + updateSpy = sinon.spy(mockQueue.client, 'callAsInternalUser').withArgs('update'); }); }); @@ -483,9 +483,9 @@ describe('Worker class', function() { }); it('should reject the promise on conflict errors', function() { - mockQueue.client.callWithInternalUser.restore(); + mockQueue.client.callAsInternalUser.restore(); sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .returns(Promise.reject({ statusCode: 409 })); return worker._claimJob(job).catch(err => { @@ -494,9 +494,9 @@ describe('Worker class', function() { }); it('should reject the promise on other errors', function() { - mockQueue.client.callWithInternalUser.restore(); + mockQueue.client.callAsInternalUser.restore(); sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .returns(Promise.reject({ statusCode: 401 })); return worker._claimJob(job).catch(err => { @@ -532,12 +532,12 @@ describe('Worker class', function() { }); afterEach(() => { - mockQueue.client.callWithInternalUser.restore(); + mockQueue.client.callAsInternalUser.restore(); }); it('should emit for errors from claiming job', function(done) { sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .rejects({ statusCode: 401 }); @@ -558,7 +558,7 @@ describe('Worker class', function() { it('should reject the promise if an error claiming the job', function() { sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .rejects({ statusCode: 409 }); return worker._claimPendingJobs(getMockJobs()).catch(err => { @@ -568,7 +568,7 @@ describe('Worker class', function() { it('should get the pending job', function() { sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .resolves({ test: 'cool' }); sinon.stub(worker, '_performJob').callsFake(identity); @@ -590,10 
+590,10 @@ describe('Worker class', function() { anchorMoment = moment(anchor); clock = sinon.useFakeTimers(anchorMoment.valueOf()); - return mockQueue.client.callWithInternalUser('get').then(jobDoc => { + return mockQueue.client.callAsInternalUser('get').then(jobDoc => { job = jobDoc; worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); - updateSpy = sinon.spy(mockQueue.client, 'callWithInternalUser').withArgs('update'); + updateSpy = sinon.spy(mockQueue.client, 'callAsInternalUser').withArgs('update'); }); }); @@ -625,18 +625,18 @@ describe('Worker class', function() { }); it('should return true on conflict errors', function() { - mockQueue.client.callWithInternalUser.restore(); + mockQueue.client.callAsInternalUser.restore(); sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .rejects({ statusCode: 409 }); return worker._failJob(job).then(res => expect(res).to.equal(true)); }); it('should return false on other document update errors', function() { - mockQueue.client.callWithInternalUser.restore(); + mockQueue.client.callAsInternalUser.restore(); sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .rejects({ statusCode: 401 }); return worker._failJob(job).then(res => expect(res).to.equal(false)); @@ -672,9 +672,9 @@ describe('Worker class', function() { }); it('should emit on other document update errors', function(done) { - mockQueue.client.callWithInternalUser.restore(); + mockQueue.client.callAsInternalUser.restore(); sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .rejects({ statusCode: 401 }); @@ -703,9 +703,9 @@ describe('Worker class', function() { value: random(0, 100, true), }; - return mockQueue.client.callWithInternalUser('get', {}, { payload }).then(jobDoc => { + return mockQueue.client.callAsInternalUser('get', {}, { payload }).then(jobDoc => { job = jobDoc; - updateSpy = sinon.spy(mockQueue.client, 'callWithInternalUser').withArgs('update'); + updateSpy = sinon.spy(mockQueue.client, 'callAsInternalUser').withArgs('update'); }); }); @@ -876,7 +876,7 @@ describe('Worker class', function() { }; sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('update') .rejects({ statusCode: 413 }); @@ -898,7 +898,7 @@ describe('Worker class', function() { describe('search failure', function() { it('causes _processPendingJobs to reject the Promise', function() { sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('search') .rejects(new Error('test error')); worker = new Worker(mockQueue, 'test', noop, defaultWorkerOptions); @@ -1002,7 +1002,7 @@ describe('Worker class', function() { beforeEach(function() { sinon - .stub(mockQueue.client, 'callWithInternalUser') + .stub(mockQueue.client, 'callAsInternalUser') .withArgs('search') .callsFake(() => Promise.resolve({ hits: { hits: [] } })); }); @@ -1093,8 +1093,7 @@ describe('Format Job Object', () => { }); }); -// FAILING: https://github.com/elastic/kibana/issues/51372 -describe.skip('Get Doc Path from ES Response', () => { +describe('Get Doc Path from ES Response', () => { it('returns a formatted string after response of an update', function() { const responseMock = { _index: 'foo', diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/create_index.js 
b/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/create_index.js index 9e00f0447e99e4..e1ebee933ae1dd 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/create_index.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/helpers/create_index.js @@ -78,13 +78,13 @@ export function createIndex(client, indexName, indexSettings = {}) { }; return client - .callWithInternalUser('indices.exists', { + .callAsInternalUser('indices.exists', { index: indexName, }) .then(exists => { if (!exists) { return client - .callWithInternalUser('indices.create', { + .callAsInternalUser('indices.create', { index: indexName, body: body, }) diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/index.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/index.js index b42ef84168940d..bd30ca9ae0f29e 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/index.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/index.js @@ -32,7 +32,7 @@ export class Esqueue extends EventEmitter { } _initTasks() { - const initTasks = [this.client.callWithInternalUser('ping')]; + const initTasks = [this.client.callAsInternalUser('ping')]; return Promise.all(initTasks).catch(err => { this._logger(['initTasks', 'error'], err); diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/job.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/job.js index cded6d2ce89a8c..f90d7696a88ec1 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/job.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/job.js @@ -78,7 +78,7 @@ export class Job extends events.EventEmitter { } this.ready = createIndex(this._client, this.index, this.indexSettings) - .then(() => this._client.callWithInternalUser('index', indexParams)) + .then(() => this._client.callAsInternalUser('index', indexParams)) .then(doc => { this.document = { id: doc._id, @@ -89,7 +89,7 @@ export class Job extends events.EventEmitter { this.debug(`Job created in index ${this.index}`); return this._client - .callWithInternalUser('indices.refresh', { + .callAsInternalUser('indices.refresh', { index: this.index, }) .then(() => { @@ -111,7 +111,7 @@ export class Job extends events.EventEmitter { get() { return this.ready .then(() => { - return this._client.callWithInternalUser('get', { + return this._client.callAsInternalUser('get', { index: this.index, id: this.id, }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/esqueue/worker.js b/x-pack/legacy/plugins/reporting/server/lib/esqueue/worker.js index 96015bc0904922..cea60b46818f0a 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/esqueue/worker.js +++ b/x-pack/legacy/plugins/reporting/server/lib/esqueue/worker.js @@ -160,7 +160,7 @@ export class Worker extends events.EventEmitter { }; return this._client - .callWithInternalUser('update', { + .callAsInternalUser('update', { index: job._index, id: job._id, if_seq_no: job._seq_no, @@ -199,7 +199,7 @@ export class Worker extends events.EventEmitter { }); return this._client - .callWithInternalUser('update', { + .callAsInternalUser('update', { index: job._index, id: job._id, if_seq_no: job._seq_no, @@ -286,7 +286,7 @@ export class Worker extends events.EventEmitter { }; return this._client - .callWithInternalUser('update', { + .callAsInternalUser('update', { index: job._index, id: job._id, if_seq_no: job._seq_no, @@ -431,7 +431,7 @@ export class Worker extends events.EventEmitter { }; return this._client - .callWithInternalUser('search', { + .callAsInternalUser('search', { index: 
`${this.queue.index}-*`, body: query, }) diff --git a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts index 0c16f780c34acd..3562834230ea1d 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/jobs_query.ts @@ -4,9 +4,12 @@ * you may not use this file except in compliance with the Elastic License. */ +import { errors as elasticsearchErrors } from 'elasticsearch'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { get } from 'lodash'; -import { ServerFacade, JobSource } from '../../types'; +import { JobSource, ServerFacade } from '../../types'; +const esErrors = elasticsearchErrors as Record<string, any>; const defaultSize = 10; interface QueryBody { @@ -34,12 +37,9 @@ interface CountAggResult { count: number; } -export function jobsQueryFactory(server: ServerFacade) { +export function jobsQueryFactory(server: ServerFacade, elasticsearch: ElasticsearchServiceSetup) { const index = server.config().get('xpack.reporting.index'); - // @ts-ignore `errors` does not exist on type Cluster - const { callWithInternalUser, errors: esErrors } = server.plugins.elasticsearch.getCluster( - 'admin' - ); + const { callAsInternalUser } = elasticsearch.adminClient; function getUsername(user: any) { return get(user, 'username', false); @@ -61,7 +61,7 @@ body: Object.assign(defaultBody[queryType] || {}, body), }; - return callWithInternalUser(queryType, query).catch(err => { + return callAsInternalUser(queryType, query).catch(err => { if (err instanceof esErrors['401']) return; if (err instanceof esErrors['403']) return; if (err instanceof esErrors['404']) return; diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts index 79a64bd82d0228..028d8fa143487c 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/index.ts @@ -5,7 +5,8 @@ */ import { i18n } from '@kbn/i18n'; -import { ServerFacade, Logger } from '../../../types'; +import { ElasticsearchServiceSetup } from 'kibana/server'; +import { Logger, ServerFacade } from '../../../types'; import { HeadlessChromiumDriverFactory } from '../../browsers/chromium/driver_factory'; import { validateBrowser } from './validate_browser'; import { validateEncryptionKey } from './validate_encryption_key'; @@ -14,6 +15,7 @@ import { validateServerHost } from './validate_server_host'; export async function runValidations( server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: Logger, browserFactory: HeadlessChromiumDriverFactory ) { @@ -21,7 +23,7 @@ await Promise.all([ validateBrowser(server, browserFactory, logger), validateEncryptionKey(server, logger), - validateMaxContentLength(server, logger), + validateMaxContentLength(server, elasticsearch, logger), validateServerHost(server), ]); logger.debug( diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_max_content_length.js b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js similarity index 65% rename from x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_max_content_length.js rename to x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js index 48a58618f34cc7..942dcaf842696c 100644 --- 
a/x-pack/legacy/plugins/reporting/server/lib/validate/__tests__/validate_max_content_length.js +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.test.js @@ -3,14 +3,26 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ -import expect from '@kbn/expect'; + import sinon from 'sinon'; -import { validateMaxContentLength } from '../validate_max_content_length'; +import { validateMaxContentLength } from './validate_max_content_length'; const FIVE_HUNDRED_MEGABYTES = 524288000; const ONE_HUNDRED_MEGABYTES = 104857600; describe('Reporting: Validate Max Content Length', () => { + const elasticsearch = { + dataClient: { + callAsInternalUser: () => ({ + defaults: { + http: { + max_content_length: '100mb', + }, + }, + }), + }, + }; + const logger = { warning: sinon.spy(), }; @@ -24,22 +36,20 @@ describe('Reporting: Validate Max Content Length', () => { config: () => ({ get: sinon.stub().returns(FIVE_HUNDRED_MEGABYTES), }), - plugins: { - elasticsearch: { - getCluster: () => ({ - callWithInternalUser: () => ({ - defaults: { - http: { - max_content_length: '100mb', - }, - }, - }), - }), - }, + }; + const elasticsearch = { + dataClient: { + callAsInternalUser: () => ({ + defaults: { + http: { + max_content_length: '100mb', + }, + }, + }), }, }; - await validateMaxContentLength(server, logger); + await validateMaxContentLength(server, elasticsearch, logger); sinon.assert.calledWithMatch( logger.warning, @@ -64,22 +74,11 @@ describe('Reporting: Validate Max Content Length', () => { config: () => ({ get: sinon.stub().returns(ONE_HUNDRED_MEGABYTES), }), - plugins: { - elasticsearch: { - getCluster: () => ({ - callWithInternalUser: () => ({ - defaults: { - http: { - max_content_length: '100mb', - }, - }, - }), - }), - }, - }, }; - expect(async () => validateMaxContentLength(server, logger.warning)).not.to.throwError(); + expect( + async () => await validateMaxContentLength(server, elasticsearch, logger.warning) + ).not.toThrow(); sinon.assert.notCalled(logger.warning); }); }); diff --git a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts index f91cd40bfd3c73..ce4a5b93e74310 100644 --- a/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts +++ b/x-pack/legacy/plugins/reporting/server/lib/validate/validate_max_content_length.ts @@ -3,18 +3,24 @@ * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. 
*/ + import numeral from '@elastic/numeral'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { defaults, get } from 'lodash'; import { Logger, ServerFacade } from '../../../types'; const KIBANA_MAX_SIZE_BYTES_PATH = 'xpack.reporting.csv.maxSizeBytes'; const ES_MAX_SIZE_BYTES_PATH = 'http.max_content_length'; -export async function validateMaxContentLength(server: ServerFacade, logger: Logger) { +export async function validateMaxContentLength( + server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, + logger: Logger +) { const config = server.config(); - const { callWithInternalUser } = server.plugins.elasticsearch.getCluster('data'); + const { callAsInternalUser } = elasticsearch.dataClient; - const elasticClusterSettingsResponse = await callWithInternalUser('cluster.getSettings', { + const elasticClusterSettingsResponse = await callAsInternalUser('cluster.getSettings', { includeDefaults: true, }); const { persistent, transient, defaults: defaultSettings } = elasticClusterSettingsResponse; diff --git a/x-pack/legacy/plugins/reporting/server/plugin.ts b/x-pack/legacy/plugins/reporting/server/plugin.ts index 84b0ee077cc4c5..e618d23e8ed1f9 100644 --- a/x-pack/legacy/plugins/reporting/server/plugin.ts +++ b/x-pack/legacy/plugins/reporting/server/plugin.ts @@ -5,20 +5,26 @@ */ import { Legacy } from 'kibana'; -import { CoreSetup, CoreStart, Plugin, LoggerFactory } from 'src/core/server'; +import { + CoreSetup, + CoreStart, + ElasticsearchServiceSetup, + LoggerFactory, + Plugin, +} from 'src/core/server'; import { UsageCollectionSetup } from 'src/plugins/usage_collection/server'; +import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/server'; import { SecurityPluginSetup } from '../../../../plugins/security/server'; -import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; // @ts-ignore import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status'; +import { XPackMainPlugin } from '../../xpack_main/server/xpack_main'; import { PLUGIN_ID } from '../common/constants'; +import { logConfiguration } from '../log_configuration'; import { ReportingPluginSpecOptions } from '../types.d'; -import { registerRoutes } from './routes'; -import { checkLicenseFactory, getExportTypesRegistry, runValidations, LevelLogger } from './lib'; import { createBrowserDriverFactory } from './browsers'; +import { checkLicenseFactory, getExportTypesRegistry, LevelLogger, runValidations } from './lib'; +import { registerRoutes } from './routes'; import { registerReportingUsageCollector } from './usage'; -import { logConfiguration } from '../log_configuration'; -import { PluginStart as DataPluginStart } from '../../../../../src/plugins/data/server'; export interface ReportingInitializerContext { logger: LoggerFactory; @@ -29,22 +35,16 @@ export type ReportingSetup = object; export type ReportingStart = object; export interface ReportingSetupDeps { + elasticsearch: ElasticsearchServiceSetup; usageCollection: UsageCollectionSetup; security: SecurityPluginSetup; } export type ReportingStartDeps = object; -type LegacyPlugins = Legacy.Server['plugins']; - export interface LegacySetup { config: Legacy.Server['config']; info: Legacy.Server['info']; - plugins: { - elasticsearch: LegacyPlugins['elasticsearch']; - xpack_main: XPackMainPlugin & { - status?: any; - }; - }; + plugins: { xpack_main: XPackMainPlugin & { status?: any } }; route: Legacy.Server['route']; savedObjects: Legacy.Server['savedObjects']; uiSettingsServiceFactory: 
Legacy.Server['uiSettingsServiceFactory']; @@ -76,10 +76,10 @@ export function reportingPluginFactory( public async setup(core: CoreSetup, plugins: ReportingSetupDeps): Promise<ReportingSetup> { const exportTypesRegistry = getExportTypesRegistry(); + const { usageCollection, elasticsearch } = plugins; let isCollectorReady = false; // Register a function with server to manage the collection of usage stats - const { usageCollection } = plugins; registerReportingUsageCollector( usageCollection, __LEGACY, @@ -91,7 +91,7 @@ const browserDriverFactory = await createBrowserDriverFactory(__LEGACY, logger); logConfiguration(__LEGACY, logger); - runValidations(__LEGACY, logger, browserDriverFactory); + runValidations(__LEGACY, elasticsearch, logger, browserDriverFactory); const { xpack_main: xpackMainPlugin } = __LEGACY.plugins; mirrorPluginStatus(xpackMainPlugin, legacyPlugin); diff --git a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts index f3ed760bba4302..fd1d85fef0f21b 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generate_from_savedobject_immediate.ts @@ -36,6 +36,7 @@ export function registerGenerateCsvFromSavedObjectImmediate( parentLogger: Logger ) { const routeOptions = getRouteOptionsCsv(server, plugins, parentLogger); + const { elasticsearch } = plugins; /* * CSV export with the `immediate` option does not queue a job with Reporting's ESQueue to run the job async. Instead, this does: @@ -57,8 +58,8 @@ * * Calling an execute job factory requires passing a browserDriverFactory option, so we should not call the factory from here */ - const createJobFn = createJobFactory(server, logger); - const executeJobFn = executeJobFactory(server, logger, { + const createJobFn = createJobFactory(server, elasticsearch, logger); + const executeJobFn = executeJobFactory(server, elasticsearch, logger, { browserDriverFactory: {} as HeadlessChromiumDriverFactory, }); const jobDocPayload: JobDocPayloadPanelCsv = await createJobFn( diff --git a/x-pack/legacy/plugins/reporting/server/routes/generation.ts b/x-pack/legacy/plugins/reporting/server/routes/generation.ts index 81e405b08563dd..6e9a8e24db4670 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/generation.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/generation.ts @@ -5,6 +5,7 @@ */ import boom from 'boom'; +import { errors as elasticsearchErrors } from 'elasticsearch'; import { Legacy } from 'kibana'; import { API_BASE_URL } from '../../common/constants'; import { @@ -22,6 +23,8 @@ import { registerGenerateCsvFromSavedObjectImmediate } from './generate_from_sav import { registerLegacy } from './legacy'; import { makeRequestFacade } from './lib/make_request_facade'; +const esErrors = elasticsearchErrors as Record<string, any>; + export function registerJobGenerationRoutes( server: ServerFacade, plugins: ReportingSetupDeps, @@ -31,11 +34,15 @@ ) { const config = server.config(); const DOWNLOAD_BASE_URL = config.get('server.basePath') + `${API_BASE_URL}/jobs/download`; - // @ts-ignore TODO - const { errors: esErrors } = server.plugins.elasticsearch.getCluster('admin'); - - const esqueue = createQueueFactory(server, logger, { exportTypesRegistry, browserDriverFactory }); - const enqueueJob = 
enqueueJobFactory(server, logger, { exportTypesRegistry, esqueue }); + const { elasticsearch } = plugins; + const esqueue = createQueueFactory(server, elasticsearch, logger, { + exportTypesRegistry, + browserDriverFactory, + }); + const enqueueJob = enqueueJobFactory(server, elasticsearch, logger, { + exportTypesRegistry, + esqueue, + }); /* * Generates enqueued job details to use in responses diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js index c9d4f9fc027be3..811c81c502b812 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.test.js @@ -43,18 +43,12 @@ beforeEach(() => { jobContentEncoding: 'base64', jobContentExtension: 'pdf', }); - mockServer.plugins = { - elasticsearch: { - getCluster: memoize(() => ({ callWithInternalUser: jest.fn() })), - createCluster: () => ({ - callWithRequest: jest.fn(), - callWithInternalUser: jest.fn(), - }), - }, - }; }); const mockPlugins = { + elasticsearch: { + adminClient: { callAsInternalUser: jest.fn() }, + }, security: null, }; @@ -67,9 +61,9 @@ const getHits = (...sources) => { }; test(`returns 404 if job not found`, async () => { - mockServer.plugins.elasticsearch - .getCluster('admin') - .callWithInternalUser.mockReturnValue(Promise.resolve(getHits())); + mockPlugins.elasticsearch.adminClient = { + callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(getHits())), + }; registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); @@ -84,9 +78,11 @@ test(`returns 404 if job not found`, async () => { }); test(`returns 401 if not valid job type`, async () => { - mockServer.plugins.elasticsearch - .getCluster('admin') - .callWithInternalUser.mockReturnValue(Promise.resolve(getHits({ jobtype: 'invalidJobType' }))); + mockPlugins.elasticsearch.adminClient = { + callAsInternalUser: jest + .fn() + .mockReturnValue(Promise.resolve(getHits({ jobtype: 'invalidJobType' }))), + }; registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); @@ -101,11 +97,13 @@ test(`returns 401 if not valid job type`, async () => { describe(`when job is incomplete`, () => { const getIncompleteResponse = async () => { - mockServer.plugins.elasticsearch - .getCluster('admin') - .callWithInternalUser.mockReturnValue( - Promise.resolve(getHits({ jobtype: 'unencodedJobType', status: 'pending' })) - ); + mockPlugins.elasticsearch.adminClient = { + callAsInternalUser: jest + .fn() + .mockReturnValue( + Promise.resolve(getHits({ jobtype: 'unencodedJobType', status: 'pending' })) + ), + }; registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); @@ -145,9 +143,9 @@ describe(`when job is failed`, () => { status: 'failed', output: { content: 'job failure message' }, }); - mockServer.plugins.elasticsearch - .getCluster('admin') - .callWithInternalUser.mockReturnValue(Promise.resolve(hits)); + mockPlugins.elasticsearch.adminClient = { + callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), + }; registerJobInfoRoutes(mockServer, mockPlugins, exportTypesRegistry, mockLogger); @@ -190,9 +188,9 @@ describe(`when job is completed`, () => { title, }, }); - mockServer.plugins.elasticsearch - .getCluster('admin') - .callWithInternalUser.mockReturnValue(Promise.resolve(hits)); + mockPlugins.elasticsearch.adminClient = { + callAsInternalUser: jest.fn().mockReturnValue(Promise.resolve(hits)), + }; registerJobInfoRoutes(mockServer, mockPlugins, 
exportTypesRegistry, mockLogger); diff --git a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts index f9b731db5a702b..daabc2cf22f4e2 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/jobs.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/jobs.ts @@ -38,7 +38,8 @@ export function registerJobInfoRoutes( exportTypesRegistry: ExportTypesRegistry, logger: Logger ) { - const jobsQuery = jobsQueryFactory(server); + const { elasticsearch } = plugins; + const jobsQuery = jobsQueryFactory(server, elasticsearch); const getRouteConfig = getRouteConfigFactoryManagementPre(server, plugins, logger); const getRouteConfigDownload = getRouteConfigFactoryDownloadPre(server, plugins, logger); @@ -137,7 +138,7 @@ export function registerJobInfoRoutes( }); // trigger a download of the output from a job - const jobResponseHandler = jobResponseHandlerFactory(server, exportTypesRegistry); + const jobResponseHandler = jobResponseHandlerFactory(server, elasticsearch, exportTypesRegistry); server.route({ path: `${MAIN_ENTRY}/download/{docId}`, method: 'GET', diff --git a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts index 3ba7aa30eedcb7..62f0d0a72b389a 100644 --- a/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts +++ b/x-pack/legacy/plugins/reporting/server/routes/lib/job_response_handler.ts @@ -5,10 +5,11 @@ */ import Boom from 'boom'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { ResponseToolkit } from 'hapi'; -import { ServerFacade, ExportTypesRegistry } from '../../../types'; -import { jobsQueryFactory } from '../../lib/jobs_query'; import { WHITELISTED_JOB_CONTENT_TYPES } from '../../../common/constants'; +import { ExportTypesRegistry, ServerFacade } from '../../../types'; +import { jobsQueryFactory } from '../../lib/jobs_query'; import { getDocumentPayloadFactory } from './get_document_payload'; interface JobResponseHandlerParams { @@ -21,9 +22,10 @@ interface JobResponseHandlerOpts { export function jobResponseHandlerFactory( server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, exportTypesRegistry: ExportTypesRegistry ) { - const jobsQuery = jobsQueryFactory(server); + const jobsQuery = jobsQueryFactory(server, elasticsearch); const getDocumentPayload = getDocumentPayloadFactory(server, exportTypesRegistry); return function jobResponseHandler( diff --git a/x-pack/legacy/plugins/reporting/types.d.ts b/x-pack/legacy/plugins/reporting/types.d.ts index 2c09b97f4e8ab7..76d4fd90996661 100644 --- a/x-pack/legacy/plugins/reporting/types.d.ts +++ b/x-pack/legacy/plugins/reporting/types.d.ts @@ -4,15 +4,16 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import { ResponseObject } from 'hapi'; import { EventEmitter } from 'events'; +import { ResponseObject } from 'hapi'; +import { ElasticsearchServiceSetup } from 'kibana/server'; import { Legacy } from 'kibana'; import { CallCluster } from '../../../../src/legacy/core_plugins/elasticsearch'; import { CancellationToken } from './common/cancellation_token'; -import { LevelLogger } from './server/lib/level_logger'; import { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; import { BrowserType } from './server/browsers/types'; -import { LegacySetup } from './server/plugin'; +import { LevelLogger } from './server/lib/level_logger'; +import { LegacySetup, ReportingSetupDeps } from './server/plugin'; export type ReportingPlugin = object; // For Plugin contract @@ -276,10 +277,12 @@ export interface ESQueueInstance { export type CreateJobFactory<CreateJobFnType> = ( server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger ) => CreateJobFnType; export type ExecuteJobFactory<ExecuteJobFnType> = ( server: ServerFacade, + elasticsearch: ElasticsearchServiceSetup, logger: LevelLogger, opts: { browserDriverFactory: HeadlessChromiumDriverFactory; @@ -302,10 +305,10 @@ export interface ExportTypeDefinition< validLicenses: string[]; } -export { ExportTypesRegistry } from './server/lib/export_types_registry'; +export { CancellationToken } from './common/cancellation_token'; export { HeadlessChromiumDriver } from './server/browsers/chromium/driver'; export { HeadlessChromiumDriverFactory } from './server/browsers/chromium/driver_factory'; -export { CancellationToken } from './common/cancellation_token'; +export { ExportTypesRegistry } from './server/lib/export_types_registry'; export { LevelLogger as Logger };
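
---

Editor's note on the pattern applied throughout this patch: call sites stop reaching through the legacy `server.plugins.elasticsearch.getCluster('admin' | 'data')` facade and instead receive the New Platform `ElasticsearchServiceSetup`, whose `adminClient`/`dataClient` expose `callAsInternalUser` (replacing `callWithInternalUser`) and `asScoped(request).callAsCurrentUser` (replacing `callWithRequest`). The sketch below is illustrative only: the `*Like` type stubs and the `jobsQuerySketch` name are invented for this note and merely approximate Kibana's real contracts, with the jobs_query.ts hunk above as the reference.

// Minimal TypeScript sketch of the migration, under the assumptions stated above.
type APICaller = (endpoint: string, clientParams?: Record<string, any>) => Promise<any>;

interface ScopedClusterClientLike {
  callAsCurrentUser: APICaller; // runs as the end user bound to the wrapped request
}

interface ClusterClientLike {
  callAsInternalUser: APICaller; // runs as the Kibana internal user
  asScoped(request: unknown): ScopedClusterClientLike;
}

interface ElasticsearchServiceSetupLike {
  adminClient: ClusterClientLike; // used in this patch for job-queue reads and writes
  dataClient: ClusterClientLike; // used for search-backed CSV export and the cluster-settings check
}

// Before: const { callWithInternalUser } = server.plugins.elasticsearch.getCluster('admin');
// After, mirroring jobs_query.ts above:
function jobsQuerySketch(elasticsearch: ElasticsearchServiceSetupLike) {
  const { callAsInternalUser } = elasticsearch.adminClient;
  return (index: string) => callAsInternalUser('search', { index, body: { size: 10 } });
}

Threading `elasticsearch` explicitly through the factory signatures (`createJobFactory`, `executeJobFactory`, `jobsQueryFactory`, `createQueueFactory`) rather than reading it off `server.plugins` is also what lets the updated tests substitute plain objects such as `{ adminClient: { callAsInternalUser: jest.fn() } }` without standing up the legacy elasticsearch plugin.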