Skip to content

Commit

Permalink
Merge branch 'master' into alerting/rename-reload-alerts
Browse files Browse the repository at this point in the history
* master:
  Ability to filter alerts by string parameters (#92036)
  [APM] Fix for flaky correlations API test (#91673) (#92094)
  [Enterprise Search] Migrate shared role mapping components (#91723)
  [file_upload] move ml Importer classes to file_upload plugin (#91559)
  [Discover] Always show the "hide missing fields" toggle (#91889)
  v2 migrations should exit process on corrupt saved object document (#91465)
  [ML] Data Frame Analytics exploration page: filters improvements (#91748)
  [ML] Data Frame Analytics: Improved error handling for scatterplot matrix. (#91993)
  [coverage] speed up merging results of functional tests (#92111)
  Adds a Reason indicator to the onClose handler in AddAlert and EditAlert (#92149)
  • Loading branch information
gmmorris committed Feb 22, 2021
2 parents a61dd80 + 0c2495a commit 441445b
Show file tree
Hide file tree
Showing 92 changed files with 2,433 additions and 1,414 deletions.
2 changes: 2 additions & 0 deletions docs/api/alerts/find.asciidoc
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,8 @@ Retrieve a paginated set of alerts based on condition.
NOTE: As alerts change in {kib}, the results on each page of the response also
change. Use the find API for traditional paginated results, but avoid using it to export large amounts of data.

NOTE: Alert `params` are stored as {ref}/flattened.html[flattened] and analyzed as `keyword`.

[[alerts-api-find-request-codes]]
==== Response code

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -722,7 +722,7 @@ describe('DocumentMigrator', () => {
});
});

it('logs the document and transform that failed', () => {
it('logs the original error and throws a transform error if a document transform fails', () => {
const log = mockLogger;
const migrator = new DocumentMigrator({
...testOpts(),
Expand All @@ -747,10 +747,13 @@ describe('DocumentMigrator', () => {
migrator.migrate(_.cloneDeep(failedDoc));
expect('Did not throw').toEqual('But it should have!');
} catch (error) {
expect(error.message).toMatch(/Dang diggity!/);
const warning = loggingSystemMock.collect(mockLoggerFactory).warn[0][0];
expect(warning).toContain(JSON.stringify(failedDoc));
expect(warning).toContain('dog:1.2.3');
expect(error.message).toMatchInlineSnapshot(`
"Failed to transform document smelly. Transform: dog:1.2.3
Doc: {\\"id\\":\\"smelly\\",\\"type\\":\\"dog\\",\\"attributes\\":{},\\"migrationVersion\\":{}}"
`);
expect(loggingSystemMock.collect(mockLoggerFactory).error[0][0]).toMatchInlineSnapshot(
`[Error: Dang diggity!]`
);
}
});

Expand Down Expand Up @@ -779,7 +782,7 @@ describe('DocumentMigrator', () => {
};
migrator.migrate(doc);
expect(loggingSystemMock.collect(mockLoggerFactory).info[0][0]).toEqual(logTestMsg);
expect(loggingSystemMock.collect(mockLoggerFactory).warn[1][0]).toEqual(logTestMsg);
expect(loggingSystemMock.collect(mockLoggerFactory).warn[0][0]).toEqual(logTestMsg);
});

test('extracts the latest migration version info', () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -678,10 +678,11 @@ function wrapWithTry(
} catch (error) {
const failedTransform = `${type.name}:${version}`;
const failedDoc = JSON.stringify(doc);
log.warn(
log.error(error);

throw new Error(
`Failed to transform document ${doc?.id}. Transform: ${failedTransform}\nDoc: ${failedDoc}`
);
throw error;
}
};
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -58,12 +58,12 @@ describe('migrateRawDocs', () => {
expect(transform).toHaveBeenNthCalledWith(2, obj2);
});

test('passes invalid docs through untouched and logs error', async () => {
test('throws when encountering a corrupt saved object document', async () => {
const logger = createSavedObjectsMigrationLoggerMock();
const transform = jest.fn<any, any>((doc: any) => [
set(_.cloneDeep(doc), 'attributes.name', 'TADA'),
]);
const result = await migrateRawDocs(
const result = migrateRawDocs(
new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
transform,
[
Expand All @@ -73,25 +73,11 @@ describe('migrateRawDocs', () => {
logger
);

expect(result).toEqual([
{ _id: 'foo:b', _source: { type: 'a', a: { name: 'AAA' } } },
{
_id: 'c:d',
_source: { type: 'c', c: { name: 'TADA' }, migrationVersion: {}, references: [] },
},
]);

const obj2 = {
id: 'd',
type: 'c',
attributes: { name: 'DDD' },
migrationVersion: {},
references: [],
};
expect(transform).toHaveBeenCalledTimes(1);
expect(transform).toHaveBeenCalledWith(obj2);
expect(result).rejects.toMatchInlineSnapshot(
`[Error: Unable to migrate the corrupt saved object document with _id: 'foo:b'.]`
);

expect(logger.error).toBeCalledTimes(1);
expect(transform).toHaveBeenCalledTimes(0);
});

test('handles when one document is transformed into multiple documents', async () => {
Expand Down
25 changes: 19 additions & 6 deletions src/core/server/saved_objects/migrations/core/migrate_raw_docs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,23 @@ import {
import { MigrateAndConvertFn } from './document_migrator';
import { SavedObjectsMigrationLogger } from '.';

/**
 * Thrown by saved object migrations when a raw document cannot be converted
 * into a saved object. A document is considered corrupt when:
 * - it has no `[type]` property containing the type attributes, or
 * - the type or namespace encoded in its `_id` disagrees with the document's
 *   `type`/`namespace` properties
 */
export class CorruptSavedObjectError extends Error {
  // Raw `_id` of the corrupt document.
  public readonly rawId: string;

  constructor(rawId: string) {
    super(`Unable to migrate the corrupt saved object document with _id: '${rawId}'.`);
    this.rawId = rawId;

    // Repair the prototype chain so `instanceof` keeps working when the
    // compiler downlevels class extension of built-ins, see:
    // https://github.com/Microsoft/TypeScript/wiki/Breaking-Changes#extending-built-ins-like-error-array-and-map-may-no-longer-work
    Object.setPrototypeOf(this, CorruptSavedObjectError.prototype);
  }
}

/**
* Applies the specified migration function to every saved object document in the list
of raw docs. Raw docs that are not valid saved objects cause a `CorruptSavedObjectError` to be thrown.
Expand All @@ -35,7 +52,7 @@ export async function migrateRawDocs(
const migrateDocWithoutBlocking = transformNonBlocking(migrateDoc);
const processedDocs = [];
for (const raw of rawDocs) {
const options = { namespaceTreatment: 'lax' as 'lax' };
const options = { namespaceTreatment: 'lax' as const };
if (serializer.isRawSavedObject(raw, options)) {
const savedObject = serializer.rawToSavedObject(raw, options);
savedObject.migrationVersion = savedObject.migrationVersion || {};
Expand All @@ -48,11 +65,7 @@ export async function migrateRawDocs(
)
);
} else {
log.error(
`Error: Unable to migrate the corrupt Saved Object document ${raw._id}. To prevent Kibana from performing a migration on every restart, please delete or fix this document by ensuring that the namespace and type in the document's id matches the values in the namespace and type fields.`,
{ rawDocument: raw }
);
processedDocs.push(raw);
throw new CorruptSavedObjectError(raw._id);
}
}
return processedDocs;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,7 @@ describe('KibanaMigrator', () => {
const migrator = new KibanaMigrator(options);
migrator.prepareMigrations();
await expect(migrator.runMigrations()).rejects.toMatchInlineSnapshot(`
[Error: Unable to complete saved object migrations for the [.my-index] index. Please check the health of your Elasticsearch cluster and try again. Error: Reindex failed with the following error:
[Error: Unable to complete saved object migrations for the [.my-index] index. Error: Reindex failed with the following error:
{"_tag":"Some","value":{"type":"elatsicsearch_exception","reason":"task failed with an error"}}]
`);
expect(loggingSystemMock.collect(options.logger).error[0][0]).toMatchInlineSnapshot(`
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -314,20 +314,25 @@ describe('migrationsStateActionMachine', () => {
next: () => {
throw new ResponseError(
elasticsearchClientMock.createApiResponse({
body: { error: { type: 'snapshot_in_progress_exception', reason: 'error reason' } },
body: {
error: {
type: 'snapshot_in_progress_exception',
reason: 'Cannot delete indices that are being snapshotted',
},
},
})
);
},
})
).rejects.toMatchInlineSnapshot(
`[Error: Unable to complete saved object migrations for the [.my-so-index] index. Please check the health of your Elasticsearch cluster and try again. ResponseError: snapshot_in_progress_exception]`
`[Error: Unable to complete saved object migrations for the [.my-so-index] index. Please check the health of your Elasticsearch cluster and try again. Error: [snapshot_in_progress_exception]: Cannot delete indices that are being snapshotted]`
);
expect(loggingSystemMock.collect(mockLogger)).toMatchInlineSnapshot(`
Object {
"debug": Array [],
"error": Array [
Array [
"[.my-so-index] [snapshot_in_progress_exception]: error reason",
"[.my-so-index] [snapshot_in_progress_exception]: Cannot delete indices that are being snapshotted",
],
Array [
"[.my-so-index] migration failed, dumping execution log:",
Expand All @@ -352,7 +357,7 @@ describe('migrationsStateActionMachine', () => {
},
})
).rejects.toMatchInlineSnapshot(
`[Error: Unable to complete saved object migrations for the [.my-so-index] index. Please check the health of your Elasticsearch cluster and try again. Error: this action throws]`
`[Error: Unable to complete saved object migrations for the [.my-so-index] index. Error: this action throws]`
);
expect(loggingSystemMock.collect(mockLogger)).toMatchInlineSnapshot(`
Object {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import { errors as EsErrors } from '@elastic/elasticsearch';
import * as Option from 'fp-ts/lib/Option';
import { performance } from 'perf_hooks';
import { Logger, LogMeta } from '../../logging';
import { CorruptSavedObjectError } from '../migrations/core/migrate_raw_docs';
import { Model, Next, stateActionMachine } from './state_action_machine';
import { State } from './types';

Expand Down Expand Up @@ -153,12 +154,27 @@ export async function migrationStateActionMachine({
logger.error(
logMessagePrefix + `[${e.body?.error?.type}]: ${e.body?.error?.reason ?? e.message}`
);
dumpExecutionLog(logger, logMessagePrefix, executionLog);
throw new Error(
`Unable to complete saved object migrations for the [${
initialState.indexPrefix
}] index. Please check the health of your Elasticsearch cluster and try again. Error: [${
e.body?.error?.type
}]: ${e.body?.error?.reason ?? e.message}`
);
} else {
logger.error(e);

dumpExecutionLog(logger, logMessagePrefix, executionLog);
if (e instanceof CorruptSavedObjectError) {
throw new Error(
`${e.message} To allow migrations to proceed, please delete this document from the [${initialState.indexPrefix}_${initialState.kibanaVersion}_001] index.`
);
}

throw new Error(
`Unable to complete saved object migrations for the [${initialState.indexPrefix}] index. ${e}`
);
}
dumpExecutionLog(logger, logMessagePrefix, executionLog);
throw new Error(
`Unable to complete saved object migrations for the [${initialState.indexPrefix}] index. Please check the health of your Elasticsearch cluster and try again. ${e}`
);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,9 @@ const mockMappings = {
},
},
},
params: {
type: 'flattened',
},
},
},
hiddenType: {
Expand Down Expand Up @@ -168,6 +171,12 @@ describe('Filter Utils', () => {
).toEqual(esKuery.fromKueryExpression('alert.actions:{ actionTypeId: ".server-log" }'));
});

// `params` is mapped as `flattened` (see mockMappings above), so an arbitrary
// sub-path such as `params.foo` must be accepted even though it has no
// concrete per-field mapping entry, and the `attributes` segment is stripped.
test('Assemble filter for flattened fields', () => {
  expect(
    validateConvertFilterToKueryNode(['alert'], 'alert.attributes.params.foo:bar', mockMappings)
  ).toEqual(esKuery.fromKueryExpression('alert.params.foo:bar'));
});

test('Lets make sure that we are throwing an exception if we get an error', () => {
expect(() => {
validateConvertFilterToKueryNode(
Expand Down
36 changes: 23 additions & 13 deletions src/core/server/saved_objects/service/lib/filter_utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -207,23 +207,33 @@ export const hasFilterKeyError = (

export const fieldDefined = (indexMappings: IndexMapping, key: string): boolean => {
const mappingKey = 'properties.' + key.split('.').join('.properties.');
const potentialKey = get(indexMappings, mappingKey);
if (get(indexMappings, mappingKey) != null) {
return true;
}

// If the `mappingKey` does not match a valid path, before returning null,
// If the `mappingKey` does not match a valid path, before returning false,
// we want to check and see if the intended path was for a multi-field
// such as `x.attributes.field.text` where `field` is mapped to both text
// and keyword
if (potentialKey == null) {
const propertiesAttribute = 'properties';
const indexOfLastProperties = mappingKey.lastIndexOf(propertiesAttribute);
const fieldMapping = mappingKey.substr(0, indexOfLastProperties);
const fieldType = mappingKey.substr(
mappingKey.lastIndexOf(propertiesAttribute) + `${propertiesAttribute}.`.length
);
const mapping = `${fieldMapping}fields.${fieldType}`;

return get(indexMappings, mapping) != null;
} else {
const propertiesAttribute = 'properties';
const indexOfLastProperties = mappingKey.lastIndexOf(propertiesAttribute);
const fieldMapping = mappingKey.substr(0, indexOfLastProperties);
const fieldType = mappingKey.substr(
mappingKey.lastIndexOf(propertiesAttribute) + `${propertiesAttribute}.`.length
);
const mapping = `${fieldMapping}fields.${fieldType}`;
if (get(indexMappings, mapping) != null) {
return true;
}

// If the path is for a flattened type field, we'll assume the mappings are defined.
const keys = key.split('.');
for (let i = 0; i < keys.length; i++) {
const path = `properties.${keys.slice(0, i + 1).join('.properties.')}`;
if (get(indexMappings, path)?.type === 'flattened') {
return true;
}
}

return false;
};
14 changes: 0 additions & 14 deletions src/dev/code_coverage/shell_scripts/fix_html_reports_parallel.sh

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,12 @@ TEAM_ASSIGN_PATH=$5
# Build team assignments dat file
node scripts/generate_team_assignments.js --verbose --src .github/CODEOWNERS --dest $TEAM_ASSIGN_PATH

# Need to override COVERAGE_INGESTION_KIBANA_ROOT since json file has original intake worker path
export COVERAGE_INGESTION_KIBANA_ROOT=/dev/shm/workspace/kibana

for x in functional jest; do
echo "### Ingesting coverage for ${x}"

COVERAGE_SUMMARY_FILE=target/kibana-coverage/${x}-combined/coverage-summary.json

if [[ $x == "jest" ]]; then
# Need to override COVERAGE_INGESTION_KIBANA_ROOT since json file has original intake worker path
export COVERAGE_INGESTION_KIBANA_ROOT=/dev/shm/workspace/kibana
fi
# running in background to speed up ingestion
node scripts/ingest_coverage.js --verbose --path ${COVERAGE_SUMMARY_FILE} --vcsInfoPath ./VCS_INFO.txt --teamAssignmentsPath $TEAM_ASSIGN_PATH &
done
Expand Down
20 changes: 20 additions & 0 deletions src/dev/code_coverage/shell_scripts/merge_functional.sh
Original file line number Diff line number Diff line change
Expand Up @@ -3,5 +3,25 @@
COVERAGE_TEMP_DIR=/tmp/extracted_coverage/target/kibana-coverage/
export COVERAGE_TEMP_DIR

# Remember where the original checkout lives so merged results can be copied back.
checkoutDir="$(pwd)"
echo "### checkoutDir=${checkoutDir}"

# Work inside shared memory for faster I/O while merging coverage data.
coverageBasePath="/dev/shm/workspace"
echo "### Clone kibana to ${coverageBasePath}"
mkdir -p "$coverageBasePath/kibana"
# Mirror the checkout into the shm workspace (--delete keeps the copy exact).
rsync -ahSD --ignore-errors --force --delete --stats ./ "$coverageBasePath/kibana/"
cd "$coverageBasePath/kibana"

echo "### bootstrap from x-pack folder"
cd x-pack
yarn kbn bootstrap
# Return to project root
cd ..
echo "### Merge coverage reports"
yarn nyc report --nycrc-path src/dev/code_coverage/nyc_config/nyc.functional.config.js

# Copy the merged report back into the original checkout for later ingestion.
echo "### Copy 'target' to ${checkoutDir}"
rsync -ahSD --ignore-errors --force --delete --stats target "$checkoutDir/"

echo "### Back to $checkoutDir"
cd "$checkoutDir"
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,13 @@ describe('DiscoverFieldSearch', () => {
expect(badge.text()).toEqual('0');
});

// Opens the field-filter popover and verifies the "missing" toggle is
// rendered when the new fields API is enabled (useNewFieldsApi: true).
test('missing switch appears with new fields api', () => {
  const component = mountComponent({ ...defaultProps, useNewFieldsApi: true });
  const btn = findTestSubject(component, 'toggleFieldFilterButton');
  btn.simulate('click');
  expect(findTestSubject(component, 'missingSwitch').exists()).toBeTruthy();
});

test('change in filters triggers onChange', () => {
const onChange = jest.fn();
const component = mountComponent({ ...defaultProps, ...{ onChange } });
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -232,9 +232,6 @@ export function DiscoverFieldSearch({ onChange, value, types, useNewFieldsApi }:
};

const footer = () => {
if (useNewFieldsApi) {
return null;
}
return (
<EuiPopoverFooter>
<EuiSwitch
Expand Down

0 comments on commit 441445b

Please sign in to comment.