From 8de93e2dbfea3882554f7de32a34868e0d03b0e9 Mon Sep 17 00:00:00 2001 From: Kerry Gallagher Date: Thu, 26 Sep 2019 12:00:19 +0100 Subject: [PATCH 01/10] Add partitioning to log rate job --- .../modules/logs_ui_analysis/ml/log_entry_rate.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/x-pack/legacy/plugins/ml/server/models/data_recognizer/modules/logs_ui_analysis/ml/log_entry_rate.json b/x-pack/legacy/plugins/ml/server/models/data_recognizer/modules/logs_ui_analysis/ml/log_entry_rate.json index b59042ce2053fd..81f8b2bb522204 100644 --- a/x-pack/legacy/plugins/ml/server/models/data_recognizer/modules/logs_ui_analysis/ml/log_entry_rate.json +++ b/x-pack/legacy/plugins/ml/server/models/data_recognizer/modules/logs_ui_analysis/ml/log_entry_rate.json @@ -8,10 +8,12 @@ { "detector_description": "count", "function": "count", - "detector_index": 0 + "detector_index": 0, + "partition_field_name": "event.dataset", + "use_null": true } ], - "influencers": [] + "influencers": ["event.dataset"] }, "analysis_limits": { "model_memory_limit": "10mb" From 6504c2384f5278f4df12e4eed285612377fbdb61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20St=C3=BCrmer?= Date: Fri, 27 Sep 2019 01:16:59 +0200 Subject: [PATCH 02/10] Adapt log rate result API to return partitioned values --- .../log_analysis/results/log_entry_rate.ts | 21 ++- .../server/lib/log_analysis/log_analysis.ts | 133 ++++++++++++------ 2 files changed, 100 insertions(+), 54 deletions(-) diff --git a/x-pack/legacy/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts b/x-pack/legacy/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts index 2dcaf35cc41d9f..3af07980910b8a 100644 --- a/x-pack/legacy/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts +++ b/x-pack/legacy/plugins/infra/common/http_api/log_analysis/results/log_entry_rate.ts @@ -6,13 +6,7 @@ import * as rt from 'io-ts'; -import { - badRequestErrorRT, - conflictErrorRT, - forbiddenErrorRT, - metricStatisticsRT, - timeRangeRT, -} from '../../shared'; +import { badRequestErrorRT, conflictErrorRT, forbiddenErrorRT, timeRangeRT } from '../../shared'; export const LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH = '/api/infra/log_analysis/results/log_entry_rate'; @@ -43,12 +37,15 @@ export const logEntryRateAnomaly = rt.type({ typicalLogEntryRate: rt.number, }); -export const logEntryRateHistogramBucket = rt.type({ +export const logEntryRateDataSetRT = rt.type({ + analysisBucketCount: rt.number, anomalies: rt.array(logEntryRateAnomaly), - duration: rt.number, - logEntryRateStats: metricStatisticsRT, - modelLowerBoundStats: metricStatisticsRT, - modelUpperBoundStats: metricStatisticsRT, + averageActualLogEntryRate: rt.number, + dataSetId: rt.string, +}); + +export const logEntryRateHistogramBucket = rt.type({ + dataSets: rt.array(logEntryRateDataSetRT), startTime: rt.number, }); diff --git a/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts b/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts index ac7f7b6df8d62f..007174a7c28321 100644 --- a/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts +++ b/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts @@ -15,6 +15,7 @@ import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../adapters import { NoLogRateResultsIndexError } from './errors'; const ML_ANOMALY_INDEX_PREFIX = '.ml-anomalies-'; +const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; export class InfraLogAnalysis { constructor( @@ -38,6 +39,7 @@ export 
class InfraLogAnalysis { ) { const logRateJobId = this.getJobIds(request, sourceId).logEntryRate; + // TODO: fetch all batches const mlModelPlotResponse = await this.libs.framework.callWithRequest(request, 'search', { allowNoIndices: true, body: { @@ -68,10 +70,28 @@ export class InfraLogAnalysis { }, }, aggs: { - timestamp_buckets: { - date_histogram: { - field: 'timestamp', - fixed_interval: `${bucketDuration}ms`, + timestamp_data_set_buckets: { + composite: { + size: COMPOSITE_AGGREGATION_BATCH_SIZE, + sources: [ + { + timestamp: { + date_histogram: { + field: 'timestamp', + fixed_interval: `${bucketDuration}ms`, + order: 'asc', + }, + }, + }, + { + data_set: { + terms: { + field: 'partition_field_value', + order: 'asc', + }, + }, + }, + ], }, aggs: { filter_model_plot: { @@ -81,18 +101,8 @@ export class InfraLogAnalysis { }, }, aggs: { - stats_model_lower: { - stats: { - field: 'model_lower', - }, - }, - stats_model_upper: { - stats: { - field: 'model_upper', - }, - }, - stats_actual: { - stats: { + average_actual: { + avg: { field: 'actual', }, }, @@ -137,24 +147,63 @@ export class InfraLogAnalysis { const mlModelPlotBuckets = pipe( logRateModelPlotResponseRT.decode(mlModelPlotResponse), - map(response => response.aggregations.timestamp_buckets.buckets), + map(response => response.aggregations.timestamp_data_set_buckets.buckets), fold(throwErrors(createPlainError), identity) ); - return mlModelPlotBuckets.map(bucket => ({ - anomalies: bucket.filter_records.top_hits_record.hits.hits.map(({ _source: record }) => ({ - actualLogEntryRate: record.actual[0], - anomalyScore: record.record_score, - duration: record.bucket_span * 1000, - startTime: record.timestamp, - typicalLogEntryRate: record.typical[0], - })), - duration: bucketDuration, - logEntryRateStats: bucket.filter_model_plot.stats_actual, - modelLowerBoundStats: bucket.filter_model_plot.stats_model_lower, - modelUpperBoundStats: bucket.filter_model_plot.stats_model_upper, - startTime: bucket.key, - })); + return mlModelPlotBuckets.reduce< + Array<{ + dataSets: Array<{ + analysisBucketCount: number; + anomalies: Array<{ + actualLogEntryRate: number; + anomalyScore: number; + duration: number; + startTime: number; + typicalLogEntryRate: number; + }>; + averageActualLogEntryRate: number; + dataSetId: string; + }>; + startTime: number; + }> + >((histogramBuckets, timestampDataSetBucket) => { + const previousHistogramBucket = histogramBuckets[histogramBuckets.length - 1]; + const dataSet = { + analysisBucketCount: timestampDataSetBucket.filter_model_plot.doc_count, + anomalies: timestampDataSetBucket.filter_records.top_hits_record.hits.hits.map( + ({ _source: record }) => ({ + actualLogEntryRate: record.actual[0], + anomalyScore: record.record_score, + duration: record.bucket_span * 1000, + startTime: record.timestamp, + typicalLogEntryRate: record.typical[0], + }) + ), + averageActualLogEntryRate: timestampDataSetBucket.filter_model_plot.average_actual.value, + dataSetId: timestampDataSetBucket.key.data_set, + }; + if ( + previousHistogramBucket && + previousHistogramBucket.startTime === timestampDataSetBucket.key.timestamp + ) { + return [ + ...histogramBuckets.slice(0, -1), + { + ...previousHistogramBucket, + dataSets: [...previousHistogramBucket.dataSets, dataSet], + }, + ]; + } else { + return [ + ...histogramBuckets, + { + dataSets: [dataSet], + startTime: timestampDataSetBucket.key.timestamp, + }, + ]; + } + }, []); } } @@ -166,20 +215,22 @@ const logRateMlRecordRT = rt.type({ typical: rt.array(rt.number), }); -const 
logRateStatsAggregationRT = rt.type({ - avg: rt.union([rt.number, rt.null]), - count: rt.number, - max: rt.union([rt.number, rt.null]), - min: rt.union([rt.number, rt.null]), - sum: rt.number, +const metricAggregationRT = rt.type({ + value: rt.number, +}); + +const compositeTimestampDataSetKeyRT = rt.type({ + data_set: rt.string, + timestamp: rt.number, }); const logRateModelPlotResponseRT = rt.type({ aggregations: rt.type({ - timestamp_buckets: rt.type({ + timestamp_data_set_buckets: rt.type({ + after_key: compositeTimestampDataSetKeyRT, buckets: rt.array( rt.type({ - key: rt.number, + key: compositeTimestampDataSetKeyRT, filter_records: rt.type({ doc_count: rt.number, top_hits_record: rt.type({ @@ -194,9 +245,7 @@ const logRateModelPlotResponseRT = rt.type({ }), filter_model_plot: rt.type({ doc_count: rt.number, - stats_actual: logRateStatsAggregationRT, - stats_model_lower: logRateStatsAggregationRT, - stats_model_upper: logRateStatsAggregationRT, + average_actual: metricAggregationRT, }), }) ), From df4845a22271c552f3829bcf455e3e30632c7a3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20St=C3=BCrmer?= Date: Sat, 28 Sep 2019 01:29:18 +0200 Subject: [PATCH 03/10] Move log entry rate query to separate file for readability --- .../server/lib/log_analysis/log_analysis.ts | 159 ++--------------- .../server/lib/log_analysis/queries/index.ts | 7 + .../log_analysis/queries/log_entry_rate.ts | 164 ++++++++++++++++++ 3 files changed, 183 insertions(+), 147 deletions(-) create mode 100644 x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/index.ts create mode 100644 x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts diff --git a/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts b/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts index 007174a7c28321..260aa60c0f68bd 100644 --- a/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts +++ b/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts @@ -4,8 +4,6 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import * as rt from 'io-ts'; - import { pipe } from 'fp-ts/lib/pipeable'; import { map, fold } from 'fp-ts/lib/Either'; import { identity } from 'fp-ts/lib/function'; @@ -13,8 +11,8 @@ import { getJobId } from '../../../common/log_analysis'; import { throwErrors, createPlainError } from '../../../common/runtime_types'; import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../adapters/framework'; import { NoLogRateResultsIndexError } from './errors'; +import { logRateModelPlotResponseRT, createLogEntryRateQuery } from './queries'; -const ML_ANOMALY_INDEX_PREFIX = '.ml-anomalies-'; const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; export class InfraLogAnalysis { @@ -40,104 +38,17 @@ export class InfraLogAnalysis { const logRateJobId = this.getJobIds(request, sourceId).logEntryRate; // TODO: fetch all batches - const mlModelPlotResponse = await this.libs.framework.callWithRequest(request, 'search', { - allowNoIndices: true, - body: { - query: { - bool: { - filter: [ - { - range: { - timestamp: { - gte: startTime, - lt: endTime, - }, - }, - }, - { - terms: { - result_type: ['model_plot', 'record'], - }, - }, - { - term: { - detector_index: { - value: 0, - }, - }, - }, - ], - }, - }, - aggs: { - timestamp_data_set_buckets: { - composite: { - size: COMPOSITE_AGGREGATION_BATCH_SIZE, - sources: [ - { - timestamp: { - date_histogram: { - field: 'timestamp', - fixed_interval: `${bucketDuration}ms`, - order: 'asc', - }, - }, - }, - { - data_set: { - terms: { - field: 'partition_field_value', - order: 'asc', - }, - }, - }, - ], - }, - aggs: { - filter_model_plot: { - filter: { - term: { - result_type: 'model_plot', - }, - }, - aggs: { - average_actual: { - avg: { - field: 'actual', - }, - }, - }, - }, - filter_records: { - filter: { - term: { - result_type: 'record', - }, - }, - aggs: { - top_hits_record: { - top_hits: { - _source: Object.keys(logRateMlRecordRT.props), - size: 100, - sort: [ - { - timestamp: 'asc', - }, - ], - }, - }, - }, - }, - }, - }, - }, - }, - ignoreUnavailable: true, - index: `${ML_ANOMALY_INDEX_PREFIX}${logRateJobId}`, - size: 0, - trackScores: false, - trackTotalHits: false, - }); + const mlModelPlotResponse = await this.libs.framework.callWithRequest( + request, + 'search', + createLogEntryRateQuery( + logRateJobId, + startTime, + endTime, + bucketDuration, + COMPOSITE_AGGREGATION_BATCH_SIZE + ) + ); if (mlModelPlotResponse._shards.total === 0) { throw new NoLogRateResultsIndexError( @@ -206,49 +117,3 @@ export class InfraLogAnalysis { }, []); } } - -const logRateMlRecordRT = rt.type({ - actual: rt.array(rt.number), - bucket_span: rt.number, - record_score: rt.number, - timestamp: rt.number, - typical: rt.array(rt.number), -}); - -const metricAggregationRT = rt.type({ - value: rt.number, -}); - -const compositeTimestampDataSetKeyRT = rt.type({ - data_set: rt.string, - timestamp: rt.number, -}); - -const logRateModelPlotResponseRT = rt.type({ - aggregations: rt.type({ - timestamp_data_set_buckets: rt.type({ - after_key: compositeTimestampDataSetKeyRT, - buckets: rt.array( - rt.type({ - key: compositeTimestampDataSetKeyRT, - filter_records: rt.type({ - doc_count: rt.number, - top_hits_record: rt.type({ - hits: rt.type({ - hits: rt.array( - rt.type({ - _source: logRateMlRecordRT, - }) - ), - }), - }), - }), - filter_model_plot: rt.type({ - doc_count: rt.number, - average_actual: metricAggregationRT, - }), - }) - ), - }), - }), -}); diff --git a/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/index.ts 
b/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/index.ts new file mode 100644 index 00000000000000..17494212777198 --- /dev/null +++ b/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/index.ts @@ -0,0 +1,7 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +export * from './log_entry_rate'; diff --git a/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts b/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts new file mode 100644 index 00000000000000..e2550115b12919 --- /dev/null +++ b/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts @@ -0,0 +1,164 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ + +import * as rt from 'io-ts'; + +const ML_ANOMALY_INDEX_PREFIX = '.ml-anomalies-'; + +export const createLogEntryRateQuery = ( + logRateJobId: string, + startTime: number, + endTime: number, + bucketDuration: number, + size: number +) => ({ + allowNoIndices: true, + body: { + query: { + bool: { + filter: [ + { + range: { + timestamp: { + gte: startTime, + lt: endTime, + }, + }, + }, + { + terms: { + result_type: ['model_plot', 'record'], + }, + }, + { + term: { + detector_index: { + value: 0, + }, + }, + }, + ], + }, + }, + aggs: { + timestamp_data_set_buckets: { + composite: { + size, + sources: [ + { + timestamp: { + date_histogram: { + field: 'timestamp', + fixed_interval: `${bucketDuration}ms`, + order: 'asc', + }, + }, + }, + { + data_set: { + terms: { + field: 'partition_field_value', + order: 'asc', + }, + }, + }, + ], + }, + aggs: { + filter_model_plot: { + filter: { + term: { + result_type: 'model_plot', + }, + }, + aggs: { + average_actual: { + avg: { + field: 'actual', + }, + }, + }, + }, + filter_records: { + filter: { + term: { + result_type: 'record', + }, + }, + aggs: { + top_hits_record: { + top_hits: { + _source: Object.keys(logRateMlRecordRT.props), + size: 100, + sort: [ + { + timestamp: 'asc', + }, + ], + }, + }, + }, + }, + }, + }, + }, + }, + ignoreUnavailable: true, + index: `${ML_ANOMALY_INDEX_PREFIX}${logRateJobId}`, + size: 0, + trackScores: false, + trackTotalHits: false, +}); + +const logRateMlRecordRT = rt.type({ + actual: rt.array(rt.number), + bucket_span: rt.number, + record_score: rt.number, + timestamp: rt.number, + typical: rt.array(rt.number), +}); + +const metricAggregationRT = rt.type({ + value: rt.number, +}); + +const compositeTimestampDataSetKeyRT = rt.type({ + data_set: rt.string, + timestamp: rt.number, +}); + +export const logRateModelPlotResponseRT = rt.type({ + aggregations: rt.type({ + timestamp_data_set_buckets: rt.intersection([ + rt.type({ + buckets: rt.array( + rt.type({ + key: compositeTimestampDataSetKeyRT, + filter_records: rt.type({ + doc_count: rt.number, + top_hits_record: rt.type({ + hits: rt.type({ + hits: rt.array( + rt.type({ + _source: logRateMlRecordRT, + }) + ), + }), + }), + }), + filter_model_plot: rt.type({ + doc_count: rt.number, + average_actual: metricAggregationRT, + }), + }) + ), + }), + rt.partial({ + after_key: compositeTimestampDataSetKeyRT, + }), + ]), + }), +}); From 3bd2276bac641a49d59797a24a5edb6b6c88b603 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Felix=20St=C3=BCrmer?= Date: Sat, 28 Sep 2019 01:29:53 +0200 Subject: [PATCH 04/10] Adapt log entry rate api consumers --- .../log_entry_rate.tsx | 53 ++++++------------ .../logs/analysis/page_results_content.tsx | 3 +- .../logs/analysis/sections/log_rate/chart.tsx | 55 +------------------ .../logs/analysis/sections/log_rate/table.tsx | 4 +- 4 files changed, 24 insertions(+), 91 deletions(-) diff --git a/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx b/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx index f54402a1a87073..587a1907e8dec7 100644 --- a/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx +++ b/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx @@ -7,12 +7,6 @@ import { useMemo } from 'react'; import { GetLogEntryRateSuccessResponsePayload } from '../../../../../common/http_api/log_analysis'; -interface LogRateAreaSeriesDataPoint { - x: number; - min: number | null; - max: number | null; -} -type LogRateAreaSeries = LogRateAreaSeriesDataPoint[]; type LogRateLineSeriesDataPoint = [number, number | null]; type LogRateLineSeries = LogRateLineSeriesDataPoint[]; type LogRateAnomalySeriesDataPoint = [number, number]; @@ -23,48 +17,37 @@ export const useLogEntryRateGraphData = ({ }: { data: GetLogEntryRateSuccessResponsePayload['data'] | null; }) => { - const areaSeries: LogRateAreaSeries = useMemo(() => { - if (!data || (data && data.histogramBuckets && !data.histogramBuckets.length)) { - return []; - } - return data.histogramBuckets.reduce((acc: any, bucket) => { - acc.push({ - x: bucket.startTime, - min: bucket.modelLowerBoundStats.min, - max: bucket.modelUpperBoundStats.max, - }); - return acc; - }, []); - }, [data]); - const lineSeries: LogRateLineSeries = useMemo(() => { - if (!data || (data && data.histogramBuckets && !data.histogramBuckets.length)) { + if (!data) { return []; } - return data.histogramBuckets.reduce((acc: any, bucket) => { - acc.push([bucket.startTime, bucket.logEntryRateStats.avg]); - return acc; - }, []); + + return data.histogramBuckets.map(bucket => [ + bucket.startTime, + bucket.dataSets.length > 0 ? 
bucket.dataSets[0].averageActualLogEntryRate : null, + ]); }, [data]); const anomalySeries: LogRateAnomalySeries = useMemo(() => { - if (!data || (data && data.histogramBuckets && !data.histogramBuckets.length)) { + if (!data) { return []; } - return data.histogramBuckets.reduce((acc: any, bucket) => { - if (bucket.anomalies.length > 0) { - bucket.anomalies.forEach(anomaly => { - acc.push([anomaly.startTime, anomaly.actualLogEntryRate]); - }); - return acc; - } else { - return acc; + + return data.histogramBuckets.reduce>((acc, bucket) => { + if (bucket.dataSets.length === 0) { + return []; } + + return [ + ...acc, + ...bucket.dataSets[0].anomalies.map( + anomaly => [anomaly.startTime, anomaly.actualLogEntryRate] as [number, number] + ), + ]; }, []); }, [data]); return { - areaSeries, lineSeries, anomalySeries, }; diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx index 3629413d6d30c5..058c72839946b7 100644 --- a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx +++ b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx @@ -128,7 +128,8 @@ export const AnalysisResultsContent = ({ } else { if (logEntryRate.histogramBuckets && logEntryRate.histogramBuckets.length) { return logEntryRate.histogramBuckets.reduce( - (acc, bucket) => acc + bucket.anomalies.length, + (acc, bucket) => + acc + (bucket.dataSets.length > 0 ? bucket.dataSets[0].anomalies.length : 0), 0 ); } else { diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx index 0d703420e7412a..6780922ba37090 100644 --- a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx +++ b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx @@ -4,7 +4,7 @@ * you may not use this file except in compliance with the Elastic License. 
*/ -import React, { useMemo, useCallback, useState } from 'react'; +import React, { useMemo, useCallback } from 'react'; import { i18n } from '@kbn/i18n'; import moment from 'moment'; import { @@ -12,20 +12,17 @@ import { Chart, getAxisId, getSpecId, - AreaSeries, LineSeries, niceTimeFormatter, Settings, TooltipValue, } from '@elastic/charts'; -import { EuiFlexGroup, EuiFlexItem, EuiCheckbox } from '@elastic/eui'; import { getColorsMap, isDarkMode, getChartTheme } from '../../chart_helpers'; import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate'; import { useLogEntryRateGraphData } from '../../../../../containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate'; import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting'; import { TimeRange } from '../../../../../../common/http_api/shared/time_range'; -const areaSeriesColour = 'rgb(224, 237, 255)'; const lineSeriesColour = 'rgb(49, 133, 252)'; interface Props { @@ -35,7 +32,7 @@ interface Props { } export const ChartView = ({ data, setTimeRange, timeRange }: Props) => { - const { areaSeries, lineSeries, anomalySeries } = useLogEntryRateGraphData({ data }); + const { lineSeries, anomalySeries } = useLogEntryRateGraphData({ data }); const dateFormatter = useMemo( () => @@ -45,7 +42,6 @@ export const ChartView = ({ data, setTimeRange, timeRange }: Props) => { [lineSeries, timeRange] ); - const areaSpecId = getSpecId('modelBounds'); const lineSpecId = getSpecId('averageValues'); const anomalySpecId = getSpecId('anomalies'); @@ -59,8 +55,6 @@ export const ChartView = ({ data, setTimeRange, timeRange }: Props) => { [dateFormat] ); - const [isShowingModelBounds, setIsShowingModelBounds] = useState(true); - const handleBrushEnd = useCallback( (startTime: number, endTime: number) => { setTimeRange({ @@ -73,19 +67,6 @@ export const ChartView = ({ data, setTimeRange, timeRange }: Props) => { return ( <> - - - { - setIsShowingModelBounds(e.target.checked); - }} - /> - -
{ position="left" tickFormat={value => Number(value).toFixed(0)} /> - {isShowingModelBounds ? ( - - ) : null} { ); }; - -const showModelBoundsLabel = i18n.translate( - 'xpack.infra.logs.analysis.logRateSectionModelBoundsCheckboxLabel', - { defaultMessage: 'Show model bounds' } -); diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/table.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/table.tsx index 19bfaacf5d5dfa..a32cc163c39e9b 100644 --- a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/table.tsx +++ b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/table.tsx @@ -37,8 +37,8 @@ export const TableView = ({ data }: Props) => { const formattedAnomalies = useMemo(() => { return data.histogramBuckets.reduce((acc: any, bucket) => { - if (bucket.anomalies.length > 0) { - bucket.anomalies.forEach(anomaly => { + if (bucket.dataSets.length > 0) { + bucket.dataSets[0].anomalies.forEach(anomaly => { const formattedAnomaly = { startTime: moment(anomaly.startTime).format(dateFormat || 'Y-MM-DD HH:mm:ss.SSS'), anomalyScore: Number(anomaly.anomalyScore).toFixed(3), From 76908746fb9e702791b1c1b7fae073d05a85dec3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20St=C3=BCrmer?= Date: Sat, 28 Sep 2019 19:52:30 +0200 Subject: [PATCH 05/10] Display partitons in a bar chart --- .../analysis/sections/log_rate/bar_chart.tsx | 167 ++++++++++++++++++ .../logs/analysis/sections/log_rate/index.tsx | 9 +- .../server/lib/log_analysis/log_analysis.ts | 61 ++++--- .../log_analysis/queries/log_entry_rate.ts | 50 +++--- 4 files changed, 241 insertions(+), 46 deletions(-) create mode 100644 x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/bar_chart.tsx diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/bar_chart.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/bar_chart.tsx new file mode 100644 index 00000000000000..0719bf956b680c --- /dev/null +++ b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/bar_chart.tsx @@ -0,0 +1,167 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { + Axis, + BarSeries, + Chart, + getAnnotationId, + getAxisId, + getSpecId, + niceTimeFormatter, + RectAnnotation, + RectAnnotationDatum, + Settings, + TooltipValue, + LIGHT_THEME, + DARK_THEME, +} from '@elastic/charts'; +import { i18n } from '@kbn/i18n'; +import moment from 'moment'; +import React, { useCallback, useMemo } from 'react'; + +import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate'; +import { TimeRange } from '../../../../../../common/http_api/shared/time_range'; +import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting'; + +type LogEntryRateHistogramBuckets = GetLogEntryRateSuccessResponsePayload['data']['histogramBuckets']; + +export const LogEntryRateBarChart: React.FunctionComponent<{ + bucketDuration: number; + histogramBuckets: LogEntryRateHistogramBuckets | null; + setTimeRange: (timeRange: TimeRange) => void; + timeRange: TimeRange; +}> = ({ bucketDuration, histogramBuckets, setTimeRange, timeRange }) => { + const [dateFormat] = useKibanaUiSetting('dateFormat'); + const [isDarkMode] = useKibanaUiSetting('theme:darkMode'); + + const chartDateFormatter = useMemo( + () => niceTimeFormatter([timeRange.startTime, timeRange.endTime]), + [timeRange] + ); + + const logEntryRateSeries = useMemo( + () => + histogramBuckets + ? histogramBuckets.reduce>( + (buckets, bucket) => { + return [ + ...buckets, + ...bucket.dataSets.map(dataSet => ({ + group: dataSet.dataSetId === '' ? 'unknown' : dataSet.dataSetId, + time: bucket.startTime, + value: dataSet.averageActualLogEntryRate, + })), + ]; + }, + [] + ) + : [], + [histogramBuckets] + ); + + const logEntryRateAnomalyAnnotations = useMemo( + () => + histogramBuckets + ? histogramBuckets.reduce((annotatedBuckets, bucket) => { + const anomalies = bucket.dataSets.reduce( + (accumulatedAnomalies, dataSet) => [...accumulatedAnomalies, ...dataSet.anomalies], + [] + ); + if (anomalies.length <= 0) { + return annotatedBuckets; + } + return [ + ...annotatedBuckets, + { + coordinates: { + x0: bucket.startTime, + x1: bucket.startTime + bucketDuration, + }, + details: i18n.translate( + 'xpack.infra.logs.analysis.logRateSectionAnomalyCountTooltipLabel', + { + defaultMessage: `{anomalyCount, plural, one {# anomaly} other {# anomalies}}`, + values: { + anomalyCount: anomalies.length, + }, + } + ), + }, + ]; + }, []) + : [], + [histogramBuckets] + ); + + const logEntryRateSpecId = getSpecId('averageValues'); + const logEntryRateAnomalyAnnotationsId = getAnnotationId('anomalies'); + + const tooltipProps = useMemo( + () => ({ + headerFormatter: (tooltipData: TooltipValue) => + moment(tooltipData.value).format(dateFormat || 'Y-MM-DD HH:mm:ss.SSS'), + }), + [dateFormat] + ); + + const handleBrushEnd = useCallback( + (startTime: number, endTime: number) => { + setTimeRange({ + endTime, + startTime, + }); + }, + [setTimeRange] + ); + + return ( +
+ + + Number(value).toFixed(0)} + /> + + + + +
+ ); +}; diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx index 8a23d7ac091110..02eeb5e468e336 100644 --- a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx +++ b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx @@ -16,10 +16,10 @@ import { i18n } from '@kbn/i18n'; import React, { useState } from 'react'; import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate'; -import { ChartView } from './chart'; import { isValidLogRateView, LogRateView, LogRateViewSwitcher } from './log_rate_view_switcher'; import { TableView } from './table'; import { TimeRange } from '../../../../../../common/http_api/shared/time_range'; +import { LogEntryRateBarChart } from './bar_chart'; export const LogRateResults = ({ isLoading, @@ -85,7 +85,12 @@ export const LogRateResults = ({ {viewMode === 'chart' ? ( - + ) : ( )} diff --git a/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts b/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts index 260aa60c0f68bd..31d9c5403e2d2b 100644 --- a/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts +++ b/x-pack/legacy/plugins/infra/server/lib/log_analysis/log_analysis.ts @@ -11,7 +11,12 @@ import { getJobId } from '../../../common/log_analysis'; import { throwErrors, createPlainError } from '../../../common/runtime_types'; import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../adapters/framework'; import { NoLogRateResultsIndexError } from './errors'; -import { logRateModelPlotResponseRT, createLogEntryRateQuery } from './queries'; +import { + logRateModelPlotResponseRT, + createLogEntryRateQuery, + LogRateModelPlotBucket, + CompositeTimestampDataSetKey, +} from './queries'; const COMPOSITE_AGGREGATION_BATCH_SIZE = 1000; @@ -37,30 +42,42 @@ export class InfraLogAnalysis { ) { const logRateJobId = this.getJobIds(request, sourceId).logEntryRate; - // TODO: fetch all batches - const mlModelPlotResponse = await this.libs.framework.callWithRequest( - request, - 'search', - createLogEntryRateQuery( - logRateJobId, - startTime, - endTime, - bucketDuration, - COMPOSITE_AGGREGATION_BATCH_SIZE - ) - ); + let mlModelPlotBuckets: LogRateModelPlotBucket[] = []; + let afterLatestBatchKey: CompositeTimestampDataSetKey | undefined; - if (mlModelPlotResponse._shards.total === 0) { - throw new NoLogRateResultsIndexError( - `Failed to find ml result index for job ${logRateJobId}.` + while (true) { + const mlModelPlotResponse = await this.libs.framework.callWithRequest( + request, + 'search', + createLogEntryRateQuery( + logRateJobId, + startTime, + endTime, + bucketDuration, + COMPOSITE_AGGREGATION_BATCH_SIZE, + afterLatestBatchKey + ) ); - } - const mlModelPlotBuckets = pipe( - logRateModelPlotResponseRT.decode(mlModelPlotResponse), - map(response => response.aggregations.timestamp_data_set_buckets.buckets), - fold(throwErrors(createPlainError), identity) - ); + if (mlModelPlotResponse._shards.total === 0) { + throw new NoLogRateResultsIndexError( + `Failed to find ml result index for job ${logRateJobId}.` + ); + } + + const { after_key: afterKey, buckets: latestBatchBuckets } = pipe( + logRateModelPlotResponseRT.decode(mlModelPlotResponse), + map(response => response.aggregations.timestamp_data_set_buckets), + fold(throwErrors(createPlainError), identity) + ); + + 
mlModelPlotBuckets = [...mlModelPlotBuckets, ...latestBatchBuckets]; + afterLatestBatchKey = afterKey; + + if (latestBatchBuckets.length < COMPOSITE_AGGREGATION_BATCH_SIZE) { + break; + } + } return mlModelPlotBuckets.reduce< Array<{ diff --git a/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts b/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts index e2550115b12919..b10b1fe04db24f 100644 --- a/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts +++ b/x-pack/legacy/plugins/infra/server/lib/log_analysis/queries/log_entry_rate.ts @@ -13,7 +13,8 @@ export const createLogEntryRateQuery = ( startTime: number, endTime: number, bucketDuration: number, - size: number + size: number, + afterKey?: CompositeTimestampDataSetKey ) => ({ allowNoIndices: true, body: { @@ -46,6 +47,7 @@ export const createLogEntryRateQuery = ( aggs: { timestamp_data_set_buckets: { composite: { + after: afterKey, size, sources: [ { @@ -130,31 +132,35 @@ const compositeTimestampDataSetKeyRT = rt.type({ timestamp: rt.number, }); +export type CompositeTimestampDataSetKey = rt.TypeOf; + +export const logRateModelPlotBucketRT = rt.type({ + key: compositeTimestampDataSetKeyRT, + filter_records: rt.type({ + doc_count: rt.number, + top_hits_record: rt.type({ + hits: rt.type({ + hits: rt.array( + rt.type({ + _source: logRateMlRecordRT, + }) + ), + }), + }), + }), + filter_model_plot: rt.type({ + doc_count: rt.number, + average_actual: metricAggregationRT, + }), +}); + +export type LogRateModelPlotBucket = rt.TypeOf; + export const logRateModelPlotResponseRT = rt.type({ aggregations: rt.type({ timestamp_data_set_buckets: rt.intersection([ rt.type({ - buckets: rt.array( - rt.type({ - key: compositeTimestampDataSetKeyRT, - filter_records: rt.type({ - doc_count: rt.number, - top_hits_record: rt.type({ - hits: rt.type({ - hits: rt.array( - rt.type({ - _source: logRateMlRecordRT, - }) - ), - }), - }), - }), - filter_model_plot: rt.type({ - doc_count: rt.number, - average_actual: metricAggregationRT, - }), - }) - ), + buckets: rt.array(logRateModelPlotBucketRT), }), rt.partial({ after_key: compositeTimestampDataSetKeyRT, From 42256ec558dac14089885904c8d3f9f6dd2b29fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20St=C3=BCrmer?= Date: Sat, 28 Sep 2019 19:56:01 +0200 Subject: [PATCH 06/10] Remove unused files --- .../log_entry_rate.tsx | 54 ------- .../logs/analysis/chart_helpers/index.tsx | 20 --- .../logs/analysis/sections/log_rate/chart.tsx | 145 ------------------ .../logs/analysis/sections/log_rate/index.tsx | 4 +- 4 files changed, 2 insertions(+), 221 deletions(-) delete mode 100644 x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx delete mode 100644 x-pack/legacy/plugins/infra/public/pages/logs/analysis/chart_helpers/index.tsx delete mode 100644 x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx diff --git a/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx b/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx deleted file mode 100644 index 587a1907e8dec7..00000000000000 --- a/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate.tsx +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import { useMemo } from 'react'; -import { GetLogEntryRateSuccessResponsePayload } from '../../../../../common/http_api/log_analysis'; - -type LogRateLineSeriesDataPoint = [number, number | null]; -type LogRateLineSeries = LogRateLineSeriesDataPoint[]; -type LogRateAnomalySeriesDataPoint = [number, number]; -type LogRateAnomalySeries = LogRateAnomalySeriesDataPoint[]; - -export const useLogEntryRateGraphData = ({ - data, -}: { - data: GetLogEntryRateSuccessResponsePayload['data'] | null; -}) => { - const lineSeries: LogRateLineSeries = useMemo(() => { - if (!data) { - return []; - } - - return data.histogramBuckets.map(bucket => [ - bucket.startTime, - bucket.dataSets.length > 0 ? bucket.dataSets[0].averageActualLogEntryRate : null, - ]); - }, [data]); - - const anomalySeries: LogRateAnomalySeries = useMemo(() => { - if (!data) { - return []; - } - - return data.histogramBuckets.reduce>((acc, bucket) => { - if (bucket.dataSets.length === 0) { - return []; - } - - return [ - ...acc, - ...bucket.dataSets[0].anomalies.map( - anomaly => [anomaly.startTime, anomaly.actualLogEntryRate] as [number, number] - ), - ]; - }, []); - }, [data]); - - return { - lineSeries, - anomalySeries, - }; -}; diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/chart_helpers/index.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/chart_helpers/index.tsx deleted file mode 100644 index df0eca449bb9f8..00000000000000 --- a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/chart_helpers/index.tsx +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ - -import chrome from 'ui/chrome'; -import { SpecId, Theme, LIGHT_THEME, DARK_THEME } from '@elastic/charts'; - -export const getColorsMap = (color: string, specId: SpecId) => { - const map = new Map(); - map.set({ colorValues: [], specId }, color); - return map; -}; - -export const isDarkMode = () => chrome.getUiSettingsClient().get('theme:darkMode'); - -export const getChartTheme = (): Theme => { - return isDarkMode() ? DARK_THEME : LIGHT_THEME; -}; diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx deleted file mode 100644 index 6780922ba37090..00000000000000 --- a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/chart.tsx +++ /dev/null @@ -1,145 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ - -import React, { useMemo, useCallback } from 'react'; -import { i18n } from '@kbn/i18n'; -import moment from 'moment'; -import { - Axis, - Chart, - getAxisId, - getSpecId, - LineSeries, - niceTimeFormatter, - Settings, - TooltipValue, -} from '@elastic/charts'; -import { getColorsMap, isDarkMode, getChartTheme } from '../../chart_helpers'; -import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate'; -import { useLogEntryRateGraphData } from '../../../../../containers/logs/log_analysis/log_analysis_graph_data/log_entry_rate'; -import { useKibanaUiSetting } from '../../../../../utils/use_kibana_ui_setting'; -import { TimeRange } from '../../../../../../common/http_api/shared/time_range'; - -const lineSeriesColour = 'rgb(49, 133, 252)'; - -interface Props { - data: GetLogEntryRateSuccessResponsePayload['data'] | null; - setTimeRange: (timeRange: TimeRange) => void; - timeRange: TimeRange; -} - -export const ChartView = ({ data, setTimeRange, timeRange }: Props) => { - const { lineSeries, anomalySeries } = useLogEntryRateGraphData({ data }); - - const dateFormatter = useMemo( - () => - lineSeries.length > 0 - ? niceTimeFormatter([timeRange.startTime, timeRange.endTime]) - : (value: number) => `${value}`, - [lineSeries, timeRange] - ); - - const lineSpecId = getSpecId('averageValues'); - const anomalySpecId = getSpecId('anomalies'); - - const [dateFormat] = useKibanaUiSetting('dateFormat'); - - const tooltipProps = useMemo( - () => ({ - headerFormatter: (tooltipData: TooltipValue) => - moment(tooltipData.value).format(dateFormat || 'Y-MM-DD HH:mm:ss.SSS'), - }), - [dateFormat] - ); - - const handleBrushEnd = useCallback( - (startTime: number, endTime: number) => { - setTimeRange({ - endTime, - startTime, - }); - }, - [setTimeRange] - ); - - return ( - <> -
- - - Number(value).toFixed(0)} - /> - - - - -
- - ); -}; diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx index 02eeb5e468e336..5e73927f833b9f 100644 --- a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx +++ b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/sections/log_rate/index.tsx @@ -16,10 +16,10 @@ import { i18n } from '@kbn/i18n'; import React, { useState } from 'react'; import { GetLogEntryRateSuccessResponsePayload } from '../../../../../../common/http_api/log_analysis/results/log_entry_rate'; -import { isValidLogRateView, LogRateView, LogRateViewSwitcher } from './log_rate_view_switcher'; -import { TableView } from './table'; import { TimeRange } from '../../../../../../common/http_api/shared/time_range'; import { LogEntryRateBarChart } from './bar_chart'; +import { isValidLogRateView, LogRateView, LogRateViewSwitcher } from './log_rate_view_switcher'; +import { TableView } from './table'; export const LogRateResults = ({ isLoading, From e18f2105c85c3e7f8ef920f2e0f142ec3e92dc0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20St=C3=BCrmer?= Date: Sat, 28 Sep 2019 20:18:24 +0200 Subject: [PATCH 07/10] Move log entry rate api call into separate file --- .../log_analysis/api/get_log_entry_rate.ts | 45 +++++++++++++++++++ .../logs/log_analysis/log_entry_rate.tsx | 37 ++------------- 2 files changed, 49 insertions(+), 33 deletions(-) create mode 100644 x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/api/get_log_entry_rate.ts diff --git a/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/api/get_log_entry_rate.ts b/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/api/get_log_entry_rate.ts new file mode 100644 index 00000000000000..471a00d40984cf --- /dev/null +++ b/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/api/get_log_entry_rate.ts @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import { fold } from 'fp-ts/lib/Either'; +import { pipe } from 'fp-ts/lib/pipeable'; +import { identity } from 'fp-ts/lib/function'; +import { kfetch } from 'ui/kfetch'; + +import { + getLogEntryRateRequestPayloadRT, + getLogEntryRateSuccessReponsePayloadRT, + LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, +} from '../../../../../common/http_api/log_analysis'; +import { createPlainError, throwErrors } from '../../../../../common/runtime_types'; + +export const callGetLogEntryRateAPI = async ( + sourceId: string, + startTime: number, + endTime: number, + bucketDuration: number +) => { + const response = await kfetch({ + method: 'POST', + pathname: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, + body: JSON.stringify( + getLogEntryRateRequestPayloadRT.encode({ + data: { + sourceId, + timeRange: { + startTime, + endTime, + }, + bucketDuration, + }, + }) + ), + }); + return pipe( + getLogEntryRateSuccessReponsePayloadRT.decode(response), + fold(throwErrors(createPlainError), identity) + ); +}; diff --git a/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_entry_rate.tsx b/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_entry_rate.tsx index 4e7a6647a95793..8b21a7e8298944 100644 --- a/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_entry_rate.tsx +++ b/x-pack/legacy/plugins/infra/public/containers/logs/log_analysis/log_entry_rate.tsx @@ -5,19 +5,10 @@ */ import { useMemo, useState } from 'react'; -import { kfetch } from 'ui/kfetch'; -import { fold } from 'fp-ts/lib/Either'; -import { pipe } from 'fp-ts/lib/pipeable'; -import { identity } from 'fp-ts/lib/function'; -import { - getLogEntryRateRequestPayloadRT, - getLogEntryRateSuccessReponsePayloadRT, - GetLogEntryRateSuccessResponsePayload, - LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, -} from '../../../../common/http_api/log_analysis'; -import { createPlainError, throwErrors } from '../../../../common/runtime_types'; +import { GetLogEntryRateSuccessResponsePayload } from '../../../../common/http_api/log_analysis'; import { useTrackedPromise } from '../../../utils/use_tracked_promise'; +import { callGetLogEntryRateAPI } from './api/get_log_entry_rate'; type LogEntryRateResults = GetLogEntryRateSuccessResponsePayload['data']; @@ -38,30 +29,10 @@ export const useLogEntryRate = ({ { cancelPreviousOn: 'resolution', createPromise: async () => { - return await kfetch({ - method: 'POST', - pathname: LOG_ANALYSIS_GET_LOG_ENTRY_RATE_PATH, - body: JSON.stringify( - getLogEntryRateRequestPayloadRT.encode({ - data: { - sourceId, - timeRange: { - startTime, - endTime, - }, - bucketDuration, - }, - }) - ), - }); + return await callGetLogEntryRateAPI(sourceId, startTime, endTime, bucketDuration); }, onResolve: response => { - const { data } = pipe( - getLogEntryRateSuccessReponsePayloadRT.decode(response), - fold(throwErrors(createPlainError), identity) - ); - - setLogEntryRate(data); + setLogEntryRate(response.data); }, }, [sourceId, startTime, endTime, bucketDuration] From a5334c6859dbaeb9633dde46146f61dd767a6599 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20St=C3=BCrmer?= Date: Sat, 28 Sep 2019 20:23:06 +0200 Subject: [PATCH 08/10] Remove anomaly count as per feedback --- .../logs/analysis/page_results_content.tsx | 43 +------------------ 1 file changed, 2 insertions(+), 41 deletions(-) diff --git a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx index 058c72839946b7..aaf24c22594e52 100644 --- 
a/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx +++ b/x-pack/legacy/plugins/infra/public/pages/logs/analysis/page_results_content.tsx @@ -6,7 +6,6 @@ import datemath from '@elastic/datemath'; import { - EuiBadge, EuiFlexGroup, EuiFlexItem, EuiPage, @@ -17,7 +16,6 @@ import { EuiSuperDatePicker, } from '@elastic/eui'; import { i18n } from '@kbn/i18n'; -import { FormattedMessage } from '@kbn/i18n/react'; import moment from 'moment'; import React, { useCallback, useMemo, useState } from 'react'; @@ -122,22 +120,6 @@ export const AnalysisResultsContent = ({ [setAutoRefresh] ); - const anomaliesDetected = useMemo(() => { - if (!logEntryRate) { - return null; - } else { - if (logEntryRate.histogramBuckets && logEntryRate.histogramBuckets.length) { - return logEntryRate.histogramBuckets.reduce( - (acc, bucket) => - acc + (bucket.dataSets.length > 0 ? bucket.dataSets[0].anomalies.length : 0), - 0 - ); - } else { - return null; - } - } - }, [logEntryRate]); - return ( <> {isLoading && !logEntryRate ? ( @@ -151,29 +133,8 @@ export const AnalysisResultsContent = ({ - - - - {anomaliesDetected !== null ? ( - - - {anomaliesDetected} - - ), - number: anomaliesDetected, - }} - /> - - ) : null} - - - - + + Date: Wed, 2 Oct 2019 15:42:55 +0100 Subject: [PATCH 09/10] Amend API tests --- .../apis/infra/log_analysis.ts | 13 +- .../data.json.gz | Bin 0 -> 6298 bytes .../mappings.json | 513 ++++++++++++++++++ 3 files changed, 520 insertions(+), 6 deletions(-) create mode 100644 x-pack/test/functional/es_archives/infra/8.0.0/ml_anomalies_partitioned_log_rate/data.json.gz create mode 100644 x-pack/test/functional/es_archives/infra/8.0.0/ml_anomalies_partitioned_log_rate/mappings.json diff --git a/x-pack/test/api_integration/apis/infra/log_analysis.ts b/x-pack/test/api_integration/apis/infra/log_analysis.ts index bd09cdf6ff56ee..fe7d55649d1d63 100644 --- a/x-pack/test/api_integration/apis/infra/log_analysis.ts +++ b/x-pack/test/api_integration/apis/infra/log_analysis.ts @@ -20,8 +20,8 @@ import { } from '../../../../legacy/plugins/infra/common/runtime_types'; import { FtrProviderContext } from '../../ftr_provider_context'; -const TIME_BEFORE_START = 1564315100000; -const TIME_AFTER_END = 1565040700000; +const TIME_BEFORE_START = 1569934800000; +const TIME_AFTER_END = 1570016700000; const COMMON_HEADERS = { 'kbn-xsrf': 'some-xsrf-token', }; @@ -32,8 +32,8 @@ export default ({ getService }: FtrProviderContext) => { const supertest = getService('supertest'); describe('log analysis apis', () => { - before(() => esArchiver.load('infra/8.0.0/ml_anomalies_log_rate')); - after(() => esArchiver.unload('infra/8.0.0/ml_anomalies_log_rate')); + before(() => esArchiver.load('infra/8.0.0/ml_anomalies_partitioned_log_rate')); + after(() => esArchiver.unload('infra/8.0.0/ml_anomalies_partitioned_log_rate')); describe('log rate results', () => { describe('with the default source', () => { @@ -62,11 +62,12 @@ export default ({ getService }: FtrProviderContext) => { getLogEntryRateSuccessReponsePayloadRT.decode(body), fold(throwErrors(createPlainError), identity) ); - expect(logEntryRateBuckets.data.bucketDuration).to.be(15 * 60 * 1000); expect(logEntryRateBuckets.data.histogramBuckets).to.not.be.empty(); expect( - logEntryRateBuckets.data.histogramBuckets.some(bucket => bucket.anomalies.length > 0) + logEntryRateBuckets.data.histogramBuckets.some(bucket => { + return bucket.dataSets.some(dataSet => dataSet.anomalies.length > 0); + }) ).to.be(true); }); diff --git 
a/x-pack/test/functional/es_archives/infra/8.0.0/ml_anomalies_partitioned_log_rate/data.json.gz b/x-pack/test/functional/es_archives/infra/8.0.0/ml_anomalies_partitioned_log_rate/data.json.gz new file mode 100644 index 0000000000000000000000000000000000000000..8d15ff8ccb02265ae0e1f7268ff1fc90035d2258 GIT binary patch literal 6298 zcmYkAXE@t$)W?n1rmw;pcF+B)NE~`gqjsw z?e*|~o)^#k@^{X4uHSvu_jBDWu|zm`2akF?ThoK7sjlzjnuh=F$e&(*PyDNa{SyzJ~;F5HN0Z$e5X#fgbH- z4(1AjK9?2_<_-{AMkZez^jn^#U1CpLn6H8^U;9EXpJZH1-T2C0`_2bGTF|A7mpxo( z$(rlgT3UYd)saAQ9BVK7HA{;gpY?I9y5gBw?PUlMhew^5*cRuV->7@`9Cq!edwFG6 zf8WyfKl>*Pl85n!n8^Aye?3Q;w%oWJ7f^^Ov(Qdd@+I+Q!`a68)y9cEI2}Xodboa; z6t7y7r7UKmErJj`12{}vcCkALFh1F`(dG(xxBNZ^mZcbeM+o)l2JkN0*v|_{$>QRfpkcvR77xsikzm{B=N#_+ii9T1|sW zD?06ddrg*`N2ENZ+4TlC>NAVX<&lYonX|QSM36y~;F(Bze92%hMe83XOz_A_Fl1mj z`@xMz&_7EaM^CiH`(p1#ed+e$VTn&w}<7^eK19rQeebN#W9+Rmu%&Wv^D;6b==+boub~dGDhx9R99zc6gjT( zg7|47fi1;KCe+#5H{VFJ)HMRSA0(Abk%H6(SJGhB!GO?|mx~G2;6)wSeFD6ZYulFu zVi+pGt43en!HT-ye(q5eDV*XzxpQ! z2_fHv+C9d-7ajJ|2+GbVQ3!tuR>CAFukfd~3yp8;{4=-iO?b#X9k$+`@NKbrrw_`OAyh{7LXiY&XR1vQ zJ{oaO0IjD>RCJ}$WuuyNXpdjiQIEM`@R-qdZM!!xlxwi2NS!KWqF)3wN{fCjHg=nn zX@K6V&hxh!hLEjq-s$J0Q<7C2&>%MMPjLcTU_QbGoyZsBT#3AL>A!peBhF%O&G=9q zp#4t45ucm{%3oAVzf;^EBA75n>5!zyDVJIY;v?86L&kwV$yEuBJHfIivpjE$2a_+u zhqkq;6DPROQ5A7^Wdk%dlE3l?8xI$pt=NvC-NuiSzHb^#n~yf1L&cBJZt~Onv`pr{VZin_-=Kif9nO~6=~7-Vvksp?WjJLrqg?1RvkvnY- zX6kkEjNg3!3a43!%(brAkEX_ShdA3h1#eGHfQxQWRhG|_3@z2}Nz=#7x}s8Rge7nM zC3UU?(fot6Z>3SxN|{-Ix2(@TofI#DN;`H%H^;jR_@YFOp-(*D{JMPVl!;wC$N6D+ zf9h2R+_iUb)t!xxIBJ>5&nc982(BqM>qv;Y=dLacd;(o4g1GgLMK0!TgHc=l-eQiD-uyBp|UCqf>BcSfDSXCr} zPsbJ;_R*J^kKj3PmXdYMnt%OB9O#=I9BV=)46yNWR#$^Y7+J!K_1CDHCzzd1O8x@1 zdF8Yc4lLWr!SBAL6jhH6)E-hy$Fvr678$kmFX-tsn@8x|te_I(ZG(x}zL6}+fVE&C6%4suejMpRcNkXOwRBhxD%2c#rwd`;h-H~7!{*2g!@$eq6C zr+)X*?Y_cI38!*qpu|2#0YvO7h4>t@J_RvBSNKgjs>g0;&qodZTNx|XCsl!&15j)A zbIw_@m(6#dKmTJYWbX`z3RGY?x@Qa;f=!FPjBdVp16yOK9Lh(}+Zebh`4!#KR4}4S5706lvr!=88js%2?C5sS`Z# zA$_&0A=?x^6zm@Ty(y26|Amjtx+r7nyHaBOi-mHJLQ+XUJqcJC;6Yy{ERUVAb=4dK zvc=e4Xh0o`oFjp5wp<9_47-~7{{P!QQj&gC&38q651eqjS^zAUr;13&Qq_BGPcs|yo0^lf8d1Z$=7m^AC z6l)fWsA2i7jzQ609#_|jP!cubl>5}~J_-)pwRKk%X`V^#O^Uxax=2ybc{2x9PzPZ7(XftY7>Q7)zvKOD{Qmx-NS4DOUw)+O|WGnFS?e6)cYPSU??x>o{tSxtR>`0#yoML-1domFMeCV-Z zeezn`o`0E_9sPuTSqq+l9%;o%KacmL&Ym~loQYqG=fLS7D8G8e7T`|wI)l#Q{X1uu zy_NIXu+Gf@p3uc|RQJ4Z!(rE!*q&l*`Ooh7)aNG*b(0DLIku)W&9b`f)+-Hezb3bS z1`0qHevE@^DYa4rh!uv6E!ldfjy19*5Bx0s%`4xmSF=vq8Z8j9f0?e%@bxHb0G2Jnp$GH2p%!OA%QWcwqg)~ z`R!e1cXIqq5>+mpTI)g)MHW*%FXZQlKBI= zQ||#h5?K>c%y}km!6^2STOvOvt)Ko~lai(QgamC6r9E%4&r1#j$KQWtoa9{8rY>TC zAS5d0)xTUT8*0j*2zQI0+87O&P6u2+F|~XVZv5-VlA4q>Fo&vOPDWZYi4ggm58`k~zd)KPBdKG)+|iAp-cXq6F)x8czD0;0Tr=Fei|VG2 zvb8>(B-{zDoTD9dd#k(N&*Pr(fX3XB9;y~Jy6YN)D5fGwnC}M`&(YWr^V3OCaUMwX zHjYNn!>r8727Jnz7>$XTd_2ikiV#Gty^aS z>pS`4k{@NwWQ#~_5d!}yiqS$N?)`g+ZMpjiN<-LFBBM&L zwKdv5n2px3ao4aP&xuw8AfF+8oh&FPown=x`$wxa`*flOg70xF9CeMGHU20+1+G}tjaMJ*&~*TDvbt#1Im5Vi|$ z2~HTeYFz_qR;To*wy&kqxxd z*mb_p%4}hv%b-eIsqI&Q*XV7&wO#sP{T6Wxf>mT5cf5@|i4(ivovxt%D@P+Q-TNYm z(mg`4aU$IA=lD_CwF;dDkU)P~An-KlR%!c0su+&W=d1_V6?$?fJ|L-~=?_dIAF344 znC)64|0}R|`FamP(P!zL&gLNiON=aXADY~t0v~L;+Ac*{gQsx8 zJY=!Aj45P&E48A6b5`JGfn@l;QJ#RUHv<8g)rS#sN0{``e)EQQN`2T-1Ws(@f3kCOwdlzzkrCibTeIT7tf{p(0g}Et8yDOPBFsB6&Aso_?|WG=;}8yG69dY zZ_BU5s#6m6pBUcB*2C%b#RpKss9W_W>v^Kg9@CZQeh)f;w&&2Z0pHU5n|4-vFRh_y zuHM^-lF^#{sCE|UsIPi)Iy+AtN2*bY8TtcZRaNaj>005Mf}{T$Kp2HGZ@9J>axlM~ z@1`1*V4&f8odhp)iR6<{pur>2^K2vid6(9bm(#(JeeOxihW1Ivs|7Mr6|=f8NTqr23+QJ!$G5ur0KeV&g9Y!A22$)cHx=RCFBx z=f7R}C~sSn?xUX$u(P{q%!|aHL;-DK^7rtHFYB40M%+B=l&nf>HEaa^w0G8kT2vsf 
zP4VK{sq8Q-W)|8$MwB9GliwsWH4OQrRZ_ULDa7=VCxiJA-zK|_i6^A}4MKa{<J)6vY?sBNh7yhI0!`nq6uhxV^sw(GTRN$) zi{3bTnOYB3@KvzgQ`z5EPdbAd4WBLcBy)Q7iA*tu&WAh^Oej;v)t44^0O2AytqW}5 zw419Nz5>`(Q_kqH_HI%@*(U*5S>{wCsBO2XboqQL&@N3cL+Hj$MwstUjP8Dv*yZ`diGWVes}NfX#${*VO0i$`KUpf z)@S_U-R`f&mW+7&<<^hTDC5BW)8CK?otFzsyMkH%7wq%#Ujv8wLPF*P_Q?09<2sh4 zXANwr>8$!6%Z0Oa-DCy2efz83C+TC%fA)qYWF|K2B3HLtce&qp|GB3ZEglyEn>v4| zktxS3p6O`{GD8dm_nt1-qn+Nud3yW!FqT>Ephv+CA6kAURMVO4-+P{Lak#>yqC-k( zK72jYRJR=WY#T2=d{m$=l-E81+K9BPl^K2QI6$NSw1v1(g`S@_%@Z+}$#}50i&# zRE0(ssLfeQt-#1A9Ix)RHHn9mcw2F+IJsEA)F!+Nlw+Ht|!RtDTl>C(c%(%448yyaXYwXKaIq zAIV`n{-drai9o?%T5o{K_|uE>vz!>|u?4r~R>Wy>26;#jYR}qvoxfbA(+X^*O1(YYs2GGsy9^-xjV~`5f@OF zCTUy9Zk@^s?+sAgFk^mWn5#7%+g{(|q2a!chqtDq1%WW@6p(}kOhfVf>-1}zVXt$% z`uGpXq8y&$@ALKXJyaiT(V<#&LBayFcT`ckCUt}d_H2^BkDA8hvA10m9dc<&1kBj^ z70o6Rkw7V<4ec3wjt+L>1iy(0wzrY=wBE!sv+q0xw}VDjjnYY}@$%(J9#u`*73BvHA`_c&89| z@l4joEblH{I#OjjeqT3mH6NJZ@KI4EKOYhrB6`X7itEHANq~EOP;CdQ>H(T0_*dW* zyRFyfS{~Qpf-0TGv2kH8qDi*HyyR7X#@WPaXPPsDRT|?gHeEy8~Y$?n1U(QS_gJr%X`|bz_Zk#2ZPpglNALr~7s>{f z7dwwUJ*_6r_V)(b&`j1(d`X=hCdL zzv8qza{DrPZ_HF z$q*;%lQCnZY0zu=h61CgdiTicjP7dG6j;b-(Mf${&1GL?<|#)5?mM3j-%pbGMD~!r z#`Ofbck8F;h7|Naj-=W_2IJo}TDq-kV(i*nwI8|L8r04Ak5^aB=n{ikq~Ev5s Date: Thu, 3 Oct 2019 10:33:12 +0100 Subject: [PATCH 10/10] Remove unused translations --- x-pack/plugins/translations/translations/ja-JP.json | 3 --- x-pack/plugins/translations/translations/zh-CN.json | 3 --- 2 files changed, 6 deletions(-) diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json index ba1cd13a1b9ffe..2a14e4087e0fe9 100644 --- a/x-pack/plugins/translations/translations/ja-JP.json +++ b/x-pack/plugins/translations/translations/ja-JP.json @@ -5321,11 +5321,8 @@ "xpack.infra.header.logsTitle": "ログ", "xpack.infra.homePage.settingsTabTitle": "設定", "xpack.infra.kibanaMetrics.cloudIdMissingErrorMessage": "{metricId} のモデルには cloudId が必要ですが、{nodeId} に cloudId が指定されていません。", - "xpack.infra.logs.analysis.logRateSectionAnomalySeriesName": "異常", - "xpack.infra.logs.analysis.logRateSectionAreaSeriesName": "期待値", "xpack.infra.logs.analysis.logRateSectionLineSeriesName": "15 分ごとのログエントリー (平均)", "xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel": "ログレートの結果を読み込み中", - "xpack.infra.logs.analysis.logRateSectionModelBoundsCheckboxLabel": "モデルバウンドを表示", "xpack.infra.logs.analysis.logRateSectionNoDataBody": "時間範囲を調整する必要があるかもしれません。", "xpack.infra.logs.analysis.logRateSectionNoDataTitle": "表示するデータがありません。", "xpack.infra.logs.analysis.logRateSectionTitle": "ログレート", diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json index 781043f6c7279d..d94567c5d1307a 100644 --- a/x-pack/plugins/translations/translations/zh-CN.json +++ b/x-pack/plugins/translations/translations/zh-CN.json @@ -5324,11 +5324,8 @@ "xpack.infra.header.logsTitle": "Logs", "xpack.infra.homePage.settingsTabTitle": "设置", "xpack.infra.kibanaMetrics.cloudIdMissingErrorMessage": "{metricId} 的模型需要云 ID,但没有为 {nodeId} 提供。", - "xpack.infra.logs.analysis.logRateSectionAnomalySeriesName": "异常", - "xpack.infra.logs.analysis.logRateSectionAreaSeriesName": "预期", "xpack.infra.logs.analysis.logRateSectionLineSeriesName": "每 15 分钟日志条目数(平均值)", "xpack.infra.logs.analysis.logRateSectionLoadingAriaLabel": "正在加载日志速率结果", - "xpack.infra.logs.analysis.logRateSectionModelBoundsCheckboxLabel": "显示模型边界", "xpack.infra.logs.analysis.logRateSectionNoDataBody": "您可能想调整时间范围。", "xpack.infra.logs.analysis.logRateSectionNoDataTitle": 
"没有可显示的数据。", "xpack.infra.logs.analysis.logRateSectionTitle": "日志速率",