Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ML] Add audit warning for 1000 categories found early in job #51146

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,8 @@ public final class Messages {
"Adjust the analysis_limits.model_memory_limit setting to ensure all data is analyzed";
public static final String JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT_PRE_7_2 = "Job memory status changed to hard_limit at {0}; adjust the " +
"analysis_limits.model_memory_limit setting to ensure all data is analyzed";
public static final String JOB_AUDIT_EXCESSIVE_EARLY_CATEGORIES = "{0} categories observed in the first [{1}] buckets." +
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We have square brackets around the second number in this message but not the first. Should we make it consistent?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The reason I didn't put the first one in square brackets is that it won't vary (at least for a particular version of the product). The first number will always be 1000 unless somebody changes the code, whereas the second number can vary between different occurrences of the audit message.

" This suggests an inappropriate categorization_field_name has been chosen.";

public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_DUPLICATES = "categorization_filters contain duplicates";
public static final String JOB_CONFIG_CATEGORIZATION_FILTERS_CONTAINS_EMPTY =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,9 @@ public class AutodetectResultProcessor {

private static final Logger LOGGER = LogManager.getLogger(AutodetectResultProcessor.class);

static final long EARLY_BUCKET_THRESHOLD = 100;
static final int EXCESSIVE_EARLY_CATEGORY_COUNT = 1000;

private final Client client;
private final AnomalyDetectionAuditor auditor;
private final String jobId;
Expand All @@ -87,7 +90,9 @@ public class AutodetectResultProcessor {
private final FlushListener flushListener;
private volatile boolean processKilled;
private volatile boolean failed;
private int bucketCount; // only used from the process() thread, so doesn't need to be volatile
private long priorRunsBucketCount;
private long currentRunBucketCount; // only used from the process() thread, so doesn't need to be volatile
private boolean excessiveCategoryWarningIssued; // only used from the process() thread, so doesn't need to be volatile
private final JobResultsPersister.Builder bulkResultsPersister;
private boolean deleteInterimRequired;

Expand Down Expand Up @@ -122,6 +127,7 @@ public AutodetectResultProcessor(Client client,
this.bulkResultsPersister = persister.bulkPersisterBuilder(jobId, this::isAlive);
this.timingStatsReporter = new TimingStatsReporter(timingStats, bulkResultsPersister);
this.deleteInterimRequired = true;
this.priorRunsBucketCount = timingStats.getBucketCount();
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was wondering where we'd get this from but it's cool we already pass it in!

}

public void process() {
Expand All @@ -140,7 +146,7 @@ public void process() {
} catch (Exception e) {
LOGGER.warn(new ParameterizedMessage("[{}] Error persisting autodetect results", jobId), e);
}
LOGGER.info("[{}] {} buckets parsed from autodetect output", jobId, bucketCount);
LOGGER.info("[{}] {} buckets parsed from autodetect output", jobId, currentRunBucketCount);

} catch (Exception e) {
failed = true;
Expand All @@ -166,15 +172,15 @@ public void process() {
}

private void readResults() {
bucketCount = 0;
currentRunBucketCount = 0;
try {
Iterator<AutodetectResult> iterator = process.readAutodetectResults();
while (iterator.hasNext()) {
try {
AutodetectResult result = iterator.next();
processResult(result);
if (result.getBucket() != null) {
LOGGER.trace("[{}] Bucket number {} parsed from output", jobId, bucketCount);
LOGGER.trace("[{}] Bucket number {} parsed from output", jobId, currentRunBucketCount);
}
} catch (Exception e) {
if (isAlive() == false) {
Expand Down Expand Up @@ -212,7 +218,7 @@ void processResult(AutodetectResult result) {
// results are also interim
timingStatsReporter.reportBucket(bucket);
bulkResultsPersister.persistBucket(bucket).executeRequest();
++bucketCount;
++currentRunBucketCount;
}
List<AnomalyRecord> records = result.getRecords();
if (records != null && !records.isEmpty()) {
Expand All @@ -224,7 +230,7 @@ void processResult(AutodetectResult result) {
}
CategoryDefinition categoryDefinition = result.getCategoryDefinition();
if (categoryDefinition != null) {
persister.persistCategoryDefinition(categoryDefinition, this::isAlive);
processCategoryDefinition(categoryDefinition);
}
ModelPlot modelPlot = result.getModelPlot();
if (modelPlot != null) {
Expand Down Expand Up @@ -308,6 +314,22 @@ void processResult(AutodetectResult result) {
}
}

private void processCategoryDefinition(CategoryDefinition categoryDefinition) {
    // Persist the category unconditionally; the audit check below is purely advisory.
    persister.persistCategoryDefinition(categoryDefinition, this::isAlive);
    long bucketsSoFar = priorRunsBucketCount + currentRunBucketCount;
    boolean reachedExcessiveCount = categoryDefinition.getCategoryId() == EXCESSIVE_EARLY_CATEGORY_COUNT;
    boolean stillEarly = bucketsSoFar < EARLY_BUCKET_THRESHOLD;
    if (reachedExcessiveCount && stillEarly && excessiveCategoryWarningIssued == false) {
        // Add 1 because category definitions are written before buckets
        auditor.warning(jobId, Messages.getMessage(Messages.JOB_AUDIT_EXCESSIVE_EARLY_CATEGORIES, EXCESSIVE_EARLY_CATEGORY_COUNT,
            1L + bucketsSoFar));
        // This flag won't be retained if the job is closed and reopened, or if the job migrates to another node.
        // This means it's possible the audit message is generated multiple times. However, that's not a
        // disaster, and is also very unlikely in the (best practice) cases where initial lookback covers
        // more than 100 buckets.
        excessiveCategoryWarningIssued = true;
    }
}

private void processModelSizeStats(ModelSizeStats modelSizeStats) {
LOGGER.trace("[{}] Parsed ModelSizeStats: {} / {} / {} / {} / {} / {}",
jobId, modelSizeStats.getModelBytes(), modelSizeStats.getTotalByFieldCount(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,7 @@ public void testProcess() throws TimeoutException {
verify(persister).commitStateWrites(JOB_ID);
}

public void testProcessResult_bucket() throws Exception {
public void testProcessResult_bucket() {
when(bulkBuilder.persistTimingStats(any(TimingStats.class))).thenReturn(bulkBuilder);
when(bulkBuilder.persistBucket(any(Bucket.class))).thenReturn(bulkBuilder);
AutodetectResult result = mock(AutodetectResult.class);
Expand All @@ -150,7 +150,7 @@ public void testProcessResult_bucket() throws Exception {
verify(persister, never()).deleteInterimResults(JOB_ID);
}

public void testProcessResult_bucket_deleteInterimRequired() throws Exception {
public void testProcessResult_bucket_deleteInterimRequired() {
when(bulkBuilder.persistTimingStats(any(TimingStats.class))).thenReturn(bulkBuilder);
when(bulkBuilder.persistBucket(any(Bucket.class))).thenReturn(bulkBuilder);
AutodetectResult result = mock(AutodetectResult.class);
Expand All @@ -167,7 +167,7 @@ public void testProcessResult_bucket_deleteInterimRequired() throws Exception {
verify(persister).deleteInterimResults(JOB_ID);
}

public void testProcessResult_records() throws Exception {
public void testProcessResult_records() {
AutodetectResult result = mock(AutodetectResult.class);
List<AnomalyRecord> records =
Arrays.asList(
Expand All @@ -183,7 +183,7 @@ public void testProcessResult_records() throws Exception {
verify(persister).bulkPersisterBuilder(eq(JOB_ID), any());
}

public void testProcessResult_influencers() throws Exception {
public void testProcessResult_influencers() {
AutodetectResult result = mock(AutodetectResult.class);
List<Influencer> influencers =
Arrays.asList(
Expand All @@ -199,9 +199,10 @@ public void testProcessResult_influencers() throws Exception {
verify(persister).bulkPersisterBuilder(eq(JOB_ID), any());
}

public void testProcessResult_categoryDefinition() throws Exception {
public void testProcessResult_categoryDefinition() {
AutodetectResult result = mock(AutodetectResult.class);
CategoryDefinition categoryDefinition = mock(CategoryDefinition.class);
when(categoryDefinition.getCategoryId()).thenReturn(1L);
when(result.getCategoryDefinition()).thenReturn(categoryDefinition);

processorUnderTest.setDeleteInterimRequired(false);
Expand All @@ -212,7 +213,66 @@ public void testProcessResult_categoryDefinition() throws Exception {
verify(persister).bulkPersisterBuilder(eq(JOB_ID), any());
}

public void testProcessResult_flushAcknowledgement() throws Exception {
public void testProcessResult_excessiveCategoryDefinitionCountEarly() {
    // Feed far more categories than the threshold through the processor, several times over,
    // while no buckets have been processed — the job is therefore still "early".
    final int numPasses = 3;
    final int numCategories = AutodetectResultProcessor.EXCESSIVE_EARLY_CATEGORY_COUNT * 2;

    processorUnderTest.setDeleteInterimRequired(false);

    AutodetectResult result = mock(AutodetectResult.class);
    for (int pass = 1; pass <= numPasses; ++pass) {
        for (long id = 1; id <= numCategories; ++id) {
            CategoryDefinition category = new CategoryDefinition(JOB_ID);
            category.setCategoryId(id);
            when(result.getCategoryDefinition()).thenReturn(category);
            processorUnderTest.processResult(result);
        }
    }

    verify(bulkBuilder, never()).executeRequest();
    // Every category definition must be persisted, even after the warning has fired.
    verify(persister, times(numPasses * numCategories)).persistCategoryDefinition(any(CategoryDefinition.class), any());
    verify(persister).bulkPersisterBuilder(eq(JOB_ID), any());
    // The audit warning must be issued exactly once despite the threshold being crossed repeatedly.
    verify(auditor).warning(eq(JOB_ID), eq(Messages.getMessage(Messages.JOB_AUDIT_EXCESSIVE_EARLY_CATEGORIES,
        AutodetectResultProcessor.EXCESSIVE_EARLY_CATEGORY_COUNT, 1)));
}

public void testProcessResult_highCategoryDefinitionCountLateOn() {
    // Same excessive category volume as the "early" test, but only after the bucket count
    // has already passed the early-bucket threshold — so no warning should be audited.
    final int numPasses = 3;
    final int numCategories = AutodetectResultProcessor.EXCESSIVE_EARLY_CATEGORY_COUNT * 2;

    processorUnderTest.setDeleteInterimRequired(false);

    when(bulkBuilder.persistTimingStats(any(TimingStats.class))).thenReturn(bulkBuilder);
    when(bulkBuilder.persistBucket(any(Bucket.class))).thenReturn(bulkBuilder);

    // Push enough buckets through first that the job is no longer considered "early".
    final int numPriorBuckets = (int) AutodetectResultProcessor.EARLY_BUCKET_THRESHOLD + 1;
    AutodetectResult bucketResult = mock(AutodetectResult.class);
    for (int bucketNum = 0; bucketNum < numPriorBuckets; ++bucketNum) {
        Bucket bucket = new Bucket(JOB_ID, new Date(bucketNum * 1000 + 1000000), BUCKET_SPAN_MS);
        when(bucketResult.getBucket()).thenReturn(bucket);
        processorUnderTest.processResult(bucketResult);
    }

    AutodetectResult categoryResult = mock(AutodetectResult.class);
    for (int pass = 1; pass <= numPasses; ++pass) {
        for (long id = 1; id <= numCategories; ++id) {
            CategoryDefinition category = new CategoryDefinition(JOB_ID);
            category.setCategoryId(id);
            when(categoryResult.getCategoryDefinition()).thenReturn(category);
            processorUnderTest.processResult(categoryResult);
        }
    }

    verify(bulkBuilder).persistTimingStats(any(TimingStats.class));
    verify(bulkBuilder, times(numPriorBuckets)).persistBucket(any(Bucket.class));
    verify(bulkBuilder, times(numPriorBuckets)).executeRequest();
    verify(persister, times(numPasses * numCategories)).persistCategoryDefinition(any(CategoryDefinition.class), any());
    verify(persister).bulkPersisterBuilder(eq(JOB_ID), any());
    // Crucially: the threshold was crossed, but not early, so no audit warning is expected.
    verify(auditor, never()).warning(eq(JOB_ID), anyString());
}

public void testProcessResult_flushAcknowledgement() {
AutodetectResult result = mock(AutodetectResult.class);
FlushAcknowledgement flushAcknowledgement = mock(FlushAcknowledgement.class);
when(flushAcknowledgement.getId()).thenReturn(JOB_ID);
Expand All @@ -228,12 +288,13 @@ public void testProcessResult_flushAcknowledgement() throws Exception {
verify(bulkBuilder).executeRequest();
}

public void testProcessResult_flushAcknowledgementMustBeProcessedLast() throws Exception {
public void testProcessResult_flushAcknowledgementMustBeProcessedLast() {
AutodetectResult result = mock(AutodetectResult.class);
FlushAcknowledgement flushAcknowledgement = mock(FlushAcknowledgement.class);
when(flushAcknowledgement.getId()).thenReturn(JOB_ID);
when(result.getFlushAcknowledgement()).thenReturn(flushAcknowledgement);
CategoryDefinition categoryDefinition = mock(CategoryDefinition.class);
when(categoryDefinition.getCategoryId()).thenReturn(1L);
when(result.getCategoryDefinition()).thenReturn(categoryDefinition);

processorUnderTest.setDeleteInterimRequired(false);
Expand All @@ -248,7 +309,7 @@ public void testProcessResult_flushAcknowledgementMustBeProcessedLast() throws E
inOrder.verify(flushListener).acknowledgeFlush(flushAcknowledgement, null);
}

public void testProcessResult_modelPlot() throws Exception {
public void testProcessResult_modelPlot() {
AutodetectResult result = mock(AutodetectResult.class);
ModelPlot modelPlot = mock(ModelPlot.class);
when(result.getModelPlot()).thenReturn(modelPlot);
Expand All @@ -260,7 +321,7 @@ public void testProcessResult_modelPlot() throws Exception {
verify(bulkBuilder).persistModelPlot(modelPlot);
}

public void testProcessResult_modelSizeStats() throws Exception {
public void testProcessResult_modelSizeStats() {
AutodetectResult result = mock(AutodetectResult.class);
ModelSizeStats modelSizeStats = mock(ModelSizeStats.class);
when(result.getModelSizeStats()).thenReturn(modelSizeStats);
Expand All @@ -273,7 +334,7 @@ public void testProcessResult_modelSizeStats() throws Exception {
verify(persister).persistModelSizeStats(eq(modelSizeStats), any());
}

public void testProcessResult_modelSizeStatsWithMemoryStatusChanges() throws Exception {
public void testProcessResult_modelSizeStatsWithMemoryStatusChanges() {
TimeValue delay = TimeValue.timeValueSeconds(5);
// Set up schedule delay time
when(threadPool.schedule(any(Runnable.class), any(TimeValue.class), anyString()))
Expand Down Expand Up @@ -313,7 +374,7 @@ public void testProcessResult_modelSizeStatsWithMemoryStatusChanges() throws Exc
verify(auditor).error(JOB_ID, Messages.getMessage(Messages.JOB_AUDIT_MEMORY_STATUS_HARD_LIMIT, "512mb", "1kb"));
}

public void testProcessResult_modelSnapshot() throws Exception {
public void testProcessResult_modelSnapshot() {
AutodetectResult result = mock(AutodetectResult.class);
ModelSnapshot modelSnapshot = new ModelSnapshot.Builder(JOB_ID)
.setSnapshotId("a_snapshot_id")
Expand All @@ -337,7 +398,7 @@ public void testProcessResult_modelSnapshot() throws Exception {
verify(client).execute(same(UpdateJobAction.INSTANCE), eq(expectedJobUpdateRequest), any());
}

public void testProcessResult_quantiles_givenRenormalizationIsEnabled() throws Exception {
public void testProcessResult_quantiles_givenRenormalizationIsEnabled() {
AutodetectResult result = mock(AutodetectResult.class);
Quantiles quantiles = mock(Quantiles.class);
when(result.getQuantiles()).thenReturn(quantiles);
Expand All @@ -354,7 +415,7 @@ public void testProcessResult_quantiles_givenRenormalizationIsEnabled() throws E
verify(renormalizer).renormalize(quantiles);
}

public void testProcessResult_quantiles_givenRenormalizationIsDisabled() throws Exception {
public void testProcessResult_quantiles_givenRenormalizationIsDisabled() {
AutodetectResult result = mock(AutodetectResult.class);
Quantiles quantiles = mock(Quantiles.class);
when(result.getQuantiles()).thenReturn(quantiles);
Expand Down