Skip to content
This repository has been archived by the owner on Aug 2, 2022. It is now read-only.

Commit

Permalink
Change AD indices to be hidden indices instead of system indices. (#394)
Browse files Browse the repository at this point in the history
Previously, we registered AD indices as system indices as AD indices' names start with a dot. ES deprecates the creation of dot-prefixed index names except for hidden and system indices (elastic/elasticsearch#49959). Starting 7.10, ES adds a dedicated thread pool for system index write operations. System index writes/reads have higher priority than user index writes/reads. For example, system index writes can be forced regardless of whether the current index pressure is high or not (https://github.com/elastic/elasticsearch/blob/242083a36e02496aae9214dc41b89372022e7076/server/src/main/java/org/elasticsearch/index/IndexingPressure.java#L62-L73).

AD indices are no more important than other user indices. We don't want AD index reads/writes to impact user indices' reads/writes. This PR removes AD indices from the system index list and marks them as hidden indices instead.

This change does not impact the data in already-created AD indices, but they are no longer system indices. The indices metadata section of the localhost:9200/_cluster/state?pretty output can confirm this (look for the "system" key under each index). Newly created AD indices will be hidden indices instead of system indices. This change won't impact the search/index APIs. To list hidden indices, one can use localhost:9200/_cat/indices?v&expand_wildcards=all

Testing done:
* Ran backward-compatibility tests. After updating AD, old detectors run fine, and we can create/run new detectors. All public APIs still work.
  • Loading branch information
kaituo committed Mar 8, 2021
1 parent 1d88cb4 commit 09a5694
Show file tree
Hide file tree
Showing 5 changed files with 72 additions and 55 deletions.
2 changes: 2 additions & 0 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,12 @@ on:
branches:
- main
- opendistro-*
- 7.10.2-no-workbench
pull_request:
branches:
- main
- opendistro-*
- 7.10.2-no-workbench

jobs:
Build-ad:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,12 +50,10 @@
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.indices.SystemIndexDescriptor;
import org.elasticsearch.monitor.jvm.JvmService;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.plugins.SystemIndexPlugin;
import org.elasticsearch.repositories.RepositoriesService;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
Expand Down Expand Up @@ -170,7 +168,7 @@
/**
* Entry point of AD plugin.
*/
public class AnomalyDetectorPlugin extends Plugin implements ActionPlugin, ScriptPlugin, JobSchedulerExtension, SystemIndexPlugin {
public class AnomalyDetectorPlugin extends Plugin implements ActionPlugin, ScriptPlugin, JobSchedulerExtension {

private static final Logger LOG = LogManager.getLogger(AnomalyDetectorPlugin.class);

Expand Down Expand Up @@ -635,19 +633,4 @@ public ScheduledJobParser getJobParser() {
return AnomalyDetectorJob.parse(parser);
};
}

// NOTE(review): this method was REMOVED by this commit. It previously registered
// every AD-owned dot-prefixed index as an Elasticsearch system index; the commit
// drops the SystemIndexPlugin interface and instead creates the indices as hidden
// indices (see the "index.hidden" setting added in AnomalyDetectionIndices).
@Override
public Collection<SystemIndexDescriptor> getSystemIndexDescriptors(Settings settings) {
// One descriptor per AD index/pattern, wrapped in an unmodifiable list so
// callers cannot mutate the plugin's registration.
return Collections
.unmodifiableList(
Arrays
.asList(
new SystemIndexDescriptor(AnomalyDetectionIndices.ALL_AD_RESULTS_INDEX_PATTERN, "anomaly result"),
new SystemIndexDescriptor(AnomalyDetector.ANOMALY_DETECTORS_INDEX, "detector definition"),
new SystemIndexDescriptor(AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX, "detector job"),
new SystemIndexDescriptor(CommonName.CHECKPOINT_INDEX_NAME, "model checkpoint"),
new SystemIndexDescriptor(DetectorInternalState.DETECTOR_STATE_INDEX, "detector information like total rcf updates")
)
);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,8 @@ public class AnomalyDetectionIndices implements LocalNodeMasterListener {
private boolean allUpdated;
// we only want one update at a time
private final AtomicBoolean updateRunning;
// AD index settings
private final Settings setting;

class IndexState {
// keep track of whether the mapping version is up-to-date
Expand Down Expand Up @@ -170,6 +172,8 @@ public AnomalyDetectionIndices(
.addSettingsUpdateConsumer(AD_RESULT_HISTORY_RETENTION_PERIOD, it -> { historyRetentionPeriod = it; });

this.clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_PRIMARY_SHARDS, it -> maxPrimaryShards = it);

this.setting = Settings.builder().put("index.hidden", true).build();
}

/**
Expand Down Expand Up @@ -325,7 +329,8 @@ public void initAnomalyDetectorIndexIfAbsent(ActionListener<CreateIndexResponse>
*/
// Creates the detector-definition (config) index with its JSON mapping.
public void initAnomalyDetectorIndex(ActionListener<CreateIndexResponse> actionListener) throws IOException {
CreateIndexRequest request = new CreateIndexRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX)
// NOTE(review): diff hunk — the next line is the PRE-change call; the two lines
// after it are the POST-change replacement that also applies the hidden-index setting.
.mapping(AnomalyDetector.TYPE, getAnomalyDetectorMappings(), XContentType.JSON);
.mapping(AnomalyDetector.TYPE, getAnomalyDetectorMappings(), XContentType.JSON)
.settings(setting);
// On success, record that this index's mapping is up to date before notifying the listener.
adminClient.indices().create(request, markMappingUpToDate(ADIndex.CONFIG, actionListener));
}

Expand Down Expand Up @@ -367,6 +372,7 @@ public void initAnomalyResultIndexDirectly(ActionListener<CreateIndexResponse> a
String mapping = getAnomalyResultMappings();
CreateIndexRequest request = new CreateIndexRequest(AD_RESULT_HISTORY_INDEX_PATTERN)
.mapping(CommonName.MAPPING_TYPE, mapping, XContentType.JSON)
.settings(setting)
.alias(new Alias(CommonName.ANOMALY_RESULT_INDEX_ALIAS));
choosePrimaryShards(request);
adminClient.indices().create(request, markMappingUpToDate(ADIndex.RESULT, actionListener));
Expand All @@ -381,7 +387,8 @@ public void initAnomalyResultIndexDirectly(ActionListener<CreateIndexResponse> a
// Creates the detector-job index with its JSON mapping and a computed primary-shard count.
public void initAnomalyDetectorJobIndex(ActionListener<CreateIndexResponse> actionListener) throws IOException {
// TODO: specify replica setting
CreateIndexRequest request = new CreateIndexRequest(AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX)
// NOTE(review): diff hunk — the next line is the PRE-change call; the two lines
// after it are the POST-change replacement that also applies the hidden-index setting.
.mapping(AnomalyDetector.TYPE, getAnomalyDetectorJobMappings(), XContentType.JSON);
.mapping(AnomalyDetector.TYPE, getAnomalyDetectorJobMappings(), XContentType.JSON)
.settings(setting);
choosePrimaryShards(request);
adminClient.indices().create(request, markMappingUpToDate(ADIndex.JOB, actionListener));
}
Expand All @@ -394,7 +401,8 @@ public void initAnomalyDetectorJobIndex(ActionListener<CreateIndexResponse> acti
*/
// Creates the detector-state index (internal state such as total RCF updates).
public void initDetectorStateIndex(ActionListener<CreateIndexResponse> actionListener) throws IOException {
CreateIndexRequest request = new CreateIndexRequest(DetectorInternalState.DETECTOR_STATE_INDEX)
// NOTE(review): diff hunk — the next line is the PRE-change call; the two lines
// after it are the POST-change replacement that also applies the hidden-index setting.
.mapping(AnomalyDetector.TYPE, getDetectorStateMappings(), XContentType.JSON);
.mapping(AnomalyDetector.TYPE, getDetectorStateMappings(), XContentType.JSON)
.settings(setting);
adminClient.indices().create(request, markMappingUpToDate(ADIndex.STATE, actionListener));
}

Expand All @@ -412,7 +420,8 @@ public void initCheckpointIndex(ActionListener<CreateIndexResponse> actionListen
throw new EndRunException("", "Cannot find checkpoint mapping file", true);
}
CreateIndexRequest request = new CreateIndexRequest(CommonName.CHECKPOINT_INDEX_NAME)
.mapping(CommonName.MAPPING_TYPE, mapping, XContentType.JSON);
.mapping(CommonName.MAPPING_TYPE, mapping, XContentType.JSON)
.settings(setting);
choosePrimaryShards(request);
adminClient.indices().create(request, markMappingUpToDate(ADIndex.CHECKPOINT, actionListener));
}
Expand Down Expand Up @@ -470,7 +479,9 @@ void rolloverAndDeleteHistoryIndex() {
request
.getCreateIndexRequest()
.index(AD_RESULT_HISTORY_INDEX_PATTERN)
.mapping(CommonName.MAPPING_TYPE, adResultMapping, XContentType.JSON);
.mapping(CommonName.MAPPING_TYPE, adResultMapping, XContentType.JSON)
.settings(setting);

request.addMaxIndexDocsCondition(historyMaxDocs);
adminClient.indices().rolloverIndex(request, ActionListener.wrap(response -> {
if (!response.isRolledOver()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ public static Response makeRequest(
HttpEntity entity,
List<Header> headers
) throws IOException {
return makeRequest(client, method, endpoint, params, entity, headers, true);
return makeRequest(client, method, endpoint, params, entity, headers, false);
}

public static Response makeRequest(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@

package com.amazon.opendistroforelasticsearch.ad.e2e;

import static com.amazon.opendistroforelasticsearch.ad.TestHelpers.toHttpEntity;

import java.io.File;
import java.io.FileReader;
import java.time.Instant;
Expand All @@ -29,20 +31,27 @@
import java.util.Map.Entry;
import java.util.Set;

import org.apache.http.HttpHeaders;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.WarningsHandler;

import com.amazon.opendistroforelasticsearch.ad.ODFERestTestCase;
import com.amazon.opendistroforelasticsearch.ad.TestHelpers;
import com.google.common.collect.ImmutableList;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

public class DetectionResultEvalutationIT extends ODFERestTestCase {

// End-to-end precision/recall check against the bundled "synthetic" dataset.
public void testDataset() throws Exception {
// NOTE(review): diff hunk — the next line is the PRE-change unconditional call;
// the guarded block below it is the POST-change version that skips the run when
// security (HTTPS) is enabled because the test would time out.
verifyAnomaly("synthetic", 1, 1500, 8, .9, .9, 10);
// TODO: this test case will run for a much longer time and timeout with security enabled
if (!isHttps()) {
verifyAnomaly("synthetic", 1, 1500, 8, .9, .9, 10);
}
}

private void verifyAnomaly(
Expand All @@ -54,7 +63,6 @@ private void verifyAnomaly(
double minRecall,
double maxError
) throws Exception {

RestClient client = client();

String dataFileName = String.format("data/%s.data", datasetName);
Expand All @@ -63,11 +71,10 @@ private void verifyAnomaly(
List<JsonObject> data = getData(dataFileName);
List<Entry<Instant, Instant>> anomalies = getAnomalyWindows(labelFileName);

indexTrainData(datasetName, data, trainTestSplit, client);
bulkIndexTrainData(datasetName, data, trainTestSplit, client);
String detectorId = createDetector(datasetName, intervalMinutes, client);
startDetector(detectorId, data, trainTestSplit, shingleSize, intervalMinutes, client);

indexTestData(data, datasetName, trainTestSplit, client);
bulkIndexTestData(data, datasetName, trainTestSplit, client);
double[] testResults = getTestResults(detectorId, data, trainTestSplit, intervalMinutes, anomalies, client);
verifyTestResults(testResults, anomalies, minPrecision, minRecall, maxError);
}
Expand Down Expand Up @@ -141,22 +148,6 @@ private double[] getTestResults(
return new double[] { positives, truePositives, positiveAnomalies.size(), errors };
}

/**
 * Index the evaluation slice of the dataset (rows at and after {@code trainTestSplit})
 * into the {@code datasetName} index, one document per request. Deprecation warnings
 * from the cluster are tolerated via a permissive warnings handler.
 *
 * NOTE(review): this method was removed by this commit in favor of bulkIndexTestData.
 */
private void indexTestData(List<JsonObject> data, String datasetName, int trainTestSplit, RestClient client) throws Exception {
    for (int i = trainTestSplit; i < data.size(); i++) {
        try {
            Request indexRequest = new Request("POST", String.format("/%s/_doc/", datasetName));
            RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
            optionsBuilder.setWarningsHandler(WarningsHandler.PERMISSIVE);
            indexRequest.setOptions(optionsBuilder.build());
            indexRequest.setJsonEntity(data.get(i).toString());
            client.performRequest(indexRequest);
        } catch (Exception e) {
            // Surface any indexing failure as an unchecked exception so the test aborts.
            throw new RuntimeException(e);
        }
    }
    // Give the cluster a moment so the new documents become searchable.
    Thread.sleep(1_000);
}

private void startDetector(
String detectorId,
List<JsonObject> data,
Expand Down Expand Up @@ -229,26 +220,56 @@ private List<Entry<Instant, Instant>> getAnomalyWindows(String labalFileName) th
return anomalies;
}

// NOTE(review): diff hunk — the first signature below is the PRE-change method name
// (indexTrainData); the second is the POST-change rename (bulkIndexTrainData).
private void indexTrainData(String datasetName, List<JsonObject> data, int trainTestSplit, RestClient client) throws Exception {
private void bulkIndexTrainData(String datasetName, List<JsonObject> data, int trainTestSplit, RestClient client) throws Exception {
// Create the dataset index with an explicit mapping: a date timestamp plus two
// double-valued features.
Request request = new Request("PUT", datasetName);
String requestBody = "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"},"
+ " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" } } } }";
request.setJsonEntity(requestBody);
setWarningHandler(request, false);
client.performRequest(request);
Thread.sleep(1_000);

// NOTE(review): diff hunk — the per-document stream loop below is the PRE-change
// implementation, removed by this commit.
data.stream().limit(trainTestSplit).forEach(r -> {
try {
Request req = new Request("POST", String.format("/%s/_doc/", datasetName));
req.setJsonEntity(r.toString());
client.performRequest(req);
} catch (Exception e) {
throw new RuntimeException(e);
}
});
// NOTE(review): the bulk-request builder below is the POST-change implementation:
// all training rows are sent in a single _bulk call with refresh=true, using the
// row position as the document id.
StringBuilder bulkRequestBuilder = new StringBuilder();
for (int i = 0; i < trainTestSplit; i++) {
bulkRequestBuilder.append("{ \"index\" : { \"_index\" : \"" + datasetName + "\", \"_id\" : \"" + i + "\" } }\n");
bulkRequestBuilder.append(data.get(i).toString()).append("\n");
}
TestHelpers
.makeRequest(
client,
"POST",
"_bulk?refresh=true",
null,
toHttpEntity(bulkRequestBuilder.toString()),
ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
);
Thread.sleep(1_000);
}

/**
 * Bulk-index the evaluation slice of the dataset (rows at and after
 * {@code trainTestSplit}) into the {@code datasetName} index with a single
 * _bulk call, using the row position as the document id and forcing a refresh
 * so the documents are immediately searchable.
 */
private void bulkIndexTestData(List<JsonObject> data, String datasetName, int trainTestSplit, RestClient client) throws Exception {
    StringBuilder payload = new StringBuilder();
    int docId = trainTestSplit;
    while (docId < data.size()) {
        payload.append("{ \"index\" : { \"_index\" : \"" + datasetName + "\", \"_id\" : \"" + docId + "\" } }\n");
        payload.append(data.get(docId).toString()).append("\n");
        docId++;
    }
    TestHelpers
        .makeRequest(
            client,
            "POST",
            "_bulk?refresh=true",
            null,
            toHttpEntity(payload.toString()),
            ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"))
        );
    // Brief pause to let the cluster settle before subsequent queries.
    Thread.sleep(1_000);
}

/**
 * Configure how the request treats deprecation warnings: STRICT fails the
 * request on any warning; PERMISSIVE ignores them.
 */
private void setWarningHandler(Request request, boolean strictDeprecationMode) {
    RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
    if (strictDeprecationMode) {
        optionsBuilder.setWarningsHandler(WarningsHandler.STRICT);
    } else {
        optionsBuilder.setWarningsHandler(WarningsHandler.PERMISSIVE);
    }
    request.setOptions(optionsBuilder.build());
}

private List<JsonObject> getData(String datasetFileName) throws Exception {
JsonArray jsonArray = new JsonParser()
.parse(new FileReader(new File(getClass().getResource(datasetFileName).toURI())))
Expand Down

0 comments on commit 09a5694

Please sign in to comment.