-
Notifications
You must be signed in to change notification settings - Fork 24.3k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Initial implementation for DataLifecycleService (#94012)
This adds support for managing the lifecycle of data streams. It currently supports rollover and data retention.
- Loading branch information
Showing 13 changed files with 929 additions and 17 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,5 @@ | ||
pr: 94012 | ||
summary: Initial implementation for `DataLifecycleService` | ||
area: DLM | ||
type: feature | ||
issues: [] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
modules/dlm/src/internalClusterTest/java/org/elasticsearch/dlm/DataLifecycleServiceIT.java — 180 additions & 0 deletions
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,180 @@ | ||
/* | ||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one | ||
* or more contributor license agreements. Licensed under the Elastic License | ||
* 2.0 and the Server Side Public License, v 1; you may not use this file except | ||
* in compliance with, at your election, the Elastic License 2.0 or the Server | ||
* Side Public License, v 1. | ||
*/ | ||
package org.elasticsearch.dlm; | ||
|
||
import org.elasticsearch.action.DocWriteRequest; | ||
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; | ||
import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; | ||
import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; | ||
import org.elasticsearch.action.bulk.BulkItemResponse; | ||
import org.elasticsearch.action.bulk.BulkRequest; | ||
import org.elasticsearch.action.bulk.BulkResponse; | ||
import org.elasticsearch.action.datastreams.CreateDataStreamAction; | ||
import org.elasticsearch.action.datastreams.GetDataStreamAction; | ||
import org.elasticsearch.action.index.IndexRequest; | ||
import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; | ||
import org.elasticsearch.cluster.metadata.DataLifecycle; | ||
import org.elasticsearch.cluster.metadata.DataStream; | ||
import org.elasticsearch.cluster.metadata.Template; | ||
import org.elasticsearch.common.compress.CompressedXContent; | ||
import org.elasticsearch.common.settings.Settings; | ||
import org.elasticsearch.core.Nullable; | ||
import org.elasticsearch.core.TimeValue; | ||
import org.elasticsearch.datastreams.DataStreamsPlugin; | ||
import org.elasticsearch.index.Index; | ||
import org.elasticsearch.index.mapper.DateFieldMapper; | ||
import org.elasticsearch.plugins.Plugin; | ||
import org.elasticsearch.rest.RestStatus; | ||
import org.elasticsearch.test.ESIntegTestCase; | ||
import org.elasticsearch.test.transport.MockTransportService; | ||
import org.elasticsearch.xcontent.XContentType; | ||
|
||
import java.io.IOException; | ||
import java.util.Collection; | ||
import java.util.List; | ||
import java.util.Locale; | ||
import java.util.Map; | ||
|
||
import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; | ||
import static org.elasticsearch.cluster.metadata.MetadataIndexTemplateService.DEFAULT_TIMESTAMP_FIELD; | ||
import static org.hamcrest.Matchers.equalTo; | ||
import static org.hamcrest.Matchers.nullValue; | ||
import static org.hamcrest.Matchers.startsWith; | ||
|
||
public class DataLifecycleServiceIT extends ESIntegTestCase { | ||
|
||
@Override | ||
protected Collection<Class<? extends Plugin>> nodePlugins() { | ||
return List.of(DataLifecyclePlugin.class, DataStreamsPlugin.class, MockTransportService.TestPlugin.class); | ||
} | ||
|
||
protected boolean ignoreExternalCluster() { | ||
return true; | ||
} | ||
|
||
@Override | ||
protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { | ||
Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); | ||
settings.put(DataLifecycleService.DLM_POLL_INTERVAL, "1s"); | ||
return settings.build(); | ||
} | ||
|
||
public void testRolloverLifecycle() throws Exception { | ||
// empty lifecycle contains the default rollover | ||
DataLifecycle lifecycle = new DataLifecycle(); | ||
|
||
putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle); | ||
Iterable<DataLifecycleService> dataLifecycleServices = internalCluster().getInstances(DataLifecycleService.class); | ||
|
||
for (DataLifecycleService dataLifecycleService : dataLifecycleServices) { | ||
dataLifecycleService.setDefaultRolloverRequestSupplier((target) -> { | ||
RolloverRequest rolloverRequest = new RolloverRequest(target, null); | ||
rolloverRequest.addMaxIndexDocsCondition(1); | ||
return rolloverRequest; | ||
}); | ||
} | ||
String dataStreamName = "metrics-foo"; | ||
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName); | ||
client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).get(); | ||
|
||
indexDocs(dataStreamName, 1); | ||
|
||
assertBusy(() -> { | ||
GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); | ||
GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) | ||
.actionGet(); | ||
assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); | ||
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getName(), equalTo(dataStreamName)); | ||
List<Index> backingIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getIndices(); | ||
assertThat(backingIndices.size(), equalTo(2)); | ||
String backingIndex = backingIndices.get(0).getName(); | ||
assertThat(backingIndex, backingIndexEqualTo(dataStreamName, 1)); | ||
String writeIndex = backingIndices.get(1).getName(); | ||
assertThat(writeIndex, backingIndexEqualTo(dataStreamName, 2)); | ||
}); | ||
} | ||
|
||
public void testRolloverAndRetention() throws Exception { | ||
DataLifecycle lifecycle = new DataLifecycle(TimeValue.timeValueMillis(0)); | ||
|
||
putComposableIndexTemplate("id1", null, List.of("metrics-foo*"), null, null, lifecycle); | ||
Iterable<DataLifecycleService> dataLifecycleServices = internalCluster().getInstances(DataLifecycleService.class); | ||
|
||
for (DataLifecycleService dataLifecycleService : dataLifecycleServices) { | ||
dataLifecycleService.setDefaultRolloverRequestSupplier((target) -> { | ||
RolloverRequest rolloverRequest = new RolloverRequest(target, null); | ||
rolloverRequest.addMaxIndexDocsCondition(1); | ||
return rolloverRequest; | ||
}); | ||
} | ||
String dataStreamName = "metrics-foo"; | ||
CreateDataStreamAction.Request createDataStreamRequest = new CreateDataStreamAction.Request(dataStreamName); | ||
client().execute(CreateDataStreamAction.INSTANCE, createDataStreamRequest).get(); | ||
|
||
indexDocs(dataStreamName, 1); | ||
|
||
assertBusy(() -> { | ||
GetDataStreamAction.Request getDataStreamRequest = new GetDataStreamAction.Request(new String[] { dataStreamName }); | ||
GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) | ||
.actionGet(); | ||
assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); | ||
assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getName(), equalTo(dataStreamName)); | ||
List<Index> backingIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getIndices(); | ||
assertThat(backingIndices.size(), equalTo(1)); | ||
// we expect the data stream to have only one backing index, the write one, with generation 2 | ||
// as generation 1 would've been deleted by DLM given the lifecycle configuration | ||
String writeIndex = backingIndices.get(0).getName(); | ||
assertThat(writeIndex, backingIndexEqualTo(dataStreamName, 2)); | ||
}); | ||
} | ||
|
||
static void indexDocs(String dataStream, int numDocs) { | ||
BulkRequest bulkRequest = new BulkRequest(); | ||
for (int i = 0; i < numDocs; i++) { | ||
String value = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(System.currentTimeMillis()); | ||
bulkRequest.add( | ||
new IndexRequest(dataStream).opType(DocWriteRequest.OpType.CREATE) | ||
.source(String.format(Locale.ROOT, "{\"%s\":\"%s\"}", DEFAULT_TIMESTAMP_FIELD, value), XContentType.JSON) | ||
); | ||
} | ||
BulkResponse bulkResponse = client().bulk(bulkRequest).actionGet(); | ||
assertThat(bulkResponse.getItems().length, equalTo(numDocs)); | ||
String backingIndexPrefix = DataStream.BACKING_INDEX_PREFIX + dataStream; | ||
for (BulkItemResponse itemResponse : bulkResponse) { | ||
assertThat(itemResponse.getFailureMessage(), nullValue()); | ||
assertThat(itemResponse.status(), equalTo(RestStatus.CREATED)); | ||
assertThat(itemResponse.getIndex(), startsWith(backingIndexPrefix)); | ||
} | ||
client().admin().indices().refresh(new RefreshRequest(dataStream)).actionGet(); | ||
} | ||
|
||
static void putComposableIndexTemplate( | ||
String id, | ||
@Nullable String mappings, | ||
List<String> patterns, | ||
@Nullable Settings settings, | ||
@Nullable Map<String, Object> metadata, | ||
@Nullable DataLifecycle lifecycle | ||
) throws IOException { | ||
PutComposableIndexTemplateAction.Request request = new PutComposableIndexTemplateAction.Request(id); | ||
request.indexTemplate( | ||
new ComposableIndexTemplate( | ||
patterns, | ||
new Template(settings, mappings == null ? null : CompressedXContent.fromJSON(mappings), null, lifecycle), | ||
null, | ||
null, | ||
null, | ||
metadata, | ||
new ComposableIndexTemplate.DataStreamTemplate(), | ||
null | ||
) | ||
); | ||
client().execute(PutComposableIndexTemplateAction.INSTANCE, request).actionGet(); | ||
} | ||
|
||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.