2 changes: 0 additions & 2 deletions muted-tests.yml
@@ -257,8 +257,6 @@ tests:
- class: org.elasticsearch.xpack.inference.rest.ServerSentEventsRestActionListenerTests
method: testNoStream
issue: https://github.com/elastic/elasticsearch/issues/114788
-- class: org.elasticsearch.xpack.ml.integration.DatafeedJobsRestIT
-  issue: https://github.com/elastic/elasticsearch/issues/111319
- class: org.elasticsearch.xpack.restart.CoreFullClusterRestartIT
method: testSnapshotRestore {cluster=UPGRADED}
issue: https://github.com/elastic/elasticsearch/issues/111799
DatafeedJobsIT.java
@@ -11,6 +11,8 @@
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceNotFoundException;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest;
+import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.ReferenceDocs;
@@ -80,6 +82,13 @@ public class DatafeedJobsIT extends MlNativeAutodetectIntegTestCase {
public void cleanup() {
updateClusterSettings(Settings.builder().putNull("logger.org.elasticsearch.xpack.ml.datafeed"));
cleanUp();
+// Race conditions between closing and killing tasks in these tests
+// sometimes result in lingering persistent close tasks, which cause
+// subsequent tests to fail. Therefore, they're explicitly cancelled.
+CancelTasksRequest cancelTasksRequest = new CancelTasksRequest();
+cancelTasksRequest.setActions("*close*");
+cancelTasksRequest.setWaitForCompletion(true);
+client().execute(TransportCancelTasksAction.TYPE, cancelTasksRequest).actionGet();
}

public void testLookbackOnly() throws Exception {
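
Review note on the cleanup() change above: if flakiness persists, a follow-up assertion that no "*close*" tasks remain could help diagnose it. The sketch below is not part of this change; the list-tasks class names (mirroring the cancel action added above) are assumptions about this codebase.

// Sketch only: assumed imports, mirroring the cancel-action imports added above.
// import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
// import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
// import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction;
// import static org.hamcrest.Matchers.empty;
ListTasksRequest listTasksRequest = new ListTasksRequest();
listTasksRequest.setActions("*close*");
ListTasksResponse listTasksResponse = client().execute(TransportListTasksAction.TYPE, listTasksRequest).actionGet();
assertThat(listTasksResponse.getTasks(), empty());
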
DatafeedJobsRestIT.java
@@ -37,7 +37,6 @@
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.not;

public class DatafeedJobsRestIT extends ESRestTestCase {

@@ -503,12 +502,12 @@ public void testLookbackWithIndicesOptions() throws Exception {
}""");
client().performRequest(createJobRequest);
String datafeedId = jobId + "-datafeed";
new DatafeedBuilder(datafeedId, jobId, "*hidden-*").setIndicesOptions("""
new DatafeedBuilder(datafeedId, jobId, "hidden-*").setIndicesOptions("""
{"expand_wildcards": ["all"],"allow_no_indices": true}""").build();

StringBuilder bulk = new StringBuilder();

Request createGeoData = new Request("PUT", "/.hidden-index");
Request createGeoData = new Request("PUT", "/hidden-index");
createGeoData.setJsonEntity("""
{
"mappings": {
@@ -528,23 +527,23 @@
client().performRequest(createGeoData);

bulk.append("""
{"index": {"_index": ".hidden-index", "_id": 1}}
{"index": {"_index": "hidden-index", "_id": 1}}
{"time":"2016-06-01T00:00:00Z","value": 1000}
{"index": {"_index": ".hidden-index", "_id": 2}}
{"index": {"_index": "hidden-index", "_id": 2}}
{"time":"2016-06-01T00:05:00Z","value":1500}
{"index": {"_index": ".hidden-index", "_id": 3}}
{"index": {"_index": "hidden-index", "_id": 3}}
{"time":"2016-06-01T00:10:00Z","value":1600}
{"index": {"_index": ".hidden-index", "_id": 4}}
{"index": {"_index": "hidden-index", "_id": 4}}
{"time":"2016-06-01T00:15:00Z","value":100}
{"index": {"_index": ".hidden-index", "_id": 5}}
{"index": {"_index": "hidden-index", "_id": 5}}
{"time":"2016-06-01T00:20:00Z","value":1}
{"index": {"_index": ".hidden-index", "_id": 6}}
{"index": {"_index": "hidden-index", "_id": 6}}
{"time":"2016-06-01T00:25:00Z","value":1500}
{"index": {"_index": ".hidden-index", "_id": 7}}
{"index": {"_index": "hidden-index", "_id": 7}}
{"time":"2016-06-01T00:30:00Z","value":1500}
{"index": {"_index": ".hidden-index", "_id": 8}}
{"index": {"_index": "hidden-index", "_id": 8}}
{"time":"2016-06-01T00:40:00Z","value":2100}
{"index": {"_index": ".hidden-index", "_id": 9}}
{"index": {"_index": "hidden-index", "_id": 9}}
{"time":"2016-06-01T00:41:00Z","value":0}
""");
bulkIndex(bulk.toString());
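
Context note: the datafeed keeps "expand_wildcards": ["all"] with "allow_no_indices": true, which broadens wildcard expansion to hidden and closed indices, while the test index itself is now a regular, non-dot-prefixed index. For completeness, a minimal sketch of how a truly hidden index could be created in the same low-level REST client style used in this test; the index name and request body are illustrative, not from this PR.

Request createHiddenIndex = new Request("PUT", "/hidden-lookback-data"); // hypothetical index name
createHiddenIndex.setJsonEntity("""
    {
      "settings": { "index": { "hidden": true } },
      "mappings": {
        "properties": {
          "time": { "type": "date" },
          "value": { "type": "long" }
        }
      }
    }""");
client().performRequest(createHiddenIndex);
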
@@ -1802,7 +1801,7 @@ private void bulkIndex(String bulk) throws IOException {
bulkRequest.addParameter("refresh", "true");
bulkRequest.addParameter("pretty", null);
String bulkResponse = EntityUtils.toString(client().performRequest(bulkRequest).getEntity());
-assertThat(bulkResponse, not(containsString("\"errors\": false")));
+assertThat(bulkResponse, containsString("\"errors\" : false"));
}

private Response createJobAndDataFeed(String jobId, String datafeedId) throws IOException {
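
Note on the bulkIndex() assertion change above: the request adds the pretty parameter, so the bulk response renders its keys as "errors" : false, with spaces around the colon. The old matcher looked for "errors": false without the space, a string that never occurs in pretty output, so the negated assertion passed regardless of bulk failures. The replacement asserts directly that the pretty response reports no errors. A small illustration, with an invented response body:

String prettyBulkResponse = """
    {
      "took" : 5,
      "errors" : false,
      "items" : [ ]
    }"""; // invented example of a pretty-printed bulk response
assertThat(prettyBulkResponse, containsString("\"errors\" : false")); // new check: matches when there are no errors
assertFalse(prettyBulkResponse.contains("\"errors\": false"));        // old needle (no space) never occurs in pretty output
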