From fec7860edccd60a4e11d249367c7289281154920 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 15 Jun 2018 17:12:42 +0200 Subject: [PATCH 01/34] [Tests] Fix edge case in ScriptedMetricAggregatorTests (#31357) An expected exception is only thrown when there are documents in the index created in the test setup. Fixed the test by making sure there is at least one. Closes #31307 --- .../scripted/ScriptedMetricAggregatorTests.java | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index 9417cc092d828..7a7c66d21aada 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -118,7 +118,7 @@ public static void initMockScripts() { SCRIPTS.put("initScriptParams", params -> { Map agg = (Map) params.get("_agg"); Integer initialValue = (Integer)params.get("initialValue"); - ArrayList collector = new ArrayList(); + ArrayList collector = new ArrayList<>(); collector.add(initialValue); agg.put("collector", collector); return agg; @@ -175,7 +175,6 @@ public void testNoDocs() throws IOException { /** * without combine script, the "_aggs" map should contain a list of the size of the number of documents matched */ - @SuppressWarnings("unchecked") public void testScriptedMetricWithoutCombine() throws IOException { try (Directory directory = newDirectory()) { int numDocs = randomInt(100); @@ -190,8 +189,11 @@ public void testScriptedMetricWithoutCombine() throws IOException { ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder); assertEquals(AGG_NAME, scriptedMetric.getName()); assertNotNull(scriptedMetric.aggregation()); + @SuppressWarnings("unchecked") Map agg = (Map) scriptedMetric.aggregation(); - assertEquals(numDocs, ((List) agg.get("collector")).size()); + @SuppressWarnings("unchecked") + List list = (List) agg.get("collector"); + assertEquals(numDocs, list.size()); } } } @@ -300,10 +302,9 @@ public void testSelfReferencingAggStateAfterInit() throws IOException { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31307") public void testSelfReferencingAggStateAfterMap() throws IOException { try (Directory directory = newDirectory()) { - Integer numDocs = randomInt(100); + Integer numDocs = randomIntBetween(1, 100); try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { for (int i = 0; i < numDocs; i++) { indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i))); From da5bfda5f314b95c3cce289c0ddb72208307f43a Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Fri, 15 Jun 2018 16:29:09 +0100 Subject: [PATCH 02/34] [ML] Hold ML filter items in sorted set (#31338) Filter items should be unique. They should also be sorted to make them easier to read plus save sorting in the autodetect process. 
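As an illustration only (not part of this change), the sorted-set behaviour relied on here is that of java.util.TreeSet: duplicate entries collapse and iteration follows natural ordering, which is what MlFilter now guarantees for its items. A minimal sketch:

    import java.util.Arrays;
    import java.util.SortedSet;
    import java.util.TreeSet;

    public class SortedItemsSketch {
        public static void main(String[] args) {
            // Duplicates are dropped and iteration order is natural (lexicographic) order.
            SortedSet<String> items = new TreeSet<>(Arrays.asList("xyz", "abc", "abc"));
            System.out.println(items); // prints [abc, xyz]
        }
    }
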
--- .../xpack/core/ml/job/config/MlFilter.java | 21 +++++++++++-------- .../core/ml/job/config/MlFilterTests.java | 19 ++++++++++++----- .../rest-api-spec/test/ml/filter_crud.yml | 2 +- 3 files changed, 27 insertions(+), 15 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java index 991f421265ea8..b11dfd476515c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java @@ -17,11 +17,12 @@ import org.elasticsearch.xpack.core.ml.MlMetaIndex; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Objects; +import java.util.SortedSet; +import java.util.TreeSet; public class MlFilter implements ToXContentObject, Writeable { @@ -53,9 +54,9 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final String id; private final String description; - private final List items; + private final SortedSet items; - public MlFilter(String id, String description, List items) { + public MlFilter(String id, String description, SortedSet items) { this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); this.description = description; this.items = Objects.requireNonNull(items, ITEMS.getPreferredName() + " must not be null"); @@ -68,7 +69,8 @@ public MlFilter(StreamInput in) throws IOException { } else { description = null; } - items = Arrays.asList(in.readStringArray()); + items = new TreeSet<>(); + items.addAll(Arrays.asList(in.readStringArray())); } @Override @@ -103,8 +105,8 @@ public String getDescription() { return description; } - public List getItems() { - return new ArrayList<>(items); + public SortedSet getItems() { + return Collections.unmodifiableSortedSet(items); } @Override @@ -142,7 +144,7 @@ public static class Builder { private String id; private String description; - private List items = Collections.emptyList(); + private SortedSet items = new TreeSet<>(); private Builder() {} @@ -162,12 +164,13 @@ public Builder setDescription(String description) { } public Builder setItems(List items) { - this.items = items; + this.items = new TreeSet<>(); + this.items.addAll(items); return this; } public Builder setItems(String... 
items) { - this.items = Arrays.asList(items); + setItems(Arrays.asList(items)); return this; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java index 78d87b82839a2..9ac6683f004c5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java @@ -11,10 +11,9 @@ import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; +import java.util.TreeSet; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -40,7 +39,7 @@ public static MlFilter createRandom(String filterId) { } int size = randomInt(10); - List items = new ArrayList<>(size); + TreeSet items = new TreeSet<>(); for (int i = 0; i < size; i++) { items.add(randomAlphaOfLengthBetween(1, 20)); } @@ -58,7 +57,7 @@ protected MlFilter doParseInstance(XContentParser parser) { } public void testNullId() { - NullPointerException ex = expectThrows(NullPointerException.class, () -> new MlFilter(null, "", Collections.emptyList())); + NullPointerException ex = expectThrows(NullPointerException.class, () -> new MlFilter(null, "", new TreeSet<>())); assertEquals(MlFilter.ID.getPreferredName() + " must not be null", ex.getMessage()); } @@ -88,4 +87,14 @@ public void testLenientParser() throws IOException { MlFilter.LENIENT_PARSER.apply(parser, null); } } + + public void testItemsAreSorted() { + MlFilter filter = MlFilter.builder("foo").setItems("c", "b", "a").build(); + assertThat(filter.getItems(), contains("a", "b", "c")); + } + + public void testGetItemsReturnsUnmodifiable() { + MlFilter filter = MlFilter.builder("foo").setItems("c", "b", "a").build(); + expectThrows(UnsupportedOperationException.class, () -> filter.getItems().add("x")); + } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml index a1f7eee0dcc3d..2b7b86673e0d0 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml @@ -22,7 +22,7 @@ setup: filter_id: filter-foo body: > { - "items": ["abc", "xyz"] + "items": ["xyz", "abc"] } - do: From eda4964f640f9618ec8c80db64904ef35426fd28 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Fri, 15 Jun 2018 08:44:29 -0700 Subject: [PATCH 03/34] Add rollover-creation-date setting to rolled over index (#31144) This commit introduces a new property to IndexMetaData called RolloverInfo. This object contains a map containing the aliases that were used to rollover the related index, which conditions were met, and at what time the rollover took place. much like the `index.creation_date`, it captures the approximate time that the index was rolled over to a new one. 
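As a rough sketch of how the new metadata gets attached to an index (illustrative alias name, condition, and timestamp; the types and builder methods are the ones introduced by this patch):

    import java.util.Collections;

    import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition;
    import org.elasticsearch.action.admin.indices.rollover.RolloverInfo;
    import org.elasticsearch.cluster.metadata.IndexMetaData;

    class RolloverInfoSketch {
        // Records that the alias "logs_write" rolled this index over after a max_docs
        // condition was met; the timestamp plays the same role as index.creation_date.
        static IndexMetaData recordRollover(IndexMetaData source) {
            RolloverInfo info = new RolloverInfo("logs_write",
                    Collections.singletonList(new MaxDocsCondition(1_000_000L)),
                    System.currentTimeMillis());
            return IndexMetaData.builder(source).putRolloverInfo(info).build();
        }
    }
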
--- .../indices/rollover/MaxAgeCondition.java | 9 ++ .../indices/rollover/MaxDocsCondition.java | 9 ++ .../indices/rollover/MaxSizeCondition.java | 9 ++ .../admin/indices/rollover/RolloverInfo.java | 134 ++++++++++++++++++ .../rollover/TransportRolloverAction.java | 39 +++-- .../client/transport/TransportClient.java | 3 + .../cluster/metadata/IndexMetaData.java | 69 ++++++++- .../elasticsearch/indices/IndicesModule.java | 23 ++- .../java/org/elasticsearch/node/Node.java | 1 + .../admin/indices/rollover/RolloverIT.java | 25 +++- .../cluster/metadata/IndexMetaDataTests.java | 62 ++++++-- 11 files changed, 356 insertions(+), 27 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java index c0b0d2a3297da..bf6c9e2f69592 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxAgeCondition.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -64,4 +65,12 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.field(NAME, value.getStringRep()); } + + public static MaxAgeCondition fromXContent(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.VALUE_STRING) { + return new MaxAgeCondition(TimeValue.parseTimeValue(parser.text(), NAME)); + } else { + throw new IllegalArgumentException("invalid token: " + parser.currentToken()); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxDocsCondition.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxDocsCondition.java index 8fddb870e59e9..2f897fa6a0175 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxDocsCondition.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxDocsCondition.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -61,4 +62,12 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.field(NAME, value); } + + public static MaxDocsCondition fromXContent(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.VALUE_NUMBER) { + return new MaxDocsCondition(parser.longValue()); + } else { + throw new IllegalArgumentException("invalid token: " + parser.currentToken()); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxSizeCondition.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxSizeCondition.java index bb6f37634ce87..f1a121a87d41e 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxSizeCondition.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MaxSizeCondition.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -70,4 +71,12 @@ public void writeTo(StreamOutput out) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { return builder.field(NAME, value.getStringRep()); } + + public static MaxSizeCondition fromXContent(XContentParser parser) throws IOException { + if (parser.nextToken() == XContentParser.Token.VALUE_STRING) { + return new MaxSizeCondition(ByteSizeValue.parseBytesSizeValue(parser.text(), NAME)); + } else { + throw new IllegalArgumentException("invalid token: " + parser.currentToken()); + } + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java new file mode 100644 index 0000000000000..291dd3a0ddae7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverInfo.java @@ -0,0 +1,134 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.rollover; + +import org.elasticsearch.cluster.AbstractDiffable; +import org.elasticsearch.cluster.Diff; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/** + * Class for holding Rollover related information within an index + */ +public class RolloverInfo extends AbstractDiffable implements Writeable, ToXContentFragment { + + public static final ParseField CONDITION_FIELD = new ParseField("met_conditions"); + public static final ParseField TIME_FIELD = new ParseField("time"); + + @SuppressWarnings("unchecked") + public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>("rollover_info", false, + (a, alias) -> new RolloverInfo(alias, (List) a[0], (Long) a[1])); + static { + PARSER.declareNamedObjects(ConstructingObjectParser.constructorArg(), + (p, c, n) -> p.namedObject(Condition.class, n, c), CONDITION_FIELD); + PARSER.declareLong(ConstructingObjectParser.constructorArg(), TIME_FIELD); + } + + private final String alias; + private final List metConditions; + private final long time; + + public RolloverInfo(String alias, List metConditions, long time) { + this.alias = alias; + this.metConditions = metConditions; + this.time = time; + } + + public RolloverInfo(StreamInput in) throws IOException { + this.alias = in.readString(); + this.time = in.readVLong(); + this.metConditions = in.readNamedWriteableList(Condition.class); + } + + public static RolloverInfo parse(XContentParser parser, String alias) { + return PARSER.apply(parser, alias); + } + + public String getAlias() { + return alias; + } + + public List getMetConditions() { + return metConditions; + } + + public long getTime() { + return time; + } + + public static Diff readDiffFrom(StreamInput in) throws IOException { + return readDiffFrom(RolloverInfo::new, in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(alias); + out.writeVLong(time); + out.writeNamedWriteableList(metConditions); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(alias); + builder.startObject(CONDITION_FIELD.getPreferredName()); + for (Condition condition : metConditions) { + condition.toXContent(builder, params); + } + builder.endObject(); + builder.field(TIME_FIELD.getPreferredName(), time); + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(alias, metConditions, time); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != getClass()) { + return false; + } + RolloverInfo other = (RolloverInfo) obj; + return Objects.equals(alias, other.alias) && + Objects.equals(metConditions, other.metConditions) && + Objects.equals(time, other.time); + } + + @Override + public String toString() { + return Strings.toString(this); + } +} diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index a5385c42aa0af..db686305aa7fb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.AliasAction; @@ -131,7 +132,9 @@ public void onResponse(IndicesStatsResponse statsResponse) { new RolloverResponse(sourceIndexName, rolloverIndexName, conditionResults, true, false, false, false)); return; } - if (conditionResults.size() == 0 || conditionResults.values().stream().anyMatch(result -> result)) { + List metConditions = rolloverRequest.getConditions().values().stream() + .filter(condition -> conditionResults.get(condition.toString())).collect(Collectors.toList()); + if (conditionResults.size() == 0 || metConditions.size() > 0) { CreateIndexClusterStateUpdateRequest updateRequest = prepareCreateIndexRequest(unresolvedName, rolloverIndexName, rolloverRequest); createIndexService.createIndex(updateRequest, ActionListener.wrap(createIndexClusterStateUpdateResponse -> { @@ -141,13 +144,33 @@ public void onResponse(IndicesStatsResponse statsResponse) { rolloverRequest), ActionListener.wrap(aliasClusterStateUpdateResponse -> { if (aliasClusterStateUpdateResponse.isAcknowledged()) { - activeShardsObserver.waitForActiveShards(new String[]{rolloverIndexName}, - rolloverRequest.getCreateIndexRequest().waitForActiveShards(), - rolloverRequest.masterNodeTimeout(), - isShardsAcknowledged -> listener.onResponse(new RolloverResponse( - sourceIndexName, rolloverIndexName, conditionResults, false, true, true, - isShardsAcknowledged)), - listener::onFailure); + clusterService.submitStateUpdateTask("update_rollover_info", new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) { + RolloverInfo rolloverInfo = new RolloverInfo(rolloverRequest.getAlias(), metConditions, + threadPool.absoluteTimeInMillis()); + return ClusterState.builder(currentState) + .metaData(MetaData.builder(currentState.metaData()) + .put(IndexMetaData.builder(currentState.metaData().index(sourceIndexName)) + .putRolloverInfo(rolloverInfo))).build(); + } + + @Override + public void onFailure(String source, Exception e) { + listener.onFailure(e); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + activeShardsObserver.waitForActiveShards(new String[]{rolloverIndexName}, + rolloverRequest.getCreateIndexRequest().waitForActiveShards(), + rolloverRequest.masterNodeTimeout(), + isShardsAcknowledged -> listener.onResponse(new RolloverResponse( + sourceIndexName, rolloverIndexName, conditionResults, false, true, true, + isShardsAcknowledged)), + listener::onFailure); + } + }); } else { listener.onResponse(new RolloverResponse(sourceIndexName, rolloverIndexName, conditionResults, false, true, false, false)); diff --git 
a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index 8ef6e4cd15bff..ecdecc4457bdd 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -46,6 +46,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.node.InternalSettingsPreparer; import org.elasticsearch.node.Node; @@ -150,9 +151,11 @@ private static ClientTemplate buildTemplate(Settings providedSettings, Settings SettingsModule settingsModule = new SettingsModule(settings, additionalSettings, additionalSettingsFilter); SearchModule searchModule = new SearchModule(settings, true, pluginsService.filterPlugins(SearchPlugin.class)); + IndicesModule indicesModule = new IndicesModule(Collections.emptyList()); List entries = new ArrayList<>(); entries.addAll(NetworkModule.getNamedWriteables()); entries.addAll(searchModule.getNamedWriteables()); + entries.addAll(indicesModule.getNamedWriteables()); entries.addAll(ClusterModule.getNamedWriteables()); entries.addAll(pluginsService.filterPlugins(Plugin.class).stream() .flatMap(p -> p.getNamedWriteables().stream()) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index c871dd6183042..9e4f849787867 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -25,6 +25,7 @@ import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diffable; @@ -294,6 +295,7 @@ public Iterator> settings() { static final String KEY_STATE = "state"; static final String KEY_MAPPINGS = "mappings"; static final String KEY_ALIASES = "aliases"; + static final String KEY_ROLLOVER_INFOS = "rollover_info"; public static final String KEY_PRIMARY_TERMS = "primary_terms"; public static final String INDEX_STATE_FILE_PREFIX = "state-"; @@ -331,13 +333,14 @@ public Iterator> settings() { private final Version indexUpgradedVersion; private final ActiveShardCount waitForActiveShards; + private final ImmutableOpenMap rolloverInfos; private IndexMetaData(Index index, long version, long[] primaryTerms, State state, int numberOfShards, int numberOfReplicas, Settings settings, ImmutableOpenMap mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs, ImmutableOpenIntMap> inSyncAllocationIds, DiscoveryNodeFilters requireFilters, DiscoveryNodeFilters initialRecoveryFilters, DiscoveryNodeFilters includeFilters, DiscoveryNodeFilters excludeFilters, Version indexCreatedVersion, Version indexUpgradedVersion, - int routingNumShards, int routingPartitionSize, ActiveShardCount waitForActiveShards) { + int routingNumShards, int routingPartitionSize, ActiveShardCount waitForActiveShards, ImmutableOpenMap rolloverInfos) { this.index = index; this.version = version; @@ -362,6 +365,7 @@ private IndexMetaData(Index 
index, long version, long[] primaryTerms, State stat this.routingFactor = routingNumShards / numberOfShards; this.routingPartitionSize = routingPartitionSize; this.waitForActiveShards = waitForActiveShards; + this.rolloverInfos = rolloverInfos; assert numberOfShards * routingFactor == routingNumShards : routingNumShards + " must be a multiple of " + numberOfShards; } @@ -517,6 +521,10 @@ public ImmutableOpenIntMap> getInSyncAllocationIds() { return inSyncAllocationIds; } + public ImmutableOpenMap getRolloverInfos() { + return rolloverInfos; + } + public Set inSyncAllocationIds(int shardId) { assert shardId >= 0 && shardId < numberOfShards; return inSyncAllocationIds.get(shardId); @@ -587,6 +595,9 @@ public boolean equals(Object o) { if (!inSyncAllocationIds.equals(that.inSyncAllocationIds)) { return false; } + if (rolloverInfos.equals(that.rolloverInfos) == false) { + return false; + } return true; } @@ -603,6 +614,7 @@ public int hashCode() { result = 31 * result + Long.hashCode(routingNumShards); result = 31 * result + Arrays.hashCode(primaryTerms); result = 31 * result + inSyncAllocationIds.hashCode(); + result = 31 * result + rolloverInfos.hashCode(); return result; } @@ -638,6 +650,7 @@ private static class IndexMetaDataDiff implements Diff { private final Diff> aliases; private final Diff> customs; private final Diff>> inSyncAllocationIds; + private final Diff> rolloverInfos; IndexMetaDataDiff(IndexMetaData before, IndexMetaData after) { index = after.index.getName(); @@ -651,6 +664,7 @@ private static class IndexMetaDataDiff implements Diff { customs = DiffableUtils.diff(before.customs, after.customs, DiffableUtils.getStringKeySerializer()); inSyncAllocationIds = DiffableUtils.diff(before.inSyncAllocationIds, after.inSyncAllocationIds, DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance()); + rolloverInfos = DiffableUtils.diff(before.rolloverInfos, after.rolloverInfos, DiffableUtils.getStringKeySerializer()); } IndexMetaDataDiff(StreamInput in) throws IOException { @@ -679,6 +693,13 @@ public Diff readDiff(StreamInput in, String key) throws IOException { }); inSyncAllocationIds = DiffableUtils.readImmutableOpenIntMapDiff(in, DiffableUtils.getVIntKeySerializer(), DiffableUtils.StringSetValueSerializer.getInstance()); + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + rolloverInfos = DiffableUtils.readImmutableOpenMapDiff(in, DiffableUtils.getStringKeySerializer(), RolloverInfo::new, + RolloverInfo::readDiffFrom); + } else { + ImmutableOpenMap emptyMap = ImmutableOpenMap.of(); + rolloverInfos = DiffableUtils.diff(emptyMap, emptyMap, DiffableUtils.getStringKeySerializer()); + } } @Override @@ -693,6 +714,9 @@ public void writeTo(StreamOutput out) throws IOException { aliases.writeTo(out); customs.writeTo(out); inSyncAllocationIds.writeTo(out); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + rolloverInfos.writeTo(out); + } } @Override @@ -707,6 +731,7 @@ public IndexMetaData apply(IndexMetaData part) { builder.aliases.putAll(aliases.apply(part.aliases)); builder.customs.putAll(customs.apply(part.customs)); builder.inSyncAllocationIds.putAll(inSyncAllocationIds.apply(part.inSyncAllocationIds)); + builder.rolloverInfos.putAll(rolloverInfos.apply(part.rolloverInfos)); return builder.build(); } } @@ -740,6 +765,12 @@ public static IndexMetaData readFrom(StreamInput in) throws IOException { Set allocationIds = DiffableUtils.StringSetValueSerializer.getInstance().read(in, key); builder.putInSyncAllocationIds(key, allocationIds); } + 
if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + int rolloverAliasesSize = in.readVInt(); + for (int i = 0; i < rolloverAliasesSize; i++) { + builder.putRolloverInfo(new RolloverInfo(in)); + } + } return builder.build(); } @@ -769,6 +800,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(cursor.key); DiffableUtils.StringSetValueSerializer.getInstance().write(cursor.value, out); } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeVInt(rolloverInfos.size()); + for (ObjectCursor cursor : rolloverInfos.values()) { + cursor.value.writeTo(out); + } + } } public static Builder builder(String index) { @@ -790,6 +827,7 @@ public static class Builder { private final ImmutableOpenMap.Builder aliases; private final ImmutableOpenMap.Builder customs; private final ImmutableOpenIntMap.Builder> inSyncAllocationIds; + private final ImmutableOpenMap.Builder rolloverInfos; private Integer routingNumShards; public Builder(String index) { @@ -798,6 +836,7 @@ public Builder(String index) { this.aliases = ImmutableOpenMap.builder(); this.customs = ImmutableOpenMap.builder(); this.inSyncAllocationIds = ImmutableOpenIntMap.builder(); + this.rolloverInfos = ImmutableOpenMap.builder(); } public Builder(IndexMetaData indexMetaData) { @@ -811,6 +850,7 @@ public Builder(IndexMetaData indexMetaData) { this.customs = ImmutableOpenMap.builder(indexMetaData.customs); this.routingNumShards = indexMetaData.routingNumShards; this.inSyncAllocationIds = ImmutableOpenIntMap.builder(indexMetaData.inSyncAllocationIds); + this.rolloverInfos = ImmutableOpenMap.builder(indexMetaData.rolloverInfos); } public String index() { @@ -951,6 +991,15 @@ public Builder putInSyncAllocationIds(int shardId, Set allocationIds) { return this; } + public RolloverInfo getRolloverInfo(String alias) { + return rolloverInfos.get(alias); + } + + public Builder putRolloverInfo(RolloverInfo rolloverInfo) { + rolloverInfos.put(rolloverInfo.getAlias(), rolloverInfo); + return this; + } + public long version() { return this.version; } @@ -1089,7 +1138,7 @@ public IndexMetaData build() { return new IndexMetaData(new Index(index, uuid), version, primaryTerms, state, numberOfShards, numberOfReplicas, tmpSettings, mappings.build(), tmpAliases.build(), customs.build(), filledInSyncAllocationIds.build(), requireFilters, initialRecoveryFilters, includeFilters, excludeFilters, - indexCreatedVersion, indexUpgradedVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards); + indexCreatedVersion, indexUpgradedVersion, getRoutingNumShards(), routingPartitionSize, waitForActiveShards, rolloverInfos.build()); } public static void toXContent(IndexMetaData indexMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { @@ -1143,6 +1192,12 @@ public static void toXContent(IndexMetaData indexMetaData, XContentBuilder build } builder.endObject(); + builder.startObject(KEY_ROLLOVER_INFOS); + for (ObjectCursor cursor : indexMetaData.getRolloverInfos().values()) { + cursor.value.toXContent(builder, params); + } + builder.endObject(); + builder.endObject(); } @@ -1202,6 +1257,16 @@ public static IndexMetaData fromXContent(XContentParser parser) throws IOExcepti throw new IllegalArgumentException("Unexpected token: " + token); } } + } else if (KEY_ROLLOVER_INFOS.equals(currentFieldName)) { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == 
XContentParser.Token.START_OBJECT) { + builder.putRolloverInfo(RolloverInfo.parse(parser, currentFieldName)); + } else { + throw new IllegalArgumentException("Unexpected token: " + token); + } + } } else if ("warmers".equals(currentFieldName)) { // TODO: do this in 6.0: // throw new IllegalArgumentException("Warmers are not supported anymore - are you upgrading from 1.x?"); diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java index 11024286b2274..2d41491e3a746 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -24,9 +24,12 @@ import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition; import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition; import org.elasticsearch.action.resync.TransportResyncReplicationAction; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.inject.AbstractModule; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.mapper.BinaryFieldMapper; @@ -62,6 +65,7 @@ import org.elasticsearch.plugins.MapperPlugin; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedHashMap; @@ -86,15 +90,26 @@ public IndicesModule(List mapperPlugins) { } private void registerBuiltinWritables() { - namedWritables.add(new Entry(Condition.class, MaxAgeCondition.NAME, MaxAgeCondition::new)); - namedWritables.add(new Entry(Condition.class, MaxDocsCondition.NAME, MaxDocsCondition::new)); - namedWritables.add(new Entry(Condition.class, MaxSizeCondition.NAME, MaxSizeCondition::new)); + namedWritables.add(new NamedWriteableRegistry.Entry(Condition.class, MaxAgeCondition.NAME, MaxAgeCondition::new)); + namedWritables.add(new NamedWriteableRegistry.Entry(Condition.class, MaxDocsCondition.NAME, MaxDocsCondition::new)); + namedWritables.add(new NamedWriteableRegistry.Entry(Condition.class, MaxSizeCondition.NAME, MaxSizeCondition::new)); } - public List getNamedWriteables() { + public List getNamedWriteables() { return namedWritables; } + public List getNamedXContents() { + return Arrays.asList( + new NamedXContentRegistry.Entry(Condition.class, new ParseField(MaxAgeCondition.NAME), (p, c) -> + MaxAgeCondition.fromXContent(p)), + new NamedXContentRegistry.Entry(Condition.class, new ParseField(MaxDocsCondition.NAME), (p, c) -> + MaxDocsCondition.fromXContent(p)), + new NamedXContentRegistry.Entry(Condition.class, new ParseField(MaxSizeCondition.NAME), (p, c) -> + MaxSizeCondition.fromXContent(p)) + ); + } + private Map getMappers(List mapperPlugins) { Map mappers = new LinkedHashMap<>(); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index b6274223d2260..aa9b3943e8863 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -389,6 +389,7 @@ protected Node(final Environment environment, Collection final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); 
NamedXContentRegistry xContentRegistry = new NamedXContentRegistry(Stream.of( NetworkModule.getNamedXContents().stream(), + indicesModule.getNamedXContents().stream(), searchModule.getNamedXContents().stream(), pluginsService.filterPlugins(Plugin.class).stream() .flatMap(p -> p.getNamedXContent().stream()), diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 869bba452fefe..aa35d9d273a92 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -37,6 +37,7 @@ import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Set; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -44,6 +45,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.collection.IsEmptyCollection.empty; +import static org.hamcrest.core.CombinableMatcher.both; +import static org.hamcrest.number.OrderingComparison.greaterThanOrEqualTo; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class RolloverIT extends ESIntegTestCase { @@ -70,6 +75,7 @@ public void testRolloverOnEmptyIndex() throws Exception { } public void testRollover() throws Exception { + long beforeTime = client().threadPool().absoluteTimeInMillis() - 1000L; assertAcked(prepareCreate("test_index-2").addAlias(new Alias("test_alias")).get()); index("test_index-2", "type1", "1", "field", "value"); flush("test_index-2"); @@ -84,6 +90,11 @@ public void testRollover() throws Exception { assertFalse(oldIndex.getAliases().containsKey("test_alias")); final IndexMetaData newIndex = state.metaData().index("test_index-000003"); assertTrue(newIndex.getAliases().containsKey("test_alias")); + assertThat(oldIndex.getRolloverInfos().size(), equalTo(1)); + assertThat(oldIndex.getRolloverInfos().get("test_alias").getAlias(), equalTo("test_alias")); + assertThat(oldIndex.getRolloverInfos().get("test_alias").getMetConditions(), is(empty())); + assertThat(oldIndex.getRolloverInfos().get("test_alias").getTime(), + is(both(greaterThanOrEqualTo(beforeTime)).and(lessThanOrEqualTo(client().threadPool().absoluteTimeInMillis() + 1000L)))); } public void testRolloverWithIndexSettings() throws Exception { @@ -246,17 +257,27 @@ public void testRolloverMaxSize() throws Exception { assertThat(response.getOldIndex(), equalTo("test-1")); assertThat(response.getNewIndex(), equalTo("test-000002")); assertThat("No rollover with a large max_size condition", response.isRolledOver(), equalTo(false)); + final IndexMetaData oldIndex = client().admin().cluster().prepareState().get().getState().metaData().index("test-1"); + assertThat(oldIndex.getRolloverInfos().size(), equalTo(0)); } // A small max_size { + ByteSizeValue maxSizeValue = new ByteSizeValue(randomIntBetween(1, 20), ByteSizeUnit.BYTES); + long beforeTime = client().threadPool().absoluteTimeInMillis() - 1000L; final RolloverResponse response = client().admin().indices() .prepareRolloverIndex("test_alias") - .addMaxIndexSizeCondition(new ByteSizeValue(randomIntBetween(1, 20), ByteSizeUnit.BYTES)) + .addMaxIndexSizeCondition(maxSizeValue) .get(); assertThat(response.getOldIndex(), equalTo("test-1")); 
assertThat(response.getNewIndex(), equalTo("test-000002")); assertThat("Should rollover with a small max_size condition", response.isRolledOver(), equalTo(true)); + final IndexMetaData oldIndex = client().admin().cluster().prepareState().get().getState().metaData().index("test-1"); + List metConditions = oldIndex.getRolloverInfos().get("test_alias").getMetConditions(); + assertThat(metConditions.size(), equalTo(1)); + assertThat(metConditions.get(0).toString(), equalTo(new MaxSizeCondition(maxSizeValue).toString())); + assertThat(oldIndex.getRolloverInfos().get("test_alias").getTime(), + is(both(greaterThanOrEqualTo(beforeTime)).and(lessThanOrEqualTo(client().threadPool().absoluteTimeInMillis() + 1000L)))); } // An empty index @@ -268,6 +289,8 @@ public void testRolloverMaxSize() throws Exception { assertThat(response.getOldIndex(), equalTo("test-000002")); assertThat(response.getNewIndex(), equalTo("test-000003")); assertThat("No rollover with an empty index", response.isRolledOver(), equalTo(false)); + final IndexMetaData oldIndex = client().admin().cluster().prepareState().get().getState().metaData().index("test-000002"); + assertThat(oldIndex.getRolloverInfos().size(), equalTo(0)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java index 7734a9d7b4e6a..9e8a5e04f43c1 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetaDataTests.java @@ -19,18 +19,31 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.action.admin.indices.rollover.MaxAgeCondition; +import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition; +import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition; +import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesModule; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; import java.io.IOException; +import java.util.Arrays; import java.util.Collections; import java.util.Set; @@ -38,6 +51,23 @@ public class IndexMetaDataTests extends ESTestCase { + private IndicesModule INDICES_MODULE = new IndicesModule(Collections.emptyList()); + + @Before + public void setUp() throws Exception { + super.setUp(); + } + + @Override + protected NamedWriteableRegistry writableRegistry() { + return new NamedWriteableRegistry(INDICES_MODULE.getNamedWriteables()); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(INDICES_MODULE.getNamedXContents()); + } + 
public void testIndexMetaDataSerialization() throws IOException { Integer numShard = randomFrom(1, 2, 4, 8, 16); int numberOfReplicas = randomIntBetween(0, 10); @@ -50,7 +80,12 @@ public void testIndexMetaDataSerialization() throws IOException { .creationDate(randomLong()) .primaryTerm(0, 2) .setRoutingNumShards(32) - .build(); + .putRolloverInfo( + new RolloverInfo(randomAlphaOfLength(5), + Arrays.asList(new MaxAgeCondition(TimeValue.timeValueMillis(randomNonNegativeLong())), + new MaxSizeCondition(new ByteSizeValue(randomNonNegativeLong())), + new MaxDocsCondition(randomNonNegativeLong())), + randomNonNegativeLong())).build(); final XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); @@ -71,17 +106,20 @@ public void testIndexMetaDataSerialization() throws IOException { final BytesStreamOutput out = new BytesStreamOutput(); metaData.writeTo(out); - IndexMetaData deserialized = IndexMetaData.readFrom(out.bytes().streamInput()); - assertEquals(metaData, deserialized); - assertEquals(metaData.hashCode(), deserialized.hashCode()); - - assertEquals(metaData.getNumberOfReplicas(), deserialized.getNumberOfReplicas()); - assertEquals(metaData.getNumberOfShards(), deserialized.getNumberOfShards()); - assertEquals(metaData.getCreationVersion(), deserialized.getCreationVersion()); - assertEquals(metaData.getRoutingNumShards(), deserialized.getRoutingNumShards()); - assertEquals(metaData.getCreationDate(), deserialized.getCreationDate()); - assertEquals(metaData.getRoutingFactor(), deserialized.getRoutingFactor()); - assertEquals(metaData.primaryTerm(0), deserialized.primaryTerm(0)); + try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry())) { + IndexMetaData deserialized = IndexMetaData.readFrom(in); + assertEquals(metaData, deserialized); + assertEquals(metaData.hashCode(), deserialized.hashCode()); + + assertEquals(metaData.getNumberOfReplicas(), deserialized.getNumberOfReplicas()); + assertEquals(metaData.getNumberOfShards(), deserialized.getNumberOfShards()); + assertEquals(metaData.getCreationVersion(), deserialized.getCreationVersion()); + assertEquals(metaData.getRoutingNumShards(), deserialized.getRoutingNumShards()); + assertEquals(metaData.getCreationDate(), deserialized.getCreationDate()); + assertEquals(metaData.getRoutingFactor(), deserialized.getRoutingFactor()); + assertEquals(metaData.primaryTerm(0), deserialized.primaryTerm(0)); + assertEquals(metaData.getRolloverInfos(), deserialized.getRolloverInfos()); + } } public void testGetRoutingFactor() { From 3b70e943eb08f90aa24e9c34033c8ca2a7211239 Mon Sep 17 00:00:00 2001 From: Tal Levy Date: Fri, 15 Jun 2018 08:45:29 -0700 Subject: [PATCH 04/34] add is-write-index flag to aliases (#30942) This commit adds the is-write-index flag for aliases. It allows requests to set the flag, and responses to display the flag. It does not validate and/or affect any indexing/getting/updating behavior of Elasticsearch -- this will be done in a follow-up PR. 
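As a rough sketch (not part of the patch itself, and with made-up index and alias names), flagging an alias as the write index from the Java client uses the new Alias#writeIndex setter added here:

    import org.elasticsearch.action.admin.indices.alias.Alias;
    import org.elasticsearch.client.Client;

    class WriteIndexAliasSketch {
        // Creates an index and marks its alias as the write index for that alias.
        static void createWithWriteAlias(Client client) {
            client.admin().indices()
                    .prepareCreate("test_index-000001")
                    .addAlias(new Alias("test_alias").writeIndex(true))
                    .get();
        }
    }
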
--- docs/reference/indices/aliases.asciidoc | 88 ++++++++++++ .../test/indices.create/10_basic.yml | 25 +++- .../action/admin/indices/alias/Alias.java | 34 +++++ .../indices/alias/IndicesAliasesRequest.java | 22 +++ .../alias/IndicesAliasesRequestBuilder.java | 12 ++ .../alias/TransportIndicesAliasesAction.java | 3 +- .../rollover/TransportRolloverAction.java | 2 +- .../cluster/metadata/AliasAction.java | 24 +++- .../cluster/metadata/AliasMetaData.java | 44 +++++- .../cluster/metadata/AliasOrIndex.java | 30 ++++ .../cluster/metadata/AliasValidator.java | 4 +- .../cluster/metadata/MetaData.java | 31 ++-- .../metadata/MetaDataCreateIndexService.java | 2 +- .../metadata/MetaDataIndexAliasesService.java | 2 +- .../indices/alias/AliasActionsTests.java | 3 + .../create/CreateIndexRequestTests.java | 3 +- .../indices/shrink/ResizeRequestTests.java | 3 +- .../cluster/metadata/AliasMetaDataTests.java | 17 +++ .../metadata/IndexCreationTaskTests.java | 27 ++++ .../MetaDataIndexAliasesServiceTests.java | 134 +++++++++++++++++- .../cluster/metadata/MetaDataTests.java | 35 +++++ .../metadata/ToAndFromJsonMetaDataTests.java | 9 +- .../SharedClusterSnapshotRestoreIT.java | 6 +- .../index/RandomCreateIndexGenerator.java | 4 + 24 files changed, 524 insertions(+), 40 deletions(-) diff --git a/docs/reference/indices/aliases.asciidoc b/docs/reference/indices/aliases.asciidoc index 447873a595bbe..b155cfef302fb 100644 --- a/docs/reference/indices/aliases.asciidoc +++ b/docs/reference/indices/aliases.asciidoc @@ -244,6 +244,94 @@ GET /alias2/_search?q=user:kimchy&routing=2,3 // CONSOLE // TEST[continued] +[float] +[[aliases-write-index]] +==== Write Index + +It is possible to associate the index pointed to by an alias as the write index. +When specified, all index and update requests against an alias that point to multiple +indices will attempt to resolve to the one index that is the write index. +Only one index per alias can be assigned to be the write index at a time. If no write index is specified +and there are multiple indices referenced by an alias, then writes will not be allowed. + +It is possible to specify an index associated with an alias as a write index using both the aliases API +and index creation API. + +[source,js] +-------------------------------------------------- +POST /_aliases +{ + "actions" : [ + { + "add" : { + "index" : "test", + "alias" : "alias1", + "is_write_index" : true + } + } + ] +} +-------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT test\n/] + +In this example, we associate the alias `alias1` to both `test` and `test2`, where +`test` will be the index chosen for writing to. + +[source,js] +-------------------------------------------------- +PUT /alias1/_doc/1 +{ + "foo": "bar" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +The new document that was indexed to `/alias1/_doc/1` will be indexed as if it were +`/test/_doc/1`. + +[source,js] +-------------------------------------------------- +GET /test/_doc/1 +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +To swap which index is the write index for an alias, the Aliases API can be leveraged to +do an atomic swap. The swap is not dependent on the ordering of the actions. 
+ +[source,js] +-------------------------------------------------- +POST /_aliases +{ + "actions" : [ + { + "add" : { + "index" : "test", + "alias" : "alias1", + "is_write_index" : true + } + }, { + "add" : { + "index" : "test2", + "alias" : "alias1", + "is_write_index" : false + } + } + ] +} +-------------------------------------------------- +// CONSOLE +// TEST[s/^/PUT test\nPUT test2\n/] + +[IMPORTANT] +===================================== +Aliases that do not explicitly set `is_write_index: true` for an index, and +only reference one index, will have that referenced index behave as if it is the write index +until an additional index is referenced. At that point, there will be no write index and +writes will be rejected. +===================================== [float] [[alias-adding]] diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml index 8fafd9ef250aa..6f7c5a6009386 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.create/10_basic.yml @@ -79,7 +79,6 @@ indices.get_alias: index: test_index - - match: {test_index.aliases.test_alias: {}} - match: {test_index.aliases.test_blias.search_routing: b} - match: {test_index.aliases.test_blias.index_routing: b} - is_false: test_index.aliases.test_blias.filter @@ -87,6 +86,30 @@ - is_false: test_index.aliases.test_clias.index_routing - is_false: test_index.aliases.test_clias.search_routing +--- +"Create index with write aliases": + - skip: + version: " - 6.99.99" + reason: is_write_index is not implemented in ES <= 6.x + - do: + indices.create: + index: test_index + body: + aliases: + test_alias: {} + test_blias: + is_write_index: false + test_clias: + is_write_index: true + + - do: + indices.get_alias: + index: test_index + + - is_false: test_index.aliases.test_alias.is_write_index + - is_false: test_index.aliases.test_blias.is_write_index + - is_true: test_index.aliases.test_clias.is_write_index + --- "Create index with no type mappings": - do: diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java index 9172500a8cb50..10ee8877fc9c9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.alias; import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -49,6 +50,7 @@ public class Alias implements Streamable, ToXContentFragment { private static final ParseField ROUTING = new ParseField("routing"); private static final ParseField INDEX_ROUTING = new ParseField("index_routing", "indexRouting", "index-routing"); private static final ParseField SEARCH_ROUTING = new ParseField("search_routing", "searchRouting", "search-routing"); + private static final ParseField IS_WRITE_INDEX = new ParseField("is_write_index"); private String name; @@ -61,6 +63,9 @@ public class Alias implements Streamable, ToXContentFragment { @Nullable private String searchRouting; + @Nullable + private Boolean writeIndex; + private Alias() { } @@ -167,6 +172,21 @@ public Alias searchRouting(String 
searchRouting) { return this; } + /** + * @return the write index flag for the alias + */ + public Boolean writeIndex() { + return writeIndex; + } + + /** + * Sets whether an alias is pointing to a write-index + */ + public Alias writeIndex(@Nullable Boolean writeIndex) { + this.writeIndex = writeIndex; + return this; + } + /** * Allows to read an alias from the provided input stream */ @@ -182,6 +202,11 @@ public void readFrom(StreamInput in) throws IOException { filter = in.readOptionalString(); indexRouting = in.readOptionalString(); searchRouting = in.readOptionalString(); + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + writeIndex = in.readOptionalBoolean(); + } else { + writeIndex = null; + } } @Override @@ -190,6 +215,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(filter); out.writeOptionalString(indexRouting); out.writeOptionalString(searchRouting); + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeOptionalBoolean(writeIndex); + } } /** @@ -219,6 +247,10 @@ public static Alias fromXContent(XContentParser parser) throws IOException { } else if (SEARCH_ROUTING.match(currentFieldName, parser.getDeprecationHandler())) { alias.searchRouting(parser.text()); } + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + if (IS_WRITE_INDEX.match(currentFieldName, parser.getDeprecationHandler())) { + alias.writeIndex(parser.booleanValue()); + } } } return alias; @@ -245,6 +277,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } } + builder.field(IS_WRITE_INDEX.getPreferredName(), writeIndex); + builder.endObject(); return builder; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java index 6332f50c1452e..00e3f7e32df3b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesRequest.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.indices.alias; import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.AliasesRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -84,6 +85,7 @@ public static class AliasActions implements AliasesRequest, Writeable, ToXConten private static final ParseField ROUTING = new ParseField("routing"); private static final ParseField INDEX_ROUTING = new ParseField("index_routing", "indexRouting", "index-routing"); private static final ParseField SEARCH_ROUTING = new ParseField("search_routing", "searchRouting", "search-routing"); + private static final ParseField IS_WRITE_INDEX = new ParseField("is_write_index"); private static final ParseField ADD = new ParseField("add"); private static final ParseField REMOVE = new ParseField("remove"); @@ -179,6 +181,7 @@ private static ObjectParser parser(String name, Supplier REMOVE_PARSER = parser(REMOVE.getPreferredName(), AliasActions::remove); private static final ObjectParser REMOVE_INDEX_PARSER = parser(REMOVE_INDEX.getPreferredName(), @@ -215,6 +218,7 @@ private static ObjectParser parser(String name, Supplier actions = unmodifiableList(Arrays.asList( - new AliasAction.Add(newIndex, request.getAlias(), null, null, null), + new AliasAction.Add(newIndex, request.getAlias(), null, 
null, null, null), new AliasAction.Remove(oldIndex, request.getAlias()))); final IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(actions) .ackTimeout(request.ackTimeout()) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java index ff49d072815fb..436ae79c10319 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasAction.java @@ -51,7 +51,7 @@ public String getIndex() { /** * Apply the action. - * + * * @param aliasValidator call to validate a new alias before adding it to the builder * @param metadata metadata builder for the changes made by all actions as part of this request * @param index metadata for the index being changed @@ -64,7 +64,7 @@ public String getIndex() { */ @FunctionalInterface public interface NewAliasValidator { - void validate(String alias, @Nullable String indexRouting, @Nullable String filter); + void validate(String alias, @Nullable String indexRouting, @Nullable String filter, @Nullable Boolean writeIndex); } /** @@ -82,10 +82,14 @@ public static class Add extends AliasAction { @Nullable private final String searchRouting; + @Nullable + private final Boolean writeIndex; + /** * Build the operation. */ - public Add(String index, String alias, @Nullable String filter, @Nullable String indexRouting, @Nullable String searchRouting) { + public Add(String index, String alias, @Nullable String filter, @Nullable String indexRouting, + @Nullable String searchRouting, @Nullable Boolean writeIndex) { super(index); if (false == Strings.hasText(alias)) { throw new IllegalArgumentException("[alias] is required"); @@ -94,6 +98,7 @@ public Add(String index, String alias, @Nullable String filter, @Nullable String this.filter = filter; this.indexRouting = indexRouting; this.searchRouting = searchRouting; + this.writeIndex = writeIndex; } /** @@ -103,6 +108,10 @@ public String getAlias() { return alias; } + public Boolean writeIndex() { + return writeIndex; + } + @Override boolean removeIndex() { return false; @@ -110,15 +119,18 @@ boolean removeIndex() { @Override boolean apply(NewAliasValidator aliasValidator, MetaData.Builder metadata, IndexMetaData index) { - aliasValidator.validate(alias, indexRouting, filter); + aliasValidator.validate(alias, indexRouting, filter, writeIndex); + AliasMetaData newAliasMd = AliasMetaData.newAliasMetaDataBuilder(alias).filter(filter).indexRouting(indexRouting) - .searchRouting(searchRouting).build(); + .searchRouting(searchRouting).writeIndex(writeIndex).build(); + // Check if this alias already exists AliasMetaData currentAliasMd = index.getAliases().get(alias); if (currentAliasMd != null && currentAliasMd.equals(newAliasMd)) { // It already exists, ignore it return false; } + metadata.put(IndexMetaData.builder(index).putAlias(newAliasMd)); return true; } @@ -182,4 +194,4 @@ boolean apply(NewAliasValidator aliasValidator, MetaData.Builder metadata, Index throw new UnsupportedOperationException(); } } -} \ No newline at end of file +} diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java index 945c94bcd642d..29455123287a6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasMetaData.java @@ -20,8 +20,10 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.ElasticsearchGenerationException; +import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.Diff; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -55,7 +57,10 @@ public class AliasMetaData extends AbstractDiffable implements To private final Set searchRoutingValues; - private AliasMetaData(String alias, CompressedXContent filter, String indexRouting, String searchRouting) { + @Nullable + private final Boolean writeIndex; + + private AliasMetaData(String alias, CompressedXContent filter, String indexRouting, String searchRouting, Boolean writeIndex) { this.alias = alias; this.filter = filter; this.indexRouting = indexRouting; @@ -65,10 +70,11 @@ private AliasMetaData(String alias, CompressedXContent filter, String indexRouti } else { searchRoutingValues = emptySet(); } + this.writeIndex = writeIndex; } private AliasMetaData(AliasMetaData aliasMetaData, String alias) { - this(alias, aliasMetaData.filter(), aliasMetaData.indexRouting(), aliasMetaData.searchRouting()); + this(alias, aliasMetaData.filter(), aliasMetaData.indexRouting(), aliasMetaData.searchRouting(), aliasMetaData.writeIndex()); } public String alias() { @@ -111,6 +117,10 @@ public Set searchRoutingValues() { return searchRoutingValues; } + public Boolean writeIndex() { + return writeIndex; + } + public static Builder builder(String alias) { return new Builder(alias); } @@ -138,6 +148,8 @@ public boolean equals(Object o) { if (indexRouting != null ? !indexRouting.equals(that.indexRouting) : that.indexRouting != null) return false; if (searchRouting != null ? !searchRouting.equals(that.searchRouting) : that.searchRouting != null) return false; + if (writeIndex != null ? writeIndex != that.writeIndex : that.writeIndex != null) + return false; return true; } @@ -148,6 +160,7 @@ public int hashCode() { result = 31 * result + (filter != null ? filter.hashCode() : 0); result = 31 * result + (indexRouting != null ? indexRouting.hashCode() : 0); result = 31 * result + (searchRouting != null ? searchRouting.hashCode() : 0); + result = 31 * result + (writeIndex != null ? 
writeIndex.hashCode() : 0); return result; } @@ -173,6 +186,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeOptionalBoolean(writeIndex()); + } } public AliasMetaData(StreamInput in) throws IOException { @@ -194,6 +210,11 @@ public AliasMetaData(StreamInput in) throws IOException { searchRouting = null; searchRoutingValues = emptySet(); } + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + writeIndex = in.readOptionalBoolean(); + } else { + writeIndex = null; + } } public static Diff readDiffFrom(StreamInput in) throws IOException { @@ -221,6 +242,9 @@ public static class Builder { private String searchRouting; + @Nullable + private Boolean writeIndex; + public Builder(String alias) { this.alias = alias; @@ -231,6 +255,7 @@ public Builder(AliasMetaData aliasMetaData) { filter = aliasMetaData.filter(); indexRouting = aliasMetaData.indexRouting(); searchRouting = aliasMetaData.searchRouting(); + writeIndex = aliasMetaData.writeIndex(); } public String alias() { @@ -284,8 +309,13 @@ public Builder searchRouting(String searchRouting) { return this; } + public Builder writeIndex(@Nullable Boolean writeIndex) { + this.writeIndex = writeIndex; + return this; + } + public AliasMetaData build() { - return new AliasMetaData(alias, filter, indexRouting, searchRouting); + return new AliasMetaData(alias, filter, indexRouting, searchRouting, writeIndex); } public static void toXContent(AliasMetaData aliasMetaData, XContentBuilder builder, ToXContent.Params params) throws IOException { @@ -307,6 +337,10 @@ public static void toXContent(AliasMetaData aliasMetaData, XContentBuilder build builder.field("search_routing", aliasMetaData.searchRouting()); } + if (aliasMetaData.writeIndex() != null) { + builder.field("is_write_index", aliasMetaData.writeIndex()); + } + builder.endObject(); } @@ -343,6 +377,10 @@ public static AliasMetaData fromXContent(XContentParser parser) throws IOExcepti } } else if (token == XContentParser.Token.START_ARRAY) { parser.skipChildren(); + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + if ("is_write_index".equals(currentFieldName)) { + builder.writeIndex(parser.booleanValue()); + } } } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java index 786bd9af78a4c..d8bb04a1a39c3 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java @@ -19,12 +19,16 @@ package org.elasticsearch.cluster.metadata; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Tuple; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.stream.Collectors; /** * Encapsulates the {@link IndexMetaData} instances of a concrete index or indices an alias is pointing to. 
@@ -78,6 +82,7 @@ class Alias implements AliasOrIndex { private final String aliasName; private final List referenceIndexMetaDatas; + private SetOnce writeIndex = new SetOnce<>(); public Alias(AliasMetaData aliasMetaData, IndexMetaData indexMetaData) { this.aliasName = aliasMetaData.getAlias(); @@ -90,11 +95,21 @@ public boolean isAlias() { return true; } + public String getAliasName() { + return aliasName; + } + @Override public List getIndices() { return referenceIndexMetaDatas; } + + @Nullable + public IndexMetaData getWriteIndex() { + return writeIndex.get(); + } + /** * Returns the unique alias metadata per concrete index. * @@ -138,5 +153,20 @@ void addIndex(IndexMetaData indexMetaData) { this.referenceIndexMetaDatas.add(indexMetaData); } + public void computeAndValidateWriteIndex() { + List writeIndices = referenceIndexMetaDatas.stream() + .filter(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).writeIndex())) + .collect(Collectors.toList()); + if (referenceIndexMetaDatas.size() == 1) { + writeIndex.set(referenceIndexMetaDatas.get(0)); + } else if (writeIndices.size() == 1) { + writeIndex.set(writeIndices.get(0)); + } else if (writeIndices.size() > 1) { + List writeIndicesStrings = writeIndices.stream() + .map(i -> i.getIndex().getName()).collect(Collectors.toList()); + throw new IllegalStateException("alias [" + aliasName + "] has more than one write index [" + + Strings.collectionToCommaDelimitedString(writeIndicesStrings) + "]"); + } + } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index 7f1348dd1587f..33e1687e241fa 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -57,7 +57,7 @@ public AliasValidator(Settings settings) { * @throws IllegalArgumentException if the alias is not valid */ public void validateAlias(Alias alias, String index, MetaData metaData) { - validateAlias(alias.name(), index, alias.indexRouting(), name -> metaData.index(name)); + validateAlias(alias.name(), index, alias.indexRouting(), metaData::index); } /** @@ -66,7 +66,7 @@ public void validateAlias(Alias alias, String index, MetaData metaData) { * @throws IllegalArgumentException if the alias is not valid */ public void validateAliasMetaData(AliasMetaData aliasMetaData, String index, MetaData metaData) { - validateAlias(aliasMetaData.alias(), index, aliasMetaData.indexRouting(), name -> metaData.index(name)); + validateAlias(aliasMetaData.alias(), index, aliasMetaData.indexRouting(), metaData::index); } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index 5657873987e18..2bfe0d0a58f70 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -1039,7 +1039,22 @@ public MetaData build() { } - // build all indices map + SortedMap aliasAndIndexLookup = Collections.unmodifiableSortedMap(buildAliasAndIndexLookup()); + + + // build all concrete indices arrays: + // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices. 
+ // When doing an operation across all indices, most of the time is spent on actually going to all shards and + // do the required operations, the bottleneck isn't resolving expressions into concrete indices. + String[] allIndicesArray = allIndices.toArray(new String[allIndices.size()]); + String[] allOpenIndicesArray = allOpenIndices.toArray(new String[allOpenIndices.size()]); + String[] allClosedIndicesArray = allClosedIndices.toArray(new String[allClosedIndices.size()]); + + return new MetaData(clusterUUID, version, transientSettings, persistentSettings, indices.build(), templates.build(), + customs.build(), allIndicesArray, allOpenIndicesArray, allClosedIndicesArray, aliasAndIndexLookup); + } + + private SortedMap buildAliasAndIndexLookup() { SortedMap aliasAndIndexLookup = new TreeMap<>(); for (ObjectCursor cursor : indices.values()) { IndexMetaData indexMetaData = cursor.value; @@ -1059,17 +1074,9 @@ public MetaData build() { }); } } - aliasAndIndexLookup = Collections.unmodifiableSortedMap(aliasAndIndexLookup); - // build all concrete indices arrays: - // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices. - // When doing an operation across all indices, most of the time is spent on actually going to all shards and - // do the required operations, the bottleneck isn't resolving expressions into concrete indices. - String[] allIndicesArray = allIndices.toArray(new String[allIndices.size()]); - String[] allOpenIndicesArray = allOpenIndices.toArray(new String[allOpenIndices.size()]); - String[] allClosedIndicesArray = allClosedIndices.toArray(new String[allClosedIndices.size()]); - - return new MetaData(clusterUUID, version, transientSettings, persistentSettings, indices.build(), templates.build(), - customs.build(), allIndicesArray, allOpenIndicesArray, allClosedIndicesArray, aliasAndIndexLookup); + aliasAndIndexLookup.values().stream().filter(AliasOrIndex::isAlias) + .forEach(alias -> ((AliasOrIndex.Alias) alias).computeAndValidateWriteIndex()); + return aliasAndIndexLookup; } public static String toXContent(MetaData metaData) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 0d8a374e66d42..be9db5262b00c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -516,7 +516,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { } for (Alias alias : request.aliases()) { AliasMetaData aliasMetaData = AliasMetaData.builder(alias.name()).filter(alias.filter()) - .indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).build(); + .indexRouting(alias.indexRouting()).searchRouting(alias.searchRouting()).writeIndex(alias.writeIndex()).build(); indexMetaDataBuilder.putAlias(aliasMetaData); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java index 0c38371bdc9cb..28dc7f2425d91 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesService.java @@ -127,7 +127,7 @@ ClusterState innerExecute(ClusterState currentState, Iterable actio if (index 
== null) { throw new IndexNotFoundException(action.getIndex()); } - NewAliasValidator newAliasValidator = (alias, indexRouting, filter) -> { + NewAliasValidator newAliasValidator = (alias, indexRouting, filter, writeIndex) -> { /* It is important that we look up the index using the metadata builder we are modifying so we can remove an * index and replace it with an alias. */ Function indexLookup = name -> metadata.get(name); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java index 202530ccf9289..f2ae67e1fc1ab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/alias/AliasActionsTests.java @@ -114,6 +114,7 @@ public void testParseAdd() throws IOException { Map filter = randomBoolean() ? randomMap(5) : null; Object searchRouting = randomBoolean() ? randomRouting() : null; Object indexRouting = randomBoolean() ? randomBoolean() ? searchRouting : randomRouting() : null; + boolean writeIndex = randomBoolean(); XContentBuilder b = XContentBuilder.builder(randomFrom(XContentType.values()).xContent()); b.startObject(); { @@ -142,6 +143,7 @@ public void testParseAdd() throws IOException { if (indexRouting != null && false == indexRouting.equals(searchRouting)) { b.field("index_routing", indexRouting); } + b.field("is_write_index", writeIndex); } b.endObject(); } @@ -159,6 +161,7 @@ public void testParseAdd() throws IOException { } assertEquals(Objects.toString(searchRouting, null), action.searchRouting()); assertEquals(Objects.toString(indexRouting, null), action.indexRouting()); + assertEquals(writeIndex, action.writeIndex()); } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index dbca9f7a98f13..e50805ab5b263 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -93,6 +93,7 @@ public void testToXContent() throws IOException { Alias alias = new Alias("test_alias"); alias.routing("1"); alias.filter("{\"term\":{\"year\":2016}}"); + alias.writeIndex(true); request.alias(alias); Settings.Builder settings = Settings.builder(); @@ -103,7 +104,7 @@ public void testToXContent() throws IOException { String expectedRequestBody = "{\"settings\":{\"index\":{\"number_of_shards\":\"10\"}}," + "\"mappings\":{\"my_type\":{\"type\":{}}}," + - "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\"}}}"; + "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\",\"is_write_index\":true}}}"; assertEquals(expectedRequestBody, actualRequestBody); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java index ba595de5215a3..4fa99374f0fab 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java @@ -71,6 +71,7 @@ public void testToXContent() throws IOException { Alias alias = new Alias("test_alias"); 
alias.routing("1"); alias.filter("{\"term\":{\"year\":2016}}"); + alias.writeIndex(true); target.alias(alias); Settings.Builder settings = Settings.builder(); settings.put(SETTING_NUMBER_OF_SHARDS, 10); @@ -78,7 +79,7 @@ public void testToXContent() throws IOException { request.setTargetIndex(target); String actualRequestBody = Strings.toString(request); String expectedRequestBody = "{\"settings\":{\"index\":{\"number_of_shards\":\"10\"}}," + - "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\"}}}"; + "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\",\"is_write_index\":true}}}"; assertEquals(expectedRequestBody, actualRequestBody); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/AliasMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/AliasMetaDataTests.java index 00865cc9a6579..de23c560eb9af 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/AliasMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/AliasMetaDataTests.java @@ -41,6 +41,7 @@ public void testSerialization() throws IOException { .indexRouting("indexRouting") .routing("routing") .searchRouting("trim,tw , ltw , lw") + .writeIndex(randomBoolean() ? null : randomBoolean()) .build(); assertThat(before.searchRoutingValues(), equalTo(Sets.newHashSet("trim", "tw ", " ltw ", " lw"))); @@ -54,6 +55,21 @@ public void testSerialization() throws IOException { assertThat(after, equalTo(before)); } + @Override + protected void assertEqualInstances(AliasMetaData expectedInstance, AliasMetaData newInstance) { + assertNotSame(newInstance, expectedInstance); + if (expectedInstance.writeIndex() == null) { + expectedInstance = AliasMetaData.builder(expectedInstance.alias()) + .filter(expectedInstance.filter()) + .indexRouting(expectedInstance.indexRouting()) + .searchRouting(expectedInstance.searchRouting()) + .writeIndex(randomBoolean() ? null : randomBoolean()) + .build(); + } + assertEquals(expectedInstance, newInstance); + assertEquals(expectedInstance.hashCode(), newInstance.hashCode()); + } + @Override protected AliasMetaData createTestInstance() { return createTestItem(); @@ -95,6 +111,7 @@ private static AliasMetaData createTestItem() { if (randomBoolean()) { builder.filter("{\"term\":{\"year\":2016}}"); } + builder.writeIndex(randomBoolean()); return builder.build(); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java index 56fbf1db24502..744a29e843c48 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexCreationTaskTests.java @@ -69,6 +69,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.startsWith; import static org.mockito.Matchers.anyObject; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -291,6 +292,32 @@ public void testValidateWaitForActiveShardsFailure() throws Exception { assertThat(e.getMessage(), containsString("invalid wait_for_active_shards")); } + public void testWriteIndex() throws Exception { + Boolean writeIndex = randomBoolean() ? 
null : randomBoolean(); + setupRequestAlias(new Alias("alias1").writeIndex(writeIndex)); + setupRequestMapping("mapping1", createMapping()); + setupRequestCustom("custom1", createCustom()); + reqSettings.put("key1", "value1"); + + final ClusterState result = executeTask(); + assertThat(result.metaData().index("test").getAliases(), hasKey("alias1")); + assertThat(result.metaData().index("test").getAliases().get("alias1").writeIndex(), equalTo(writeIndex)); + } + + public void testWriteIndexValidationException() throws Exception { + IndexMetaData existingWriteIndex = IndexMetaData.builder("test2") + .settings(settings(Version.CURRENT)).putAlias(AliasMetaData.builder("alias1").writeIndex(true).build()) + .numberOfShards(1).numberOfReplicas(0).build(); + idxBuilder.put("test2", existingWriteIndex); + setupRequestMapping("mapping1", createMapping()); + setupRequestCustom("custom1", createCustom()); + reqSettings.put("key1", "value1"); + setupRequestAlias(new Alias("alias1").writeIndex(true)); + + Exception exception = expectThrows(IllegalStateException.class, () -> executeTask()); + assertThat(exception.getMessage(), startsWith("alias [alias1] has more than one write index [")); + } + private IndexRoutingTable createIndexRoutingTableWithStartedShards(Index index) { final IndexRoutingTable idxRoutingTable = mock(IndexRoutingTable.class); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java index e5b52d8cf52bf..812dfd8f6f686 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.Settings; @@ -29,9 +30,13 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; +import java.util.List; import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.startsWith; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anySetOf; import static org.mockito.Mockito.mock; @@ -64,7 +69,7 @@ public void testAddAndRemove() { ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), index); // Add an alias to it - ClusterState after = service.innerExecute(before, singletonList(new AliasAction.Add(index, "test", null, null, null))); + ClusterState after = service.innerExecute(before, singletonList(new AliasAction.Add(index, "test", null, null, null, null))); AliasOrIndex alias = after.metaData().getAliasAndIndexLookup().get("test"); assertNotNull(alias); assertTrue(alias.isAlias()); @@ -74,7 +79,7 @@ public void testAddAndRemove() { before = after; after = service.innerExecute(before, Arrays.asList( new AliasAction.Remove(index, "test"), - new AliasAction.Add(index, "test_2", null, null, null))); + new AliasAction.Add(index, "test_2", null, null, null, null))); assertNull(after.metaData().getAliasAndIndexLookup().get("test")); alias = after.metaData().getAliasAndIndexLookup().get("test_2"); assertNotNull(alias); @@ -95,7 +100,7 @@ public void testSwapIndexWithAlias() { // Now remove "test" and add 
an alias to "test" to "test_2" in one go ClusterState after = service.innerExecute(before, Arrays.asList( - new AliasAction.Add("test_2", "test", null, null, null), + new AliasAction.Add("test_2", "test", null, null, null, null), new AliasAction.RemoveIndex("test"))); AliasOrIndex alias = after.metaData().getAliasAndIndexLookup().get("test"); assertNotNull(alias); @@ -109,7 +114,7 @@ public void testAddAliasToRemovedIndex() { // Attempt to add an alias to "test" at the same time as we remove it IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> service.innerExecute(before, Arrays.asList( - new AliasAction.Add("test", "alias", null, null, null), + new AliasAction.Add("test", "alias", null, null, null, null), new AliasAction.RemoveIndex("test")))); assertEquals("test", e.getIndex().getName()); } @@ -125,6 +130,127 @@ public void testRemoveIndexTwice() { assertNull(after.metaData().getAliasAndIndexLookup().get("test")); } + public void testAddWriteOnlyWithNoExistingAliases() { + ClusterState before = createIndex(ClusterState.builder(ClusterName.DEFAULT).build(), "test"); + + ClusterState after = service.innerExecute(before, Arrays.asList( + new AliasAction.Add("test", "alias", null, null, null, false))); + assertFalse(after.metaData().index("test").getAliases().get("alias").writeIndex()); + assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), + equalTo(after.metaData().index("test"))); + + after = service.innerExecute(before, Arrays.asList( + new AliasAction.Add("test", "alias", null, null, null, null))); + assertNull(after.metaData().index("test").getAliases().get("alias").writeIndex()); + assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), + equalTo(after.metaData().index("test"))); + + after = service.innerExecute(before, Arrays.asList( + new AliasAction.Add("test", "alias", null, null, null, true))); + assertTrue(after.metaData().index("test").getAliases().get("alias").writeIndex()); + assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), + equalTo(after.metaData().index("test"))); + } + + public void testAddWriteOnlyWithExistingWriteIndex() { + IndexMetaData.Builder indexMetaData = IndexMetaData.builder("test") + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + IndexMetaData.Builder indexMetaData2 = IndexMetaData.builder("test2") + .putAlias(AliasMetaData.builder("alias").writeIndex(true).build()) + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + ClusterState before = ClusterState.builder(ClusterName.DEFAULT) + .metaData(MetaData.builder().put(indexMetaData).put(indexMetaData2)).build(); + + ClusterState after = service.innerExecute(before, Arrays.asList( + new AliasAction.Add("test", "alias", null, null, null, null))); + assertNull(after.metaData().index("test").getAliases().get("alias").writeIndex()); + assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), + equalTo(after.metaData().index("test2"))); + + Exception exception = expectThrows(IllegalStateException.class, () -> service.innerExecute(before, Arrays.asList( + new AliasAction.Add("test", "alias", null, null, null, true)))); + assertThat(exception.getMessage(), startsWith("alias [alias] has more than one write index [")); + } + + public void testSwapWriteOnlyIndex() { + IndexMetaData.Builder indexMetaData = IndexMetaData.builder("test") + 
.putAlias(AliasMetaData.builder("alias").writeIndex(true).build()) + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + IndexMetaData.Builder indexMetaData2 = IndexMetaData.builder("test2") + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + ClusterState before = ClusterState.builder(ClusterName.DEFAULT) + .metaData(MetaData.builder().put(indexMetaData).put(indexMetaData2)).build(); + + Boolean unsetValue = randomBoolean() ? null : false; + List swapActions = Arrays.asList( + new AliasAction.Add("test", "alias", null, null, null, unsetValue), + new AliasAction.Add("test2", "alias", null, null, null, true) + ); + Collections.shuffle(swapActions, random()); + ClusterState after = service.innerExecute(before, swapActions); + assertThat(after.metaData().index("test").getAliases().get("alias").writeIndex(), equalTo(unsetValue)); + assertTrue(after.metaData().index("test2").getAliases().get("alias").writeIndex()); + assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), + equalTo(after.metaData().index("test2"))); + } + + public void testAddWriteOnlyWithExistingNonWriteIndices() { + IndexMetaData.Builder indexMetaData = IndexMetaData.builder("test") + .putAlias(AliasMetaData.builder("alias").writeIndex(randomBoolean() ? null : false).build()) + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + IndexMetaData.Builder indexMetaData2 = IndexMetaData.builder("test2") + .putAlias(AliasMetaData.builder("alias").writeIndex(randomBoolean() ? null : false).build()) + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + IndexMetaData.Builder indexMetaData3 = IndexMetaData.builder("test3") + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + ClusterState before = ClusterState.builder(ClusterName.DEFAULT) + .metaData(MetaData.builder().put(indexMetaData).put(indexMetaData2).put(indexMetaData3)).build(); + + assertNull(((AliasOrIndex.Alias) before.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex()); + + ClusterState after = service.innerExecute(before, Arrays.asList( + new AliasAction.Add("test3", "alias", null, null, null, true))); + assertTrue(after.metaData().index("test3").getAliases().get("alias").writeIndex()); + assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), + equalTo(after.metaData().index("test3"))); + + } + + public void testAddWriteOnlyWithIndexRemoved() { + IndexMetaData.Builder indexMetaData = IndexMetaData.builder("test") + .putAlias(AliasMetaData.builder("alias").build()) + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + IndexMetaData.Builder indexMetaData2 = IndexMetaData.builder("test2") + .putAlias(AliasMetaData.builder("alias").build()) + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + ClusterState before = ClusterState.builder(ClusterName.DEFAULT) + .metaData(MetaData.builder().put(indexMetaData).put(indexMetaData2)).build(); + + assertNull(before.metaData().index("test").getAliases().get("alias").writeIndex()); + assertNull(before.metaData().index("test2").getAliases().get("alias").writeIndex()); + assertNull(((AliasOrIndex.Alias) before.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex()); + + ClusterState after = service.innerExecute(before, Collections.singletonList(new AliasAction.RemoveIndex("test"))); + 
assertNull(after.metaData().index("test2").getAliases().get("alias").writeIndex()); + assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), + equalTo(after.metaData().index("test2"))); + } + + public void testAddWriteOnlyValidatesAgainstMetaDataBuilder() { + IndexMetaData.Builder indexMetaData = IndexMetaData.builder("test") + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + IndexMetaData.Builder indexMetaData2 = IndexMetaData.builder("test2") + .settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(1); + ClusterState before = ClusterState.builder(ClusterName.DEFAULT) + .metaData(MetaData.builder().put(indexMetaData).put(indexMetaData2)).build(); + + Exception exception = expectThrows(IllegalStateException.class, () -> service.innerExecute(before, Arrays.asList( + new AliasAction.Add("test", "alias", null, null, null, true), + new AliasAction.Add("test2", "alias", null, null, null, true) + ))); + assertThat(exception.getMessage(), startsWith("alias [alias] has more than one write index [")); + } + private ClusterState createIndex(ClusterState state, String index) { IndexMetaData indexMetaData = IndexMetaData.builder(index) .settings(Settings.builder().put("index.version.created", VersionUtils.randomVersion(random()))) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 3a83580dc1cdd..96a533118c8da 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -99,6 +99,34 @@ public void testAliasCollidingWithAnExistingIndex() { } } + public void testValidateAliasWriteOnly() { + String alias = randomAlphaOfLength(5); + String indexA = randomAlphaOfLength(6); + String indexB = randomAlphaOfLength(7); + Boolean aWriteIndex = randomBoolean() ? 
null : randomBoolean(); + Boolean bWriteIndex; + if (Boolean.TRUE.equals(aWriteIndex)) { + bWriteIndex = randomFrom(Boolean.FALSE, null); + } else { + bWriteIndex = randomFrom(Boolean.TRUE, Boolean.FALSE, null); + } + // when only one index/alias pair exist + MetaData metaData = MetaData.builder().put(buildIndexMetaData(indexA, alias, aWriteIndex)).build(); + + // when alias points to two indices, but valid + // one of the following combinations: [(null, null), (null, true), (null, false), (false, false)] + MetaData.builder(metaData).put(buildIndexMetaData(indexB, alias, bWriteIndex)).build(); + + // when too many write indices + Exception exception = expectThrows(IllegalStateException.class, + () -> { + IndexMetaData.Builder metaA = buildIndexMetaData(indexA, alias, true); + IndexMetaData.Builder metaB = buildIndexMetaData(indexB, alias, true); + MetaData.builder().put(metaA).put(metaB).build(); + }); + assertThat(exception.getMessage(), startsWith("alias [" + alias + "] has more than one write index [")); + } + public void testResolveIndexRouting() { IndexMetaData.Builder builder = IndexMetaData.builder("index") .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) @@ -428,6 +456,13 @@ public void testFindMappingsWithFilters() throws IOException { } } + private IndexMetaData.Builder buildIndexMetaData(String name, String alias, Boolean writeIndex) { + return IndexMetaData.builder(name) + .settings(settings(Version.CURRENT)).creationDate(randomNonNegativeLong()) + .putAlias(AliasMetaData.builder(alias).writeIndex(writeIndex)) + .numberOfShards(1).numberOfReplicas(0); + } + @SuppressWarnings("unchecked") private static void assertIndexMappingsNoFields(ImmutableOpenMap> mappings, String index) { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java index 3b80d1f6e2cf0..bde478eb36381 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java @@ -111,7 +111,7 @@ public void testSimpleJsonFromAndTo() throws IOException { .putMapping("mapping1", MAPPING_SOURCE1) .putMapping("mapping2", MAPPING_SOURCE2) .putAlias(newAliasMetaDataBuilder("alias1").filter(ALIAS_FILTER1)) - .putAlias(newAliasMetaDataBuilder("alias2")) + .putAlias(newAliasMetaDataBuilder("alias2").writeIndex(randomBoolean() ? null : randomBoolean())) .putAlias(newAliasMetaDataBuilder("alias4").filter(ALIAS_FILTER2))) .put(IndexTemplateMetaData.builder("foo") .patterns(Collections.singletonList("bar")) @@ -132,7 +132,7 @@ public void testSimpleJsonFromAndTo() throws IOException { .putMapping("mapping1", MAPPING_SOURCE1) .putMapping("mapping2", MAPPING_SOURCE2) .putAlias(newAliasMetaDataBuilder("alias1").filter(ALIAS_FILTER1)) - .putAlias(newAliasMetaDataBuilder("alias2")) + .putAlias(newAliasMetaDataBuilder("alias2").writeIndex(randomBoolean() ? 
null : randomBoolean())) .putAlias(newAliasMetaDataBuilder("alias4").filter(ALIAS_FILTER2))) .put(IndexTemplateMetaData.builder("foo") .patterns(Collections.singletonList("bar")) @@ -146,7 +146,6 @@ public void testSimpleJsonFromAndTo() throws IOException { .build(); String metaDataSource = MetaData.Builder.toXContent(metaData); -// System.out.println("ToJson: " + metaDataSource); MetaData parsedMetaData = MetaData.Builder.fromXContent(createParser(JsonXContent.jsonXContent, metaDataSource)); @@ -270,6 +269,8 @@ public void testSimpleJsonFromAndTo() throws IOException { assertThat(indexMetaData.getAliases().get("alias1").filter().string(), equalTo(ALIAS_FILTER1)); assertThat(indexMetaData.getAliases().get("alias2").alias(), equalTo("alias2")); assertThat(indexMetaData.getAliases().get("alias2").filter(), nullValue()); + assertThat(indexMetaData.getAliases().get("alias2").writeIndex(), + equalTo(metaData.index("test11").getAliases().get("alias2").writeIndex())); assertThat(indexMetaData.getAliases().get("alias4").alias(), equalTo("alias4")); assertThat(indexMetaData.getAliases().get("alias4").filter().string(), equalTo(ALIAS_FILTER2)); @@ -288,6 +289,8 @@ public void testSimpleJsonFromAndTo() throws IOException { assertThat(indexMetaData.getAliases().get("alias1").filter().string(), equalTo(ALIAS_FILTER1)); assertThat(indexMetaData.getAliases().get("alias2").alias(), equalTo("alias2")); assertThat(indexMetaData.getAliases().get("alias2").filter(), nullValue()); + assertThat(indexMetaData.getAliases().get("alias2").writeIndex(), + equalTo(metaData.index("test12").getAliases().get("alias2").writeIndex())); assertThat(indexMetaData.getAliases().get("alias4").alias(), equalTo("alias4")); assertThat(indexMetaData.getAliases().get("alias4").filter().string(), equalTo(ALIAS_FILTER2)); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java index 06499aa544e9f..c9ca1637b1ade 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java @@ -1518,9 +1518,9 @@ public void testRenameOnRestore() throws Exception { ensureGreen(); assertAcked(client.admin().indices().prepareAliases() - .addAlias("test-idx-1", "alias-1") - .addAlias("test-idx-2", "alias-2") - .addAlias("test-idx-3", "alias-3") + .addAlias("test-idx-1", "alias-1", false) + .addAlias("test-idx-2", "alias-2", false) + .addAlias("test-idx-3", "alias-3", false) ); logger.info("--> indexing some data"); diff --git a/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java b/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java index 3e42e3b304e00..e88a9f0a38d2c 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/index/RandomCreateIndexGenerator.java @@ -138,6 +138,10 @@ private static Alias randomAlias() { alias.filter("{\"term\":{\"year\":2016}}"); } + if (randomBoolean()) { + alias.writeIndex(randomBoolean()); + } + return alias; } } From 4918ae6fe480f06f28949b365cb6de0ebbc2bb88 Mon Sep 17 00:00:00 2001 From: lcawl Date: Fri, 15 Jun 2018 11:04:11 -0700 Subject: [PATCH 05/34] [DOCS] Adds links to release notes and highlights --- docs/reference/migration/index.asciidoc | 5 ++++- docs/reference/migration/migrate_7_0.asciidoc | 2 ++ 2 files 
changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/reference/migration/index.asciidoc b/docs/reference/migration/index.asciidoc index 15db4b7a94a85..aa74068419df0 100644 --- a/docs/reference/migration/index.asciidoc +++ b/docs/reference/migration/index.asciidoc @@ -17,6 +17,9 @@ As a general rule: * Migration between non-consecutive major versions -- e.g. `5.x` to `7.x` -- is not supported. -See <> for more info. +For more information, see <>. + +See also <> and <>. + -- include::migrate_7_0.asciidoc[] diff --git a/docs/reference/migration/migrate_7_0.asciidoc b/docs/reference/migration/migrate_7_0.asciidoc index c80b0ae64a371..42fd6b7afbe73 100644 --- a/docs/reference/migration/migrate_7_0.asciidoc +++ b/docs/reference/migration/migrate_7_0.asciidoc @@ -4,6 +4,8 @@ This section discusses the changes that you need to be aware of when migrating your application to Elasticsearch 7.0. +See also <> and <>. + [float] === Indices created before 7.0 From 605dbbeabdd6769daa79a3afa4955e0c7b2671cb Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Fri, 15 Jun 2018 12:39:25 -0600 Subject: [PATCH 06/34] Remove trial status info from start trial doc (#31365) This is related to #31325. There is currently information about the get-trial-status api on the start-trial api documentation page. It also has the incorrect route for that api. This commit removes that information as the start-trial page properly links to a page providing documentation about get-trial-status. --- .../en/rest-api/license/start-trial.asciidoc | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/x-pack/docs/en/rest-api/license/start-trial.asciidoc b/x-pack/docs/en/rest-api/license/start-trial.asciidoc index 7754f6feef79c..341c72853fd08 100644 --- a/x-pack/docs/en/rest-api/license/start-trial.asciidoc +++ b/x-pack/docs/en/rest-api/license/start-trial.asciidoc @@ -36,24 +36,6 @@ For more information, see [float] ==== Examples -The following example checks whether you are eligible to start a trial: - -[source,js] ------------------------------------------------------------- -GET _xpack/license/start_trial ------------------------------------------------------------- -// CONSOLE -// TEST[skip:license testing issues] - -Example response: -[source,js] ------------------------------------------------------------- -{ - "eligible_to_start_trial": true -} ------------------------------------------------------------- -// NOTCONSOLE - The following example starts a 30-day trial license. The acknowledge parameter is required as you are initiating a license that will expire. From a705e1a9e30abe2b636ab38602bd1afea2b0c422 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Fri, 15 Jun 2018 14:01:03 -0600 Subject: [PATCH 07/34] Add byte array pooling to nio http transport (#31349) This is related to #28898. This PR implements pooling of byte arrays when reading from the wire in the http server transport. In order to do this, we must integrate with netty reference counting. This PR does so by making Pages in InboundChannelBuffer reference counted. When we access the underlying page to pass to netty, we retain the page. When netty releases its bytebuf, it releases the underlying pages we have passed to it.
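To make the retain/release flow described above concrete, the following is a minimal, self-contained Java sketch of a reference-counted page; the names (RefCountedPage, retain, release, recycler) are illustrative placeholders and not the actual InboundChannelBuffer.Page API from the patch below.

import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicInteger;

// Minimal sketch of a reference-counted page. The owner of the bytes retains the
// page before sharing it with another component (e.g. a netty ByteBuf wrapper);
// the backing memory is recycled only after every holder has released the page.
final class RefCountedPage {

    private final ByteBuffer buffer;
    private final Runnable recycler;                       // returns the bytes to a pool
    private final AtomicInteger refCount = new AtomicInteger(1);

    RefCountedPage(ByteBuffer buffer, Runnable recycler) {
        this.buffer = buffer;
        this.recycler = recycler;
    }

    ByteBuffer byteBuffer() {
        return buffer;
    }

    // Increment the reference count before handing the page to another holder.
    RefCountedPage retain() {
        int current;
        do {
            current = refCount.get();
            if (current <= 0) {
                throw new IllegalStateException("page already released");
            }
        } while (refCount.compareAndSet(current, current + 1) == false);
        return this;
    }

    // Each holder calls release() exactly once; the last release recycles the memory.
    void release() {
        int remaining = refCount.decrementAndGet();
        if (remaining == 0) {
            recycler.run();
        } else if (remaining < 0) {
            throw new IllegalStateException("page released more times than retained");
        }
    }

    public static void main(String[] args) {
        RefCountedPage page = new RefCountedPage(ByteBuffer.allocate(16),
            () -> System.out.println("page recycled"));
        page.retain();     // the bytes are wrapped and handed to another holder (e.g. netty)
        page.release();    // that holder releases its copy
        page.release();    // the original owner closes the page; memory is recycled here
    }
}

The patch that follows applies the same idea: the channel buffer retains a page before passing its bytes to netty, and the underlying array is returned to the recycler only after both the buffer and netty's ByteBuf have released it.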
--- .../util/concurrent/AbstractRefCounted.java | 4 +- .../common/util/concurrent/RefCounted.java | 2 +- .../util/concurrent/RefCountedTests.java | 9 +- .../nio/InboundChannelBuffer.java | 81 ++++++++++++++++- .../nio/InboundChannelBufferTests.java | 46 +++++++++- .../elasticsearch/transport/Netty4Plugin.java | 2 +- .../http/nio/HttpReadWriteHandler.java | 2 +- .../elasticsearch/http/nio/NettyAdaptor.java | 8 ++ .../http/nio/NioHttpServerTransport.java | 17 +++- .../elasticsearch/http/nio/PagedByteBuf.java | 75 +++++++++++++++ .../transport/nio/NioTransportPlugin.java | 5 +- .../http/nio/NioHttpServerTransportTests.java | 20 ++-- .../http/nio/PagedByteBufTests.java | 91 +++++++++++++++++++ .../common/network/NetworkModule.java | 2 +- .../elasticsearch/plugins/NetworkPlugin.java | 2 +- .../common/network/NetworkModuleTests.java | 6 +- .../elasticsearch/index/store/StoreTests.java | 15 +-- .../core/LocalStateCompositeXPackPlugin.java | 4 +- .../xpack/security/Security.java | 2 +- 19 files changed, 343 insertions(+), 50 deletions(-) rename {server => libs/core}/src/main/java/org/elasticsearch/common/util/concurrent/AbstractRefCounted.java (92%) rename {server => libs/core}/src/main/java/org/elasticsearch/common/util/concurrent/RefCounted.java (95%) rename {server => libs/core}/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java (94%) create mode 100644 plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/PagedByteBuf.java create mode 100644 plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/PagedByteBufTests.java diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/AbstractRefCounted.java b/libs/core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractRefCounted.java similarity index 92% rename from server/src/main/java/org/elasticsearch/common/util/concurrent/AbstractRefCounted.java rename to libs/core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractRefCounted.java index e0b8aea178c70..a30e7490ff445 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/AbstractRefCounted.java +++ b/libs/core/src/main/java/org/elasticsearch/common/util/concurrent/AbstractRefCounted.java @@ -19,8 +19,6 @@ package org.elasticsearch.common.util.concurrent; -import org.apache.lucene.store.AlreadyClosedException; - import java.util.concurrent.atomic.AtomicInteger; /** @@ -68,7 +66,7 @@ public final void decRef() { } protected void alreadyClosed() { - throw new AlreadyClosedException(name + " is already closed can't increment refCount current count [" + refCount.get() + "]"); + throw new IllegalStateException(name + " is already closed can't increment refCount current count [" + refCount.get() + "]"); } /** diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/RefCounted.java b/libs/core/src/main/java/org/elasticsearch/common/util/concurrent/RefCounted.java similarity index 95% rename from server/src/main/java/org/elasticsearch/common/util/concurrent/RefCounted.java rename to libs/core/src/main/java/org/elasticsearch/common/util/concurrent/RefCounted.java index b2cc8b99c63de..1e7bdc0e78faa 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/RefCounted.java +++ b/libs/core/src/main/java/org/elasticsearch/common/util/concurrent/RefCounted.java @@ -44,7 +44,7 @@ public interface RefCounted { * * @see #decRef * @see #tryIncRef() - * @throws org.apache.lucene.store.AlreadyClosedException iff the reference counter can not be incremented. 
+ * @throws IllegalStateException iff the reference counter can not be incremented. */ void incRef(); diff --git a/server/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java b/libs/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java similarity index 94% rename from server/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java rename to libs/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java index b2664b134ed8e..ebcf12482dfa7 100644 --- a/server/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java +++ b/libs/core/src/test/java/org/elasticsearch/common/util/concurrent/RefCountedTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.common.util.concurrent; -import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -70,14 +69,14 @@ public void testRefCount() throws IOException { try { counted.incRef(); fail(" expected exception"); - } catch (AlreadyClosedException ex) { + } catch (IllegalStateException ex) { assertThat(ex.getMessage(), equalTo("test is already closed can't increment refCount current count [0]")); } try { counted.ensureOpen(); fail(" expected exception"); - } catch (AlreadyClosedException ex) { + } catch (IllegalStateException ex) { assertThat(ex.getMessage(), equalTo("closed")); } } @@ -116,7 +115,7 @@ public void run() { try { counted.ensureOpen(); fail("expected to be closed"); - } catch (AlreadyClosedException ex) { + } catch (IllegalStateException ex) { assertThat(ex.getMessage(), equalTo("closed")); } assertThat(counted.refCount(), is(0)); @@ -140,7 +139,7 @@ protected void closeInternal() { public void ensureOpen() { if (closed.get()) { assert this.refCount() == 0; - throw new AlreadyClosedException("closed"); + throw new IllegalStateException("closed"); } } } diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java index f671b39d4d61b..7c718237cd20e 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/InboundChannelBuffer.java @@ -19,6 +19,7 @@ package org.elasticsearch.nio; +import org.elasticsearch.common.util.concurrent.AbstractRefCounted; import org.elasticsearch.nio.utils.ExceptionsHelper; import java.nio.ByteBuffer; @@ -41,6 +42,7 @@ public final class InboundChannelBuffer implements AutoCloseable { private static final int PAGE_MASK = PAGE_SIZE - 1; private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(PAGE_SIZE); private static final ByteBuffer[] EMPTY_BYTE_BUFFER_ARRAY = new ByteBuffer[0]; + private static final Page[] EMPTY_BYTE_PAGE_ARRAY = new Page[0]; private final ArrayDeque pages; @@ -152,6 +154,46 @@ public ByteBuffer[] sliceBuffersTo(long to) { return buffers; } + /** + * This method will return an array of {@link Page} representing the bytes from the beginning of + * this buffer up through the index argument that was passed. The pages and buffers will be duplicates of + * the internal components, so any modifications to the markers {@link ByteBuffer#position()}, + * {@link ByteBuffer#limit()}, etc will not modify the this class. Additionally, this will internally + * retain the underlying pages, so the pages returned by this method must be closed. 
+ * + * @param to the index to slice up to + * @return the pages + */ + public Page[] sliceAndRetainPagesTo(long to) { + if (to > capacity) { + throw new IndexOutOfBoundsException("can't slice a channel buffer with capacity [" + capacity + + "], with slice parameters to [" + to + "]"); + } else if (to == 0) { + return EMPTY_BYTE_PAGE_ARRAY; + } + long indexWithOffset = to + offset; + int pageCount = pageIndex(indexWithOffset); + int finalLimit = indexInPage(indexWithOffset); + if (finalLimit != 0) { + pageCount += 1; + } + + Page[] pages = new Page[pageCount]; + Iterator pageIterator = this.pages.iterator(); + Page firstPage = pageIterator.next().duplicate(); + ByteBuffer firstBuffer = firstPage.byteBuffer; + firstBuffer.position(firstBuffer.position() + offset); + pages[0] = firstPage; + for (int i = 1; i < pages.length; i++) { + pages[i] = pageIterator.next().duplicate(); + } + if (finalLimit != 0) { + pages[pages.length - 1].byteBuffer.limit(finalLimit); + } + + return pages; + } + /** * This method will return an array of {@link ByteBuffer} representing the bytes from the index passed * through the end of this buffer. The buffers will be duplicates of the internal buffers, so any @@ -231,16 +273,49 @@ private int indexInPage(long index) { public static class Page implements AutoCloseable { private final ByteBuffer byteBuffer; - private final Runnable closeable; + // This is reference counted as some implementations want to retain the byte pages by calling + // sliceAndRetainPagesTo. With reference counting we can increment the reference count, return the + // pages, and safely close them when this channel buffer is done with them. The reference count + // would be 1 at that point, meaning that the pages will remain until the implementation closes + // theirs. 
+ private final RefCountedCloseable refCountedCloseable; public Page(ByteBuffer byteBuffer, Runnable closeable) { + this(byteBuffer, new RefCountedCloseable(closeable)); + } + + private Page(ByteBuffer byteBuffer, RefCountedCloseable refCountedCloseable) { this.byteBuffer = byteBuffer; - this.closeable = closeable; + this.refCountedCloseable = refCountedCloseable; + } + + private Page duplicate() { + refCountedCloseable.incRef(); + return new Page(byteBuffer.duplicate(), refCountedCloseable); + } + + public ByteBuffer getByteBuffer() { + return byteBuffer; } @Override public void close() { - closeable.run(); + refCountedCloseable.decRef(); + } + + private static class RefCountedCloseable extends AbstractRefCounted { + + private final Runnable closeable; + + private RefCountedCloseable(Runnable closeable) { + super("byte array page"); + this.closeable = closeable; + } + + @Override + protected void closeInternal() { + closeable.run(); + } } } } diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java index 199a509cbfabb..8dd72e869e8d9 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/InboundChannelBufferTests.java @@ -31,7 +31,8 @@ public class InboundChannelBufferTests extends ESTestCase { private static final int PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES; private final Supplier defaultPageSupplier = () -> - new InboundChannelBuffer.Page(ByteBuffer.allocate(BigArrays.BYTE_PAGE_SIZE), () -> {}); + new InboundChannelBuffer.Page(ByteBuffer.allocate(BigArrays.BYTE_PAGE_SIZE), () -> { + }); public void testNewBufferHasSinglePage() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); @@ -167,6 +168,49 @@ public void testClose() { expectThrows(IllegalStateException.class, () -> channelBuffer.ensureCapacity(1)); } + public void testCloseRetainedPages() { + ConcurrentLinkedQueue queue = new ConcurrentLinkedQueue<>(); + Supplier supplier = () -> { + AtomicBoolean atomicBoolean = new AtomicBoolean(); + queue.add(atomicBoolean); + return new InboundChannelBuffer.Page(ByteBuffer.allocate(PAGE_SIZE), () -> atomicBoolean.set(true)); + }; + InboundChannelBuffer channelBuffer = new InboundChannelBuffer(supplier); + channelBuffer.ensureCapacity(PAGE_SIZE * 4); + + assertEquals(4, queue.size()); + + for (AtomicBoolean closedRef : queue) { + assertFalse(closedRef.get()); + } + + InboundChannelBuffer.Page[] pages = channelBuffer.sliceAndRetainPagesTo(PAGE_SIZE * 2); + + pages[1].close(); + + for (AtomicBoolean closedRef : queue) { + assertFalse(closedRef.get()); + } + + channelBuffer.close(); + + int i = 0; + for (AtomicBoolean closedRef : queue) { + if (i < 1) { + assertFalse(closedRef.get()); + } else { + assertTrue(closedRef.get()); + } + ++i; + } + + pages[0].close(); + + for (AtomicBoolean closedRef : queue) { + assertTrue(closedRef.get()); + } + } + public void testAccessByteBuffers() { InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier); diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java index c6655b58bc3bd..70afcc86ad8f9 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/Netty4Plugin.java @@ 
-87,8 +87,8 @@ public Map> getTransports(Settings settings, ThreadP @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher) { diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java index ea75c62dbbce2..ad81719ebcbb9 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java @@ -78,7 +78,7 @@ public class HttpReadWriteHandler implements ReadWriteHandler { @Override public int consumeReads(InboundChannelBuffer channelBuffer) throws IOException { - int bytesConsumed = adaptor.read(channelBuffer.sliceBuffersTo(channelBuffer.getIndex())); + int bytesConsumed = adaptor.read(channelBuffer.sliceAndRetainPagesTo(channelBuffer.getIndex())); Object message; while ((message = adaptor.pollInboundMessage()) != null) { handleRequest(message); diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java index cf8c92bff905c..41cb72aa32273 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NettyAdaptor.java @@ -29,6 +29,7 @@ import io.netty.channel.embedded.EmbeddedChannel; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.nio.FlushOperation; +import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.nio.WriteOperation; import java.nio.ByteBuffer; @@ -97,6 +98,13 @@ public int read(ByteBuffer[] buffers) { return byteBuf.readerIndex() - initialReaderIndex; } + public int read(InboundChannelBuffer.Page[] pages) { + ByteBuf byteBuf = PagedByteBuf.byteBufFromPages(pages); + int readableBytes = byteBuf.readableBytes(); + nettyChannel.writeInbound(byteBuf); + return readableBytes; + } + public Object pollInboundMessage() { return nettyChannel.readInbound(); } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java index 5aac491a6abd4..ba51f7c684818 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java @@ -32,12 +32,14 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.common.network.NetworkService; +import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.NetworkExceptionHelper; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; import 
org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.http.AbstractHttpServerTransport; @@ -63,6 +65,7 @@ import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.ByteBuffer; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.ArrayList; @@ -103,6 +106,8 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { (s) -> Integer.toString(EsExecutors.numberOfProcessors(s) * 2), (s) -> Setting.parseInt(s, 1, "http.nio.worker_count"), Setting.Property.NodeScope); + private final PageCacheRecycler pageCacheRecycler; + private final boolean tcpNoDelay; private final boolean tcpKeepAlive; private final boolean reuseAddress; @@ -115,9 +120,11 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport { private HttpChannelFactory channelFactory; private final NioCorsConfig corsConfig; - public NioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, ThreadPool threadPool, - NamedXContentRegistry xContentRegistry, HttpServerTransport.Dispatcher dispatcher) { + public NioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, ThreadPool threadPool, NamedXContentRegistry xContentRegistry, + HttpServerTransport.Dispatcher dispatcher) { super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher); + this.pageCacheRecycler = pageCacheRecycler; ByteSizeValue maxChunkSize = SETTING_HTTP_MAX_CHUNK_SIZE.get(settings); ByteSizeValue maxHeaderSize = SETTING_HTTP_MAX_HEADER_SIZE.get(settings); @@ -329,11 +336,15 @@ private HttpChannelFactory() { @Override public NioHttpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { NioHttpChannel nioChannel = new NioHttpChannel(channel); + java.util.function.Supplier pageSupplier = () -> { + Recycler.V bytes = pageCacheRecycler.bytePage(false); + return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close); + }; HttpReadWriteHandler httpReadWritePipeline = new HttpReadWriteHandler(nioChannel,NioHttpServerTransport.this, handlingSettings, corsConfig); Consumer exceptionHandler = (e) -> exceptionCaught(nioChannel, e); SocketChannelContext context = new BytesChannelContext(nioChannel, selector, exceptionHandler, httpReadWritePipeline, - InboundChannelBuffer.allocatingInstance()); + new InboundChannelBuffer(pageSupplier)); nioChannel.setContext(context); return nioChannel; } diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/PagedByteBuf.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/PagedByteBuf.java new file mode 100644 index 0000000000000..40f3aeecfbc94 --- /dev/null +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/PagedByteBuf.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.nio; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.CompositeByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.buffer.UnpooledByteBufAllocator; +import io.netty.buffer.UnpooledHeapByteBuf; +import org.elasticsearch.nio.InboundChannelBuffer; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +public class PagedByteBuf extends UnpooledHeapByteBuf { + + private final Runnable releasable; + + private PagedByteBuf(byte[] array, Runnable releasable) { + super(UnpooledByteBufAllocator.DEFAULT, array, array.length); + this.releasable = releasable; + } + + static ByteBuf byteBufFromPages(InboundChannelBuffer.Page[] pages) { + int componentCount = pages.length; + if (componentCount == 0) { + return Unpooled.EMPTY_BUFFER; + } else if (componentCount == 1) { + return byteBufFromPage(pages[0]); + } else { + int maxComponents = Math.max(16, componentCount); + final List components = new ArrayList<>(componentCount); + for (InboundChannelBuffer.Page page : pages) { + components.add(byteBufFromPage(page)); + } + return new CompositeByteBuf(UnpooledByteBufAllocator.DEFAULT, false, maxComponents, components); + } + } + + private static ByteBuf byteBufFromPage(InboundChannelBuffer.Page page) { + ByteBuffer buffer = page.getByteBuffer(); + assert buffer.isDirect() == false && buffer.hasArray() : "Must be a heap buffer with an array"; + int offset = buffer.arrayOffset() + buffer.position(); + PagedByteBuf newByteBuf = new PagedByteBuf(buffer.array(), page::close); + return newByteBuf.slice(offset, buffer.remaining()); + } + + + @Override + protected void deallocate() { + try { + super.deallocate(); + } finally { + releasable.run(); + } + } +} diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java index 1cc94f18dd3c1..1da8e909b2dd8 100644 --- a/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java +++ b/plugins/transport-nio/src/main/java/org/elasticsearch/transport/nio/NioTransportPlugin.java @@ -67,12 +67,13 @@ public Map> getTransports(Settings settings, ThreadP @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher) { return Collections.singletonMap(NIO_HTTP_TRANSPORT_NAME, - () -> new NioHttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher)); + () -> new NioHttpServerTransport(settings, networkService, bigArrays, pageCacheRecycler, threadPool, xContentRegistry, + dispatcher)); } } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java 
b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java index 48a5bf617a436..a0cb74f7cd205 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/NioHttpServerTransportTests.java @@ -88,12 +88,14 @@ public class NioHttpServerTransportTests extends ESTestCase { private NetworkService networkService; private ThreadPool threadPool; private MockBigArrays bigArrays; + private MockPageCacheRecycler pageRecycler; @Before public void setup() throws Exception { networkService = new NetworkService(Collections.emptyList()); threadPool = new TestThreadPool("test"); - bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService()); + pageRecycler = new MockPageCacheRecycler(Settings.EMPTY); + bigArrays = new MockBigArrays(pageRecycler, new NoneCircuitBreakerService()); } @After @@ -186,7 +188,7 @@ public void dispatchBadRequest(RestRequest request, RestChannel channel, ThreadC throw new AssertionError(); } }; - try (NioHttpServerTransport transport = new NioHttpServerTransport(settings, networkService, bigArrays, threadPool, + try (NioHttpServerTransport transport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, threadPool, xContentRegistry(), dispatcher)) { transport.start(); final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); @@ -210,13 +212,13 @@ public void dispatchBadRequest(RestRequest request, RestChannel channel, ThreadC } public void testBindUnavailableAddress() { - try (NioHttpServerTransport transport = new NioHttpServerTransport(Settings.EMPTY, networkService, bigArrays, threadPool, - xContentRegistry(), new NullDispatcher())) { + try (NioHttpServerTransport transport = new NioHttpServerTransport(Settings.EMPTY, networkService, bigArrays, pageRecycler, + threadPool, xContentRegistry(), new NullDispatcher())) { transport.start(); TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); Settings settings = Settings.builder().put("http.port", remoteAddress.getPort()).build(); - try (NioHttpServerTransport otherTransport = new NioHttpServerTransport(settings, networkService, bigArrays, threadPool, - xContentRegistry(), new NullDispatcher())) { + try (NioHttpServerTransport otherTransport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, + threadPool, xContentRegistry(), new NullDispatcher())) { BindHttpException bindHttpException = expectThrows(BindHttpException.class, () -> otherTransport.start()); assertEquals("Failed to bind to [" + remoteAddress.getPort() + "]", bindHttpException.getMessage()); } @@ -259,8 +261,8 @@ public void dispatchBadRequest(final RestRequest request, settings = Settings.builder().put(httpMaxInitialLineLengthSetting.getKey(), maxInitialLineLength + "b").build(); } - try (NioHttpServerTransport transport = - new NioHttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry(), dispatcher)) { + try (NioHttpServerTransport transport = new NioHttpServerTransport(settings, networkService, bigArrays, pageRecycler, + threadPool, xContentRegistry(), dispatcher)) { transport.start(); final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses()); @@ -279,7 +281,7 @@ public void dispatchBadRequest(final RestRequest request, assertNotNull(causeReference.get()); 
assertThat(causeReference.get(), instanceOf(TooLongFrameException.class)); } - + // public void testReadTimeout() throws Exception { // final HttpServerTransport.Dispatcher dispatcher = new HttpServerTransport.Dispatcher() { // diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/PagedByteBufTests.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/PagedByteBufTests.java new file mode 100644 index 0000000000000..15bd18ecf6959 --- /dev/null +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/PagedByteBufTests.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.http.nio; + +import io.netty.buffer.ByteBuf; +import org.elasticsearch.nio.InboundChannelBuffer; +import org.elasticsearch.test.ESTestCase; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.concurrent.atomic.AtomicInteger; + +public class PagedByteBufTests extends ESTestCase { + + public void testReleasingPage() { + AtomicInteger integer = new AtomicInteger(0); + int pageCount = randomInt(10) + 1; + ArrayList pages = new ArrayList<>(); + for (int i = 0; i < pageCount; ++i) { + pages.add(new InboundChannelBuffer.Page(ByteBuffer.allocate(10), integer::incrementAndGet)); + } + + ByteBuf byteBuf = PagedByteBuf.byteBufFromPages(pages.toArray(new InboundChannelBuffer.Page[0])); + + assertEquals(0, integer.get()); + byteBuf.retain(); + byteBuf.release(); + assertEquals(0, integer.get()); + ByteBuf secondBuf = byteBuf.retainedSlice(); + byteBuf.release(); + assertEquals(0, integer.get()); + secondBuf.release(); + assertEquals(pageCount, integer.get()); + } + + public void testBytesAreUsed() { + byte[] bytes1 = new byte[10]; + byte[] bytes2 = new byte[10]; + + for (int i = 0; i < 10; ++i) { + bytes1[i] = (byte) i; + } + + for (int i = 10; i < 20; ++i) { + bytes2[i - 10] = (byte) i; + } + + InboundChannelBuffer.Page[] pages = new InboundChannelBuffer.Page[2]; + pages[0] = new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes1), () -> {}); + pages[1] = new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes2), () -> {}); + + ByteBuf byteBuf = PagedByteBuf.byteBufFromPages(pages); + assertEquals(20, byteBuf.readableBytes()); + + for (int i = 0; i < 20; ++i) { + assertEquals((byte) i, byteBuf.getByte(i)); + } + + InboundChannelBuffer.Page[] pages2 = new InboundChannelBuffer.Page[2]; + ByteBuffer firstBuffer = ByteBuffer.wrap(bytes1); + firstBuffer.position(2); + ByteBuffer secondBuffer = ByteBuffer.wrap(bytes2); + secondBuffer.limit(8); + pages2[0] = new InboundChannelBuffer.Page(firstBuffer, () -> {}); + pages2[1] = new InboundChannelBuffer.Page(secondBuffer, () -> {}); + + ByteBuf byteBuf2 = PagedByteBuf.byteBufFromPages(pages2); + assertEquals(16, byteBuf2.readableBytes()); + + 
for (int i = 2; i < 18; ++i) { + assertEquals((byte) i, byteBuf2.getByte(i - 2)); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java index 70d26770a7bdc..cd8141ffa3c91 100644 --- a/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java +++ b/server/src/main/java/org/elasticsearch/common/network/NetworkModule.java @@ -116,7 +116,7 @@ public NetworkModule(Settings settings, boolean transportClient, List> httpTransportFactory = plugin.getHttpTransports(settings, threadPool, bigArrays, - circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService, dispatcher); + pageCacheRecycler, circuitBreakerService, xContentRegistry, networkService, dispatcher); if (transportClient == false) { for (Map.Entry> entry : httpTransportFactory.entrySet()) { registerHttpTransport(entry.getKey(), entry.getValue()); diff --git a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java index df41036ffeabb..d33997fc82b99 100644 --- a/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/NetworkPlugin.java @@ -71,8 +71,8 @@ default Map> getTransports(Settings settings, Thread * See {@link org.elasticsearch.common.network.NetworkModule#HTTP_TYPE_SETTING} to configure a specific implementation. */ default Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher) { diff --git a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java index ba74e373f8842..8a4eb8e9177f1 100644 --- a/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java +++ b/server/src/test/java/org/elasticsearch/common/network/NetworkModuleTests.java @@ -159,8 +159,8 @@ public void testRegisterHttpTransport() { @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher requestDispatcher) { @@ -198,8 +198,8 @@ public Map> getTransports(Settings settings, ThreadP @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher requestDispatcher) { @@ -233,8 +233,8 @@ public Map> getTransports(Settings settings, ThreadP @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher requestDispatcher) { diff --git 
a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index 9352d978e6e46..2cea9bb364684 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -39,7 +39,6 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.Term; -import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.BaseDirectoryWrapper; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.Directory; @@ -145,18 +144,8 @@ public void testRefCount() throws IOException { store.decRef(); assertThat(store.refCount(), Matchers.equalTo(0)); assertFalse(store.tryIncRef()); - try { - store.incRef(); - fail(" expected exception"); - } catch (AlreadyClosedException ex) { - - } - try { - store.ensureOpen(); - fail(" expected exception"); - } catch (AlreadyClosedException ex) { - - } + expectThrows(IllegalStateException.class, store::incRef); + expectThrows(IllegalStateException.class, store::ensureOpen); } public void testVerifyingIndexOutput() throws IOException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index 44fd61e1693ad..796cae375e3a6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -286,14 +286,14 @@ public Map> getTransports(Settings settings, ThreadP @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher) { Map> transports = new HashMap<>(); filterPlugins(NetworkPlugin.class).stream().forEach(p -> transports.putAll(p.getHttpTransports(settings, threadPool, bigArrays, - circuitBreakerService, namedWriteableRegistry, xContentRegistry, networkService, dispatcher))); + pageCacheRecycler, circuitBreakerService, xContentRegistry, networkService, dispatcher))); return transports; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 664745b19204b..c0bd7882c419a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -843,8 +843,8 @@ public Map> getTransports(Settings settings, ThreadP @Override public Map> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, CircuitBreakerService circuitBreakerService, - NamedWriteableRegistry namedWriteableRegistry, NamedXContentRegistry xContentRegistry, NetworkService networkService, HttpServerTransport.Dispatcher dispatcher) { From dbc9d602608864f16bf59741114f4c05119b73c6 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 15 Jun 2018 22:14:28 +0200 Subject: [PATCH 08/34] Support for remote path in reindex api (#31290) Support 
for remote path in reindex api Closes #22913 --- docs/reference/docs/reindex.asciidoc | 10 +-- .../index/reindex/RestReindexAction.java | 9 ++- .../index/reindex/TransportReindexAction.java | 62 ++++++++++--------- ...ReindexFromRemoteBuildRestClientTests.java | 28 +++++---- .../ReindexFromRemoteWhitelistTests.java | 4 +- .../ReindexFromRemoteWithAuthTests.java | 5 +- .../ReindexSourceTargetValidationTests.java | 4 +- .../index/reindex/RestReindexActionTests.java | 23 +++++++ .../index/reindex/RetryTests.java | 6 +- .../index/reindex/RoundTripTests.java | 5 +- .../index/reindex/remote/RemoteInfoTests.java | 16 +++-- .../index/reindex/RemoteInfo.java | 25 +++++++- .../index/reindex/ReindexRequestTests.java | 12 ++-- 13 files changed, 136 insertions(+), 73 deletions(-) diff --git a/docs/reference/docs/reindex.asciidoc b/docs/reference/docs/reindex.asciidoc index f05acab559ce1..bdbffb0a08d5d 100644 --- a/docs/reference/docs/reindex.asciidoc +++ b/docs/reference/docs/reindex.asciidoc @@ -422,11 +422,11 @@ POST _reindex // TEST[s/"username": "user",//] // TEST[s/"password": "pass"//] -The `host` parameter must contain a scheme, host, and port (e.g. -`https://otherhost:9200`). The `username` and `password` parameters are -optional, and when they are present `_reindex` will connect to the remote -Elasticsearch node using basic auth. Be sure to use `https` when using -basic auth or the password will be sent in plain text. +The `host` parameter must contain a scheme, host, port (e.g. +`https://otherhost:9200`) and optional path (e.g. `https://otherhost:9200/proxy`). +The `username` and `password` parameters are optional, and when they are present `_reindex` +will connect to the remote Elasticsearch node using basic auth. Be sure to use `https` when +using basic auth or the password will be sent in plain text. Remote hosts have to be explicitly whitelisted in elasticsearch.yaml using the `reindex.remote.whitelist` property. It can be set to a comma delimited list diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java index f1ac681b59fdf..a5520c90b0ff5 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestReindexAction.java @@ -57,7 +57,7 @@ */ public class RestReindexAction extends AbstractBaseReindexRestHandler { static final ObjectParser PARSER = new ObjectParser<>("reindex"); - private static final Pattern HOST_PATTERN = Pattern.compile("(?[^:]+)://(?[^:]+):(?\\d+)"); + private static final Pattern HOST_PATTERN = Pattern.compile("(?[^:]+)://(?[^:]+):(?\\d+)(?/.*)?"); static { ObjectParser.Parser sourceParser = (parser, request, context) -> { @@ -139,10 +139,12 @@ static RemoteInfo buildRemoteInfo(Map source) throws IOException String hostInRequest = requireNonNull(extractString(remote, "host"), "[host] must be specified to reindex from a remote cluster"); Matcher hostMatcher = HOST_PATTERN.matcher(hostInRequest); if (false == hostMatcher.matches()) { - throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port] but was [" + hostInRequest + "]"); + throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? 
but was [" + + hostInRequest + "]"); } String scheme = hostMatcher.group("scheme"); String host = hostMatcher.group("host"); + String pathPrefix = hostMatcher.group("pathPrefix"); int port = Integer.parseInt(hostMatcher.group("port")); Map headers = extractStringStringMap(remote, "headers"); TimeValue socketTimeout = extractTimeValue(remote, "socket_timeout", RemoteInfo.DEFAULT_SOCKET_TIMEOUT); @@ -151,7 +153,8 @@ static RemoteInfo buildRemoteInfo(Map source) throws IOException throw new IllegalArgumentException( "Unsupported fields in [remote]: [" + Strings.collectionToCommaDelimitedString(remote.keySet()) + "]"); } - return new RemoteInfo(scheme, host, port, queryForRemote(source), username, password, headers, socketTimeout, connectTimeout); + return new RemoteInfo(scheme, host, port, pathPrefix, queryForRemote(source), + username, password, headers, socketTimeout, connectTimeout); } /** diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 6f839558e03cc..5e0ad0fd3fdd8 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BulkItemResponse.Failure; +import org.elasticsearch.client.RestClientBuilder; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; @@ -206,34 +207,39 @@ static RestClient buildRestClient(RemoteInfo remoteInfo, long taskId, List header : remoteInfo.getHeaders().entrySet()) { clientHeaders[i++] = new BasicHeader(header.getKey(), header.getValue()); } - return RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) - .setDefaultHeaders(clientHeaders) - .setRequestConfigCallback(c -> { - c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis())); - c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis())); - return c; - }) - .setHttpClientConfigCallback(c -> { - // Enable basic auth if it is configured - if (remoteInfo.getUsername() != null) { - UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), - remoteInfo.getPassword()); - CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, creds); - c.setDefaultCredentialsProvider(credentialsProvider); - } - // Stick the task id in the thread name so we can track down tasks from stack traces - AtomicInteger threads = new AtomicInteger(); - c.setThreadFactory(r -> { - String name = "es-client-" + taskId + "-" + threads.getAndIncrement(); - Thread t = new Thread(r, name); - threadCollector.add(t); - return t; - }); - // Limit ourselves to one reactor thread because for now the search process is single threaded. 
- c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); - return c; - }).build(); + final RestClientBuilder builder = + RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme())) + .setDefaultHeaders(clientHeaders) + .setRequestConfigCallback(c -> { + c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis())); + c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis())); + return c; + }) + .setHttpClientConfigCallback(c -> { + // Enable basic auth if it is configured + if (remoteInfo.getUsername() != null) { + UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), + remoteInfo.getPassword()); + CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(AuthScope.ANY, creds); + c.setDefaultCredentialsProvider(credentialsProvider); + } + // Stick the task id in the thread name so we can track down tasks from stack traces + AtomicInteger threads = new AtomicInteger(); + c.setThreadFactory(r -> { + String name = "es-client-" + taskId + "-" + threads.getAndIncrement(); + Thread t = new Thread(r, name); + threadCollector.add(t); + return t; + }); + // Limit ourselves to one reactor thread because for now the search process is single threaded. + c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); + return c; + }); + if (Strings.hasLength(remoteInfo.getPathPrefix()) && "/".equals(remoteInfo.getPathPrefix()) == false) { + builder.setPathPrefix(remoteInfo.getPathPrefix()); + } + return builder.build(); } /** diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java index 14e3142d226c9..db32e4813b316 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteBuildRestClientTests.java @@ -34,20 +34,22 @@ public class ReindexFromRemoteBuildRestClientTests extends RestClientBuilderTestCase { public void testBuildRestClient() throws Exception { - RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, new BytesArray("ignored"), null, null, emptyMap(), + for(final String path: new String[]{"", null, "/", "path"}) { + RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, path, new BytesArray("ignored"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); - long taskId = randomLong(); - List threads = synchronizedList(new ArrayList<>()); - RestClient client = TransportReindexAction.buildRestClient(remoteInfo, taskId, threads); - try { - assertBusy(() -> assertThat(threads, hasSize(2))); - int i = 0; - for (Thread thread : threads) { - assertEquals("es-client-" + taskId + "-" + i, thread.getName()); - i++; + long taskId = randomLong(); + List threads = synchronizedList(new ArrayList<>()); + RestClient client = TransportReindexAction.buildRestClient(remoteInfo, taskId, threads); + try { + assertBusy(() -> assertThat(threads, hasSize(2))); + int i = 0; + for (Thread thread : threads) { + assertEquals("es-client-" + taskId + "-" + i, thread.getName()); + i++; + } + } finally { + client.close(); } - } finally { - client.close(); } } @@ -57,7 +59,7 @@ public void testHeaders() throws Exception { for (int i = 0; i < 
numHeaders; ++i) { headers.put("header" + i, Integer.toString(i)); } - RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, new BytesArray("ignored"), null, null, + RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, null, new BytesArray("ignored"), null, null, headers, RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); long taskId = randomLong(); List threads = synchronizedList(new ArrayList<>()); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java index 128cd4043e283..e32370b166546 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWhitelistTests.java @@ -49,7 +49,7 @@ public void testLocalRequestWithWhitelist() { * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything. */ private RemoteInfo newRemoteInfo(String host, int port) { - return new RemoteInfo(randomAlphaOfLength(5), host, port, new BytesArray("test"), null, null, emptyMap(), + return new RemoteInfo(randomAlphaOfLength(5), host, port, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } @@ -63,7 +63,7 @@ public void testWhitelistedRemote() { public void testWhitelistedByPrefix() { checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), - new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, new BytesArray("test"), null, null, emptyMap(), + new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT)); checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")), newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200)); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java index 72ba651dff9ae..dc6d5eac58897 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexFromRemoteWithAuthTests.java @@ -104,8 +104,9 @@ public void fetchTransportAddress() { * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything. 
*/ private RemoteInfo newRemoteInfo(String username, String password, Map headers) { - return new RemoteInfo("http", address.getAddress(), address.getPort(), new BytesArray("{\"match_all\":{}}"), username, password, - headers, RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + return new RemoteInfo("http", address.getAddress(), address.getPort(), null, + new BytesArray("{\"match_all\":{}}"), username, password, headers, + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } public void testReindexFromRemoteWithAuthentication() throws Exception { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java index 28b9febe1c289..4784d7f5fe546 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexSourceTargetValidationTests.java @@ -88,10 +88,10 @@ public void testTargetIsAlias() { public void testRemoteInfoSkipsValidation() { // The index doesn't have to exist - succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap(), + succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "does_not_exist", "target"); // And it doesn't matter if they are the same index. They are considered to be different because the remote one is, well, remote. - succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap(), + succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, new BytesArray("test"), null, null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "target", "target"); } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java index 88fa31f423a21..b06948b90581a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RestReindexActionTests.java @@ -89,6 +89,7 @@ public void testBuildRemoteInfoWithAllHostParts() throws IOException { assertEquals("http", info.getScheme()); assertEquals("example.com", info.getHost()); assertEquals(9200, info.getPort()); + assertNull(info.getPathPrefix()); assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); // Didn't set the timeout so we should get the default assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); // Didn't set the timeout so we should get the default @@ -96,8 +97,30 @@ public void testBuildRemoteInfoWithAllHostParts() throws IOException { assertEquals("https", info.getScheme()); assertEquals("other.example.com", info.getHost()); assertEquals(9201, info.getPort()); + assertNull(info.getPathPrefix()); assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); + + info = buildRemoteInfoHostTestCase("https://other.example.com:9201/"); + assertEquals("https", info.getScheme()); + assertEquals("other.example.com", info.getHost()); + assertEquals(9201, info.getPort()); + 
assertEquals("/", info.getPathPrefix()); + assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); + assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); + + info = buildRemoteInfoHostTestCase("https://other.example.com:9201/proxy-path/"); + assertEquals("https", info.getScheme()); + assertEquals("other.example.com", info.getHost()); + assertEquals(9201, info.getPort()); + assertEquals("/proxy-path/", info.getPathPrefix()); + assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); + assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); + + final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> buildRemoteInfoHostTestCase("https")); + assertEquals("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? but was [https]", + exception.getMessage()); } public void testReindexFromRemoteRequestParsing() throws IOException { diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index 1107a36086927..9ea2a24bfb136 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -124,8 +124,10 @@ public void testReindexFromRemote() throws Exception { assertNotNull(masterNode); TransportAddress address = masterNode.getHttp().getAddress().publishAddress(); - RemoteInfo remote = new RemoteInfo("http", address.getAddress(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null, - null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); + RemoteInfo remote = + new RemoteInfo("http", address.getAddress(), address.getPort(), null, + new BytesArray("{\"match_all\":{}}"), null, null, emptyMap(), + RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); ReindexRequestBuilder request = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source").destination("dest") .setRemoteInfo(remote); return request; diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 946ab030c8285..2dc4b59e8d9f9 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -63,8 +63,9 @@ public void testReindexRequest() throws IOException { } TimeValue socketTimeout = parseTimeValue(randomPositiveTimeValue(), "socketTimeout"); TimeValue connectTimeout = parseTimeValue(randomPositiveTimeValue(), "connectTimeout"); - reindex.setRemoteInfo(new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, query, username, password, headers, - socketTimeout, connectTimeout)); + reindex.setRemoteInfo( + new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, null, + query, username, password, headers, socketTimeout, connectTimeout)); } ReindexRequest tripped = new ReindexRequest(); roundTrip(reindex, tripped); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java index d6ab599b43c2d..de0ade9c47cc3 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java +++ 
b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteInfoTests.java @@ -26,17 +26,21 @@ import static java.util.Collections.emptyMap; public class RemoteInfoTests extends ESTestCase { - private RemoteInfo newRemoteInfo(String scheme, String username, String password) { - return new RemoteInfo(scheme, "testhost", 12344, new BytesArray("testquery"), username, password, emptyMap(), + private RemoteInfo newRemoteInfo(String scheme, String prefixPath, String username, String password) { + return new RemoteInfo(scheme, "testhost", 12344, prefixPath, new BytesArray("testquery"), username, password, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } public void testToString() { - assertEquals("host=testhost port=12344 query=testquery", newRemoteInfo("http", null, null).toString()); - assertEquals("host=testhost port=12344 query=testquery username=testuser", newRemoteInfo("http", "testuser", null).toString()); + assertEquals("host=testhost port=12344 query=testquery", + newRemoteInfo("http", null, null, null).toString()); + assertEquals("host=testhost port=12344 query=testquery username=testuser", + newRemoteInfo("http", null, "testuser", null).toString()); assertEquals("host=testhost port=12344 query=testquery username=testuser password=<<>>", - newRemoteInfo("http", "testuser", "testpass").toString()); + newRemoteInfo("http", null, "testuser", "testpass").toString()); assertEquals("scheme=https host=testhost port=12344 query=testquery username=testuser password=<<>>", - newRemoteInfo("https", "testuser", "testpass").toString()); + newRemoteInfo("https", null, "testuser", "testpass").toString()); + assertEquals("scheme=https host=testhost port=12344 pathPrefix=prxy query=testquery username=testuser password=<<>>", + newRemoteInfo("https", "prxy", "testuser", "testpass").toString()); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java index 8e7a990902631..494658821cfd5 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java @@ -48,6 +48,7 @@ public class RemoteInfo implements Writeable { private final String scheme; private final String host; private final int port; + private final String pathPrefix; private final BytesReference query; private final String username; private final String password; @@ -61,11 +62,12 @@ public class RemoteInfo implements Writeable { */ private final TimeValue connectTimeout; - public RemoteInfo(String scheme, String host, int port, BytesReference query, String username, String password, - Map headers, TimeValue socketTimeout, TimeValue connectTimeout) { + public RemoteInfo(String scheme, String host, int port, String pathPrefix, BytesReference query, String username, String password, + Map headers, TimeValue socketTimeout, TimeValue connectTimeout) { this.scheme = requireNonNull(scheme, "[scheme] must be specified to reindex from a remote cluster"); this.host = requireNonNull(host, "[host] must be specified to reindex from a remote cluster"); this.port = port; + this.pathPrefix = pathPrefix; this.query = requireNonNull(query, "[query] must be specified to reindex from a remote cluster"); this.username = username; this.password = password; @@ -97,6 +99,11 @@ public RemoteInfo(StreamInput in) throws IOException { socketTimeout = DEFAULT_SOCKET_TIMEOUT; connectTimeout = DEFAULT_CONNECT_TIMEOUT; } + if 
(in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + pathPrefix = in.readOptionalString(); + } else { + pathPrefix = null; + } } @Override @@ -116,6 +123,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeTimeValue(socketTimeout); out.writeTimeValue(connectTimeout); } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeOptionalString(pathPrefix); + } } public String getScheme() { @@ -130,6 +140,11 @@ public int getPort() { return port; } + @Nullable + public String getPathPrefix() { + return pathPrefix; + } + public BytesReference getQuery() { return query; } @@ -169,7 +184,11 @@ public String toString() { // http is the default so it isn't worth taking up space if it is the scheme b.append("scheme=").append(scheme).append(' '); } - b.append("host=").append(host).append(" port=").append(port).append(" query=").append(query.utf8ToString()); + b.append("host=").append(host).append(" port=").append(port); + if (pathPrefix != null) { + b.append(" pathPrefix=").append(pathPrefix); + } + b.append(" query=").append(query.utf8ToString()); if (username != null) { b.append(" username=").append(username); } diff --git a/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java index 9f4b20ff35ba3..6c1988a1440e9 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java @@ -37,8 +37,9 @@ public class ReindexRequestTests extends AbstractBulkByScrollRequestTestCase Date: Fri, 15 Jun 2018 21:15:35 +0100 Subject: [PATCH 09/34] [ML] Put ML filter API response should contain the filter (#31362) --- .../xpack/core/ml/action/PutFilterAction.java | 51 +++++++++++++++++-- .../PutCalendarActionResponseTests.java | 13 ++++- .../action/PutFilterActionResponseTests.java | 31 +++++++++++ .../ml/action/TransportPutFilterAction.java | 2 +- .../rest-api-spec/test/ml/filter_crud.yml | 6 ++- .../ml/integration/DetectionRulesIT.java | 7 ++- .../MlNativeAutodetectIntegTestCase.java | 5 +- 7 files changed, 100 insertions(+), 15 deletions(-) create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 2f7606795f001..8269a105b6463 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -9,7 +9,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -115,10 +115,53 @@ public RequestBuilder(ElasticsearchClient client) { } } - public static class Response extends AcknowledgedResponse { + public static class Response extends ActionResponse implements ToXContentObject { - public Response() { - super(true); + private MlFilter filter; + + Response() { + } + + public Response(MlFilter 
filter) { + this.filter = filter; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filter = new MlFilter(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + filter.writeTo(out); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return filter.toXContent(builder, params); + } + + public MlFilter getFilter() { + return filter; + } + + @Override + public int hashCode() { + return Objects.hash(filter); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(filter, other.filter); } } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java index 941de884554bf..77d4d788db620 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutCalendarActionResponseTests.java @@ -5,10 +5,14 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.calendars.CalendarTests; -public class PutCalendarActionResponseTests extends AbstractStreamableTestCase { +import java.io.IOException; + +public class PutCalendarActionResponseTests extends AbstractStreamableXContentTestCase { @Override protected PutCalendarAction.Response createTestInstance() { @@ -19,4 +23,9 @@ protected PutCalendarAction.Response createTestInstance() { protected PutCalendarAction.Response createBlankInstance() { return new PutCalendarAction.Response(); } + + @Override + protected PutCalendarAction.Response doParseInstance(XContentParser parser) throws IOException { + return new PutCalendarAction.Response(Calendar.LENIENT_PARSER.parse(parser, null).build()); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java new file mode 100644 index 0000000000000..1e697f5172a4a --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutFilterActionResponseTests.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.config.MlFilterTests; + +import java.io.IOException; + +public class PutFilterActionResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected PutFilterAction.Response createTestInstance() { + return new PutFilterAction.Response(MlFilterTests.createRandom()); + } + + @Override + protected PutFilterAction.Response createBlankInstance() { + return new PutFilterAction.Response(); + } + + @Override + protected PutFilterAction.Response doParseInstance(XContentParser parser) throws IOException { + return new PutFilterAction.Response(MlFilter.LENIENT_PARSER.parse(parser, null).build()); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index f7ac11e2d1aec..fc14ef085dd33 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -69,7 +69,7 @@ protected void doExecute(PutFilterAction.Request request, ActionListener { + "description": "A newly created filter", "items": ["abc", "xyz"] } - - match: { acknowledged: true } + - match: { filter_id: filter-foo2 } + - match: { description: "A newly created filter" } + - match: { items: ["abc", "xyz"]} - do: xpack.ml.get_filters: @@ -128,6 +131,7 @@ setup: - match: filters.0: filter_id: "filter-foo2" + description: "A newly created filter" items: ["abc", "xyz"] --- diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index b99170546df3b..fbda8ad716b2c 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -35,7 +35,6 @@ import java.util.Set; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.isOneOf; /** @@ -121,7 +120,7 @@ public void testCondition() throws Exception { public void testScope() throws Exception { MlFilter safeIps = MlFilter.builder("safe_ips").setItems("111.111.111.111", "222.222.222.222").build(); - assertThat(putMlFilter(safeIps), is(true)); + assertThat(putMlFilter(safeIps).getFilter(), equalTo(safeIps)); DetectionRule rule = new DetectionRule.Builder(RuleScope.builder().include("ip", "safe_ips")).build(); @@ -179,7 +178,7 @@ public void testScope() throws Exception { // Now let's update the filter MlFilter updatedFilter = MlFilter.builder(safeIps.getId()).setItems("333.333.333.333").build(); - assertThat(putMlFilter(updatedFilter), is(true)); + assertThat(putMlFilter(updatedFilter).getFilter(), equalTo(updatedFilter)); // Wait until the notification that the process was updated is indexed assertBusy(() -> { @@ -230,7 +229,7 @@ public void testScopeAndCondition() throws IOException { // We have 2 IPs and they're both safe-listed. 
List ips = Arrays.asList("111.111.111.111", "222.222.222.222"); MlFilter safeIps = MlFilter.builder("safe_ips").setItems(ips).build(); - assertThat(putMlFilter(safeIps), is(true)); + assertThat(putMlFilter(safeIps).getFilter(), equalTo(safeIps)); // Ignore if ip in safe list AND actual < 10. DetectionRule rule = new DetectionRule.Builder(RuleScope.builder().include("ip", "safe_ips")) diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 9057db476ad77..4e6fb03497e6a 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -419,9 +419,8 @@ protected List getForecasts(String jobId, ForecastRequestStats forecas return forecasts; } - protected boolean putMlFilter(MlFilter filter) { - PutFilterAction.Response response = client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet(); - return response.isAcknowledged(); + protected PutFilterAction.Response putMlFilter(MlFilter filter) { + return client().execute(PutFilterAction.INSTANCE, new PutFilterAction.Request(filter)).actionGet(); } protected PutCalendarAction.Response putCalendar(String calendarId, List jobIds, String description) { From babb16d90cdc05b1b46d91948f7d6bf38a49bf6e Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 15 Jun 2018 22:24:47 +0200 Subject: [PATCH 10/34] Support for remote path in reindex api - post backport fix Closes #22913 --- .../main/java/org/elasticsearch/index/reindex/RemoteInfo.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java index 494658821cfd5..70f79a9def605 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java @@ -99,7 +99,7 @@ public RemoteInfo(StreamInput in) throws IOException { socketTimeout = DEFAULT_SOCKET_TIMEOUT; connectTimeout = DEFAULT_CONNECT_TIMEOUT; } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { pathPrefix = in.readOptionalString(); } else { pathPrefix = null; @@ -123,7 +123,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeTimeValue(socketTimeout); out.writeTimeValue(connectTimeout); } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeOptionalString(pathPrefix); } } From c3084a332b6d2f34fa069d969932d53aacf1e5f7 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Fri, 15 Jun 2018 19:07:47 -0400 Subject: [PATCH 11/34] SQL: Fix rest endpoint names in node stats (#31371) Fixes wrong name for the sql translate endpoint and makes rest endpoint names in stats more consistent. 
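Stepping back to the post-backport fix in the previous patch: the wire-version gate for the new pathPrefix field moves from V_7_0_0_alpha1 down to V_6_4_0, so both sides of a mixed-version cluster agree on whether the extra optional string is on the wire. A minimal sketch of that read/write pattern, with a hypothetical class and field name and assuming Elasticsearch's StreamInput/StreamOutput API:

["source","java"]
----
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

// Sketch of a version-gated optional field: the reader and the writer must use
// the same version check, otherwise mixed-version serialization breaks.
class VersionGatedField {
    private final String pathPrefix; // may be null

    VersionGatedField(StreamInput in) throws IOException {
        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
            pathPrefix = in.readOptionalString(); // newer nodes send the field
        } else {
            pathPrefix = null;                    // older nodes never wrote it
        }
    }

    void writeTo(StreamOutput out) throws IOException {
        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
            out.writeOptionalString(pathPrefix);  // only send to nodes that can read it
        }
    }
}
----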
--- .../xpack/sql/plugin/RestSqlClearCursorAction.java | 2 +- .../org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java | 2 +- .../elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java index 534d0459180e0..175b78d4f6655 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlClearCursorAction.java @@ -37,6 +37,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override public String getName() { - return "sql_translate_action"; + return "xpack_sql_clear_cursor_action"; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java index 9e34a3fb2e097..a8daa1136d390 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlQueryAction.java @@ -114,6 +114,6 @@ public RestResponse buildResponse(SqlQueryResponse response) throws Exception { @Override public String getName() { - return "xpack_sql_action"; + return "xpack_sql_query_action"; } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java index 503ee84314820..74d94e4800606 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/RestSqlTranslateAction.java @@ -40,7 +40,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli @Override public String getName() { - return "sql_translate_action"; + return "xpack_sql_translate_action"; } } From 215c5f292a9b954fa7ccfe3f29540875076ae1ae Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sun, 17 Jun 2018 00:14:59 +0300 Subject: [PATCH 12/34] [DOCS] Improve install and setup section for SQL JDBC --- x-pack/docs/en/sql/endpoints/jdbc.asciidoc | 133 ++++++++++++++++++++- 1 file changed, 129 insertions(+), 4 deletions(-) diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc index 9ac197048ddae..067a4c586fb8e 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc @@ -6,10 +6,133 @@ Elasticsearch's SQL jdbc driver is a rich, fully featured JDBC driver for Elasti It is Type 4 driver, meaning it is a platform independent, stand-alone, Direct to Database, pure Java driver that converts JDBC calls to Elasticsearch SQL. -// TODO add example of resolving the artifact in maven and gradle. 
+[float] +=== Installation -You can connect to it using the two APIs offered -by JDBC, namely `java.sql.Driver` and `DriverManager`: +The JDBC driver can be obtained either by downloading it from the https://www.elastic.co/downloads/jdbc-client[elastic.co] site or by using a http://maven.apache.org/[Maven]-compatible tool with the following dependency: + +["source","xml",subs="attributes"] +---- + + org.elasticsearch.plugin.jdbc + jdbc + {ver} + +---- + +from `artifacts.elastic.co/maven` by adding it to the repositories list: + +["source","xml",subs="attributes"] +---- + + + elastic.co + https://artifacts.elastic.co/maven + + +---- + +[[jdbc-setup]] +[float] +=== Setup + +The driver main class is `org.elasticsearch.xpack.sql.jdbc.jdbc.JdbcDriver`. Note the driver +also implements the JDBC 4.0 +Service Provider+ mechanism meaning it is registerd automatically +as long as its available in the classpath. + +Once registered, the driver expects the following syntax as an URL: + +["source","text",subs="attributes"] +---- +jdbc:es://<1>[http|https]?<2>[host[:port]]*<3>/[prefix]*<4>[?[option=value]&<5>]* +---- + +<1> `jdbc:es://` prefix. Mandatory. +<2> type of HTTP connection to make - `http` (default) or `https`. Optional. +<3> host (`localhost` by default) and port (`9200` by default). Optional. +<4> prefix (empty by default). Typically used when hosting {es} under a certain path. Optional. +<5> Parameters for the JDBC driver. Empty by default. Optional. + +The driver recognized the following parameters: + +[[jdbc-cfg]] +[float] +===== Essential + +`timezone` (default JVM timezone):: +Timezone used by the driver _per connection_ indicated by its `ID`. +*Highly* recommended to set it (to, say, `UTC`) as the JVM timezone can vary, is global for the entire JVM and can't be changed easily when running under a security manager. + +[[jdbc-cfg-network]] +[float] +===== Network + +`connect.timeout` (default 30s):: +Connection timeout (in seconds). That is the maximum amount of time waiting to make a connection to the server. + +`network.timeout` (default 60s):: +Network timeout (in seconds). That is the maximum amount of time waiting for the network. + +`page.timeout` (default 45s):: +Page timeout (in seconds). That is the maximum amount of time waiting for a page. + +`page.size` (default 1000):: +Page size (in entries). The number of results returned per page by the server. + +`query.timeout` (default 90s):: +Query timeout (in seconds). That is the maximum amount of time waiting for a query to return. + +[[jdbc-cfg-auth]] +[float] +==== Basic Authentication + +`user`:: Basic Authentication user name + +`password`:: Basic Authentication password + +[[jdbc-cfg-ssl]] +[float] +==== SSL + +`ssl` (default false):: Enable SSL + +`ssl.keystore.location`:: key store (if used) location + +`ssl.keystore.pass`:: key store password + +`ssl.keystore.type` (default `JKS`):: key store type. 
`PKCS12` is a common, alternative format + +`ssl.truststore.location`:: trust store location + +`ssl.truststore.pass`:: trust store password + +`ssl.cert.allow.self.signed` (default `false`):: Whether or not to allow self signed certificates + +`ssl.protocol`(default `TLS`):: SSL protocol to be used + +[float] +==== Proxy + +`proxy.http`:: Http proxy host name + +`proxy.socks`:: SOCKS proxy host name + + +To put all of it together, the following URL: + +["source","text",subs="attributes"] +---- +jdbc:es://http://server:3456/timezone=UTC&page.size=250 +---- + +Opens up a {es-jdbc} connection to `server` on port `3456`, setting the JDBC timezone to `UTC` and its pagesize to `250` entries. + +=== API usage + +One can use JDBC through the official `java.sql` and `javax.sql` packages: + +==== `java.sql` +The former through `java.sql.Driver` and `DriverManager`: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- @@ -20,7 +143,9 @@ HTTP traffic. The port is by default 9200. <2> Properties for connecting to Elasticsearch. An empty `Properties` instance is fine for unsecured Elasticsearch. -or `javax.sql.DataSource` through +==== `javax.sql` + +Accessible through the `javax.sql.DataSource` API: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{jdbc-tests}/JdbcIntegrationTestCase.java[connect-ds] From bbcfcd1ca5a6b78757a7f5578da0cc618839c27f Mon Sep 17 00:00:00 2001 From: Costin Leau Date: Sun, 17 Jun 2018 00:40:01 +0300 Subject: [PATCH 13/34] [DOCS] Fix version in SQL JDBC Maven template --- x-pack/docs/en/sql/endpoints/jdbc.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc index 067a4c586fb8e..a980278810e57 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/x-pack/docs/en/sql/endpoints/jdbc.asciidoc @@ -16,7 +16,7 @@ The JDBC driver can be obtained either by downloading it from the https://www.el org.elasticsearch.plugin.jdbc jdbc - {ver} + {version} ---- From 5b94afd3099cd60c7342ad7ba4e5764420d531eb Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Sun, 17 Jun 2018 12:17:28 +0300 Subject: [PATCH 14/34] [TEST] Double write alias fault (#30942) --- .../cluster/metadata/ToAndFromJsonMetaDataTests.java | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java index bde478eb36381..3ac55ec663ca0 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetaDataTests.java @@ -132,7 +132,7 @@ public void testSimpleJsonFromAndTo() throws IOException { .putMapping("mapping1", MAPPING_SOURCE1) .putMapping("mapping2", MAPPING_SOURCE2) .putAlias(newAliasMetaDataBuilder("alias1").filter(ALIAS_FILTER1)) - .putAlias(newAliasMetaDataBuilder("alias2").writeIndex(randomBoolean() ? null : randomBoolean())) + .putAlias(newAliasMetaDataBuilder("alias3").writeIndex(randomBoolean() ? 
null : randomBoolean()))
         .putAlias(newAliasMetaDataBuilder("alias4").filter(ALIAS_FILTER2)))
                 .put(IndexTemplateMetaData.builder("foo")
                         .patterns(Collections.singletonList("bar"))
@@ -287,10 +287,10 @@ public void testSimpleJsonFromAndTo() throws IOException {
         assertThat(indexMetaData.getAliases().size(), equalTo(3));
         assertThat(indexMetaData.getAliases().get("alias1").alias(), equalTo("alias1"));
         assertThat(indexMetaData.getAliases().get("alias1").filter().string(), equalTo(ALIAS_FILTER1));
-        assertThat(indexMetaData.getAliases().get("alias2").alias(), equalTo("alias2"));
-        assertThat(indexMetaData.getAliases().get("alias2").filter(), nullValue());
-        assertThat(indexMetaData.getAliases().get("alias2").writeIndex(),
-                equalTo(metaData.index("test12").getAliases().get("alias2").writeIndex()));
+        assertThat(indexMetaData.getAliases().get("alias3").alias(), equalTo("alias3"));
+        assertThat(indexMetaData.getAliases().get("alias3").filter(), nullValue());
+        assertThat(indexMetaData.getAliases().get("alias3").writeIndex(),
+                equalTo(metaData.index("test12").getAliases().get("alias3").writeIndex()));
         assertThat(indexMetaData.getAliases().get("alias4").alias(), equalTo("alias4"));
         assertThat(indexMetaData.getAliases().get("alias4").filter().string(), equalTo(ALIAS_FILTER2));

From 3d5f113ada63b9175631e6ad1ef1645dc8f5f275 Mon Sep 17 00:00:00 2001
From: Simon Willnauer
Date: Sun, 17 Jun 2018 13:32:53 +0200
Subject: [PATCH 15/34] Ensure we don't use a remote profile if cluster name matches (#31331)

If we run into a race condition between a node being configured as a remote
node for cross-cluster search etc. and that node joining the cluster, we might
connect to that node with a remote profile. If that node then joins the
cluster it connected to as a CCS remote node, we use the wrong profile and can
no longer use bulk connections etc. This change uses the remote profile only
if we connect to a node that has a different cluster name than the local
cluster. This is not a perfect fix, but it is the safe option: at worst we
lose a small optimization of using fewer connections per node, which matters
little since we only connect to a small set of nodes anyway.
Closes #29321 --- .../transport/RemoteClusterConnection.java | 33 ++++++- .../transport/TransportService.java | 18 +++- .../RemoteClusterConnectionTests.java | 96 +++++++++++++++++++ .../transport/MockTcpTransport.java | 17 +++- 4 files changed, 153 insertions(+), 11 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index 82b921bd233b0..e37f46c5517db 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -87,6 +87,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo private volatile boolean skipUnavailable; private final ConnectHandler connectHandler; private SetOnce remoteClusterName = new SetOnce<>(); + private final ClusterName localClusterName; /** * Creates a new {@link RemoteClusterConnection} @@ -100,6 +101,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo RemoteClusterConnection(Settings settings, String clusterAlias, List seedNodes, TransportService transportService, int maxNumRemoteConnections, Predicate nodePredicate) { super(settings); + this.localClusterName = ClusterName.CLUSTER_NAME_SETTING.get(settings); this.transportService = transportService; this.maxNumRemoteConnections = maxNumRemoteConnections; this.nodePredicate = nodePredicate; @@ -310,6 +312,21 @@ public boolean isClosed() { return connectHandler.isClosed(); } + private ConnectionProfile getRemoteProfile(ClusterName name) { + // we can only compare the cluster name to make a decision if we should use a remote profile + // we can't use a cluster UUID here since we could be connecting to that remote cluster before + // the remote node has joined its cluster and have a cluster UUID. The fact that we just lose a + // rather smallish optimization on the connection layer under certain situations where remote clusters + // have the same name as the local one is minor here. + // the alternative here is to complicate the remote infrastructure to also wait until we formed a cluster, + // gained a cluster UUID and then start connecting etc. we rather use this simplification in order to maintain simplicity + if (this.localClusterName.equals(name)) { + return null; + } else { + return remoteProfile; + } + } + /** * The connect handler manages node discovery and the actual connect to the remote cluster. * There is at most one connect job running at any time. If such a connect job is triggered @@ -419,7 +436,6 @@ protected void doRun() { collectRemoteNodes(seedNodes.iterator(), transportService, listener); } }); - } void collectRemoteNodes(Iterator seedNodes, @@ -431,21 +447,27 @@ void collectRemoteNodes(Iterator seedNodes, if (seedNodes.hasNext()) { cancellableThreads.executeIO(() -> { final DiscoveryNode seedNode = seedNodes.next(); - final DiscoveryNode handshakeNode; + final TransportService.HandshakeResponse handshakeResponse; Transport.Connection connection = transportService.openConnection(seedNode, ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null)); boolean success = false; try { try { - handshakeNode = transportService.handshake(connection, remoteProfile.getHandshakeTimeout().millis(), + handshakeResponse = transportService.handshake(connection, remoteProfile.getHandshakeTimeout().millis(), (c) -> remoteClusterName.get() == null ? 
true : c.equals(remoteClusterName.get())); } catch (IllegalStateException ex) { logger.warn(() -> new ParameterizedMessage("seed node {} cluster name mismatch expected " + "cluster name {}", connection.getNode(), remoteClusterName.get()), ex); throw ex; } + + final DiscoveryNode handshakeNode = handshakeResponse.getDiscoveryNode(); if (nodePredicate.test(handshakeNode) && connectedNodes.size() < maxNumRemoteConnections) { - transportService.connectToNode(handshakeNode, remoteProfile); + transportService.connectToNode(handshakeNode, getRemoteProfile(handshakeResponse.getClusterName())); + if (remoteClusterName.get() == null) { + assert handshakeResponse.getClusterName().value() != null; + remoteClusterName.set(handshakeResponse.getClusterName()); + } connectedNodes.add(handshakeNode); } ClusterStateRequest request = new ClusterStateRequest(); @@ -552,7 +574,8 @@ public void handleResponse(ClusterStateResponse response) { for (DiscoveryNode node : nodesIter) { if (nodePredicate.test(node) && connectedNodes.size() < maxNumRemoteConnections) { try { - transportService.connectToNode(node, remoteProfile); // noop if node is connected + transportService.connectToNode(node, getRemoteProfile(remoteClusterName.get())); // noop if node is + // connected connectedNodes.add(node); } catch (ConnectTransportException | IllegalStateException ex) { // ISE if we fail the handshake with an version incompatible node diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 9755898be5fef..656d8c3841769 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -342,7 +342,7 @@ public void connectToNode(final DiscoveryNode node, ConnectionProfile connection } transport.connectToNode(node, connectionProfile, (newConnection, actualProfile) -> { // We don't validate cluster names to allow for CCS connections. - final DiscoveryNode remote = handshake(newConnection, actualProfile.getHandshakeTimeout().millis(), cn -> true); + final DiscoveryNode remote = handshake(newConnection, actualProfile.getHandshakeTimeout().millis(), cn -> true).discoveryNode; if (validateConnections && node.equals(remote) == false) { throw new ConnectTransportException(node, "handshake failed. 
unexpected remote node " + remote); } @@ -378,7 +378,7 @@ public Transport.Connection openConnection(final DiscoveryNode node, ConnectionP public DiscoveryNode handshake( final Transport.Connection connection, final long handshakeTimeout) throws ConnectTransportException { - return handshake(connection, handshakeTimeout, clusterName::equals); + return handshake(connection, handshakeTimeout, clusterName::equals).discoveryNode; } /** @@ -390,11 +390,11 @@ public DiscoveryNode handshake( * @param connection the connection to a specific node * @param handshakeTimeout handshake timeout * @param clusterNamePredicate cluster name validation predicate - * @return the connected node + * @return the handshake response * @throws ConnectTransportException if the connection failed * @throws IllegalStateException if the handshake failed */ - public DiscoveryNode handshake( + public HandshakeResponse handshake( final Transport.Connection connection, final long handshakeTimeout, Predicate clusterNamePredicate) throws ConnectTransportException { final HandshakeResponse response; @@ -420,7 +420,7 @@ public HandshakeResponse newInstance() { throw new IllegalStateException("handshake failed, incompatible version [" + response.version + "] - " + node); } - return response.discoveryNode; + return response; } static class HandshakeRequest extends TransportRequest { @@ -461,6 +461,14 @@ public void writeTo(StreamOutput out) throws IOException { clusterName.writeTo(out); Version.writeVersion(version, out); } + + public DiscoveryNode getDiscoveryNode() { + return discoveryNode; + } + + public ClusterName getClusterName() { + return clusterName; + } } public void disconnectFromNode(DiscoveryNode node) { diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index ac6f99351e46d..637b8fb26a880 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -142,6 +142,102 @@ public static MockTransportService startTransport( } } + public void testLocalProfileIsUsedForLocalCluster() throws Exception { + List knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); + MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(discoverableTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { + service.start(); + service.acceptIncomingRequests(); + try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", + Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(seedNode)); + assertTrue(service.nodeConnected(seedNode)); + assertTrue(service.nodeConnected(discoverableNode)); + assertTrue(connection.assertNoRunningConnections()); + PlainTransportFuture futureHandler = new PlainTransportFuture<>( + new FutureTransportResponseHandler() { + @Override + public ClusterSearchShardsResponse 
read(StreamInput in) throws IOException { + ClusterSearchShardsResponse inst = new ClusterSearchShardsResponse(); + inst.readFrom(in); + return inst; + } + }); + TransportRequestOptions options = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.BULK) + .build(); + service.sendRequest(connection.getConnection(), ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), + options, futureHandler); + futureHandler.txGet(); + } + } + } + } + + public void testRemoteProfileIsUsedForRemoteCluster() throws Exception { + List knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT, threadPool, + Settings.builder().put("cluster.name", "foobar").build()); + MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT, + threadPool, Settings.builder().put("cluster.name", "foobar").build())) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + DiscoveryNode discoverableNode = discoverableTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(discoverableTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + + try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { + service.start(); + service.acceptIncomingRequests(); + try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", + Arrays.asList(seedNode), service, Integer.MAX_VALUE, n -> true)) { + updateSeedNodes(connection, Arrays.asList(seedNode)); + assertTrue(service.nodeConnected(seedNode)); + assertTrue(service.nodeConnected(discoverableNode)); + assertTrue(connection.assertNoRunningConnections()); + PlainTransportFuture futureHandler = new PlainTransportFuture<>( + new FutureTransportResponseHandler() { + @Override + public ClusterSearchShardsResponse read(StreamInput in) throws IOException { + ClusterSearchShardsResponse inst = new ClusterSearchShardsResponse(); + inst.readFrom(in); + return inst; + } + }); + TransportRequestOptions options = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.BULK) + .build(); + IllegalStateException ise = (IllegalStateException) expectThrows(SendRequestTransportException.class, () -> { + service.sendRequest(discoverableNode, + ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), options, futureHandler); + futureHandler.txGet(); + }).getCause(); + assertEquals(ise.getMessage(), "can't select channel size is 0 for types: [RECOVERY, BULK, STATE]"); + + PlainTransportFuture handler = new PlainTransportFuture<>( + new FutureTransportResponseHandler() { + @Override + public ClusterSearchShardsResponse read(StreamInput in) throws IOException { + ClusterSearchShardsResponse inst = new ClusterSearchShardsResponse(); + inst.readFrom(in); + return inst; + } + }); + TransportRequestOptions ops = TransportRequestOptions.builder().withType(TransportRequestOptions.Type.REG) + .build(); + service.sendRequest(connection.getConnection(), ClusterSearchShardsAction.NAME, new ClusterSearchShardsRequest(), + ops, handler); + handler.txGet(); + } + } + } + } + public void testDiscoverSingleNode() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); diff --git 
a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index 37bf95d0b153a..8831c46c01136 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -191,7 +191,22 @@ protected MockChannel initiateChannel(InetSocketAddress address, ActionListener< @Override protected ConnectionProfile resolveConnectionProfile(ConnectionProfile connectionProfile) { ConnectionProfile connectionProfile1 = resolveConnectionProfile(connectionProfile, defaultConnectionProfile); - ConnectionProfile.Builder builder = new ConnectionProfile.Builder(LIGHT_PROFILE); + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + Set allTypesWithConnection = new HashSet<>(); + Set allTypesWithoutConnection = new HashSet<>(); + for (ConnectionProfile.ConnectionTypeHandle handle : connectionProfile1.getHandles()) { + Set types = handle.getTypes(); + if (handle.length > 0) { + allTypesWithConnection.addAll(types); + } else { + allTypesWithoutConnection.addAll(types); + } + } + // make sure we maintain at least the types that are supported by this profile even if we only use a single channel for them. + builder.addConnections(1, allTypesWithConnection.toArray(new TransportRequestOptions.Type[0])); + if (allTypesWithoutConnection.isEmpty() == false) { + builder.addConnections(0, allTypesWithoutConnection.toArray(new TransportRequestOptions.Type[0])); + } builder.setHandshakeTimeout(connectionProfile1.getHandshakeTimeout()); builder.setConnectTimeout(connectionProfile1.getConnectTimeout()); return builder.build(); From 16fa6b270f5ebe9e78e0d283a88f4f7ced71c35a Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Sun, 17 Jun 2018 21:42:42 -0700 Subject: [PATCH 16/34] Remove some cases in FieldTypeLookupTests that are no longer relevant. 
(#31381)
---
 .../index/mapper/FieldTypeLookupTests.java | 25 -------------------
 1 file changed, 25 deletions(-)

diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java
index 39753548ee390..4f1b908cae84e 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldTypeLookupTests.java
@@ -32,8 +32,6 @@
 import java.util.Iterator;
 import java.util.List;
-import static org.hamcrest.Matchers.containsString;
-
 public class FieldTypeLookupTests extends ESTestCase {

     public void testEmpty() {
@@ -80,29 +78,6 @@ public void testAddExistingField() {
         assertEquals(f2.fieldType(), lookup2.get("foo"));
     }

-    public void testAddExistingIndexName() {
-        MockFieldMapper f = new MockFieldMapper("foo");
-        MockFieldMapper f2 = new MockFieldMapper("bar");
-        FieldTypeLookup lookup = new FieldTypeLookup();
-        lookup = lookup.copyAndAddAll("type1", newList(f));
-        FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2));
-
-        assertSame(f.fieldType(), lookup2.get("foo"));
-        assertSame(f2.fieldType(), lookup2.get("bar"));
-        assertEquals(2, size(lookup2.iterator()));
-    }
-
-    public void testAddExistingFullName() {
-        MockFieldMapper f = new MockFieldMapper("foo");
-        MockFieldMapper f2 = new MockFieldMapper("foo");
-        FieldTypeLookup lookup = new FieldTypeLookup();
-        try {
-            lookup.copyAndAddAll("type2", newList(f2));
-        } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), containsString("mapper [foo] has different [index_name]"));
-        }
-    }
-
     public void testCheckCompatibilityMismatchedTypes() {
         FieldMapper f1 = new MockFieldMapper("foo");
         FieldTypeLookup lookup = new FieldTypeLookup();

From 3378240b2954fcf879ac84f440002685c4b30cc9 Mon Sep 17 00:00:00 2001
From: Albert Zaharovits
Date: Mon, 18 Jun 2018 09:42:11 +0300
Subject: [PATCH 17/34] Reload secure settings for plugins (#31383)

Adds the ability to re-read and decrypt the local node keystore. Commonly,
the contents of the keystore backing the `SecureSettings` are not retrievable
except during node initialization. This changes that by adding a new API
which broadcasts a password to every node. The password is used to decrypt
the local keystore, which is then used to populate a `Settings` object that
is passed to all the plugins implementing the `ReloadablePlugin` interface.
Each plugin is then responsible for doing whatever "reload" means in its
case. When the `reload` handler returns, the keystore is closed and its
contents are no longer retrievable. The password is never stored persistently
on any node.

The plugins modified in this commit are: `repository-azure`, `repository-s3`,
`repository-gcs` and `discovery-ec2`.
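As a rough, hypothetical sketch of the new hook (the setting name and field below are invented and not part of this change; it assumes the `reload(Settings)` callback introduced here, whose `Settings` argument is backed by the freshly decrypted keystore only while the method runs):

    import org.elasticsearch.common.settings.SecureSetting;
    import org.elasticsearch.common.settings.SecureString;
    import org.elasticsearch.common.settings.Setting;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.plugins.Plugin;
    import org.elasticsearch.plugins.ReloadablePlugin;

    public class ExampleReloadablePlugin extends Plugin implements ReloadablePlugin {

        // hypothetical secure setting kept in the node keystore
        private static final Setting<SecureString> EXAMPLE_PASSWORD =
                SecureSetting.secureString("example.password", null);

        // whatever the plugin's components read at request time
        private volatile char[] password = new char[0];

        @Override
        public void reload(Settings settings) {
            // the keystore-backed settings are only readable while this method runs,
            // so copy out whatever the plugin needs before returning
            try (SecureString secret = EXAMPLE_PASSWORD.get(settings)) {
                password = secret.clone().getChars();
            }
        }
    }

Rebuilding or refreshing cached clients, as the S3, Azure, GCS and EC2 plugins do in this commit, would likewise happen inside `reload`.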
--- .../discovery/ec2/AmazonEc2Reference.java | 61 +++ .../discovery/ec2/AwsEc2Service.java | 74 ++- .../discovery/ec2/AwsEc2ServiceImpl.java | 148 +++--- .../ec2/AwsEc2UnicastHostsProvider.java | 43 +- .../discovery/ec2/Ec2ClientSettings.java | 145 ++++++ .../discovery/ec2/Ec2DiscoveryPlugin.java | 69 +-- .../discovery/ec2/AmazonEC2Mock.java | 15 +- .../discovery/ec2/AwsEc2ServiceImplTests.java | 31 +- .../discovery/ec2/AwsEc2ServiceMock.java | 33 +- .../ec2/Ec2DiscoveryPluginMock.java} | 20 +- .../ec2/Ec2DiscoveryPluginTests.java | 93 +++- .../discovery/ec2/Ec2DiscoveryTests.java | 46 +- .../repositories/azure/AzureBlobStore.java | 58 +-- .../repositories/azure/AzureRepository.java | 52 +-- .../azure/AzureRepositoryPlugin.java | 27 +- .../azure/AzureStorageService.java | 47 +- .../azure/AzureStorageServiceImpl.java | 296 ++++++------ .../azure/AzureStorageSettings.java | 77 +++- .../azure/AzureRepositorySettingsTests.java | 3 +- .../azure/AzureSnapshotRestoreTests.java | 56 ++- .../azure/AzureStorageServiceMock.java | 42 +- .../azure/AzureStorageServiceTests.java | 191 +++++--- .../gcs/GoogleCloudStorageBlobStore.java | 104 ++--- .../gcs/GoogleCloudStoragePlugin.java | 32 +- .../gcs/GoogleCloudStorageRepository.java | 5 +- .../gcs/GoogleCloudStorageService.java | 77 +++- ...leCloudStorageBlobStoreContainerTests.java | 15 +- ...eCloudStorageBlobStoreRepositoryTests.java | 20 +- .../gcs/GoogleCloudStorageBlobStoreTests.java | 15 +- .../gcs/GoogleCloudStorageServiceTests.java | 92 +++- plugins/repository-s3/build.gradle | 2 +- .../repositories/s3/AmazonS3Reference.java | 63 +++ .../repositories/s3/AwsS3Service.java | 23 +- .../repositories/s3/InternalAwsS3Service.java | 148 +++--- .../repositories/s3/S3BlobContainer.java | 137 +++--- .../repositories/s3/S3BlobStore.java | 74 +-- .../repositories/s3/S3ClientSettings.java | 72 ++- .../repositories/s3/S3Repository.java | 46 +- .../repositories/s3/S3RepositoryPlugin.java | 45 +- .../plugin-metadata/plugin-security.policy | 3 + .../s3/AbstractS3SnapshotRestoreTest.java | 26 +- .../repositories/s3/AmazonS3Wrapper.java | 5 + .../s3/AwsS3ServiceImplTests.java | 134 +++--- .../repositories/s3/MockAmazonS3.java | 5 + .../s3/RepositoryCredentialsTests.java | 211 +++++++++ .../RepositorySettingsCredentialsTests.java | 41 -- .../s3/S3BlobStoreContainerTests.java | 31 +- .../s3/S3BlobStoreRepositoryTests.java | 26 +- .../repositories/s3/S3BlobStoreTests.java | 12 +- .../repositories/s3/S3RepositoryTests.java | 89 ++-- .../repositories/s3/TestAmazonS3.java | 38 +- .../repositories/s3/TestAwsS3Service.java | 28 +- .../elasticsearch/action/ActionModule.java | 6 + .../NodesReloadSecureSettingsAction.java | 21 +- .../NodesReloadSecureSettingsRequest.java | 160 +++++++ ...desReloadSecureSettingsRequestBuilder.java | 84 ++++ .../NodesReloadSecureSettingsResponse.java | 149 +++++++ ...nsportNodesReloadSecureSettingsAction.java | 144 ++++++ .../client/ClusterAdminClient.java | 6 + .../client/support/AbstractClient.java | 7 + .../common/settings/KeyStoreWrapper.java | 4 +- .../common/util/LazyInitializable.java | 108 +++++ .../org/elasticsearch/plugins/Plugin.java | 1 + .../plugins/ReloadablePlugin.java | 54 +++ .../RestReloadSecureSettingsAction.java | 87 ++++ .../action/admin/ReloadSecureSettingsIT.java | 422 ++++++++++++++++++ .../action/admin/invalid.txt.keystore | 3 + 67 files changed, 3323 insertions(+), 1179 deletions(-) create mode 100644 plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java create mode 100644 
plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java rename plugins/{repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java => discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java} (62%) create mode 100644 plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java create mode 100644 plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java delete mode 100644 plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java rename plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java => server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java (56%) create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java create mode 100644 server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java create mode 100644 server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java create mode 100644 server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java create mode 100644 server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java create mode 100644 server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java new file mode 100644 index 0000000000000..0b0b208790b48 --- /dev/null +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AmazonEc2Reference.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.ec2; + +import com.amazonaws.services.ec2.AmazonEC2; + +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.util.concurrent.AbstractRefCounted; + +/** + * Handles the shutdown of the wrapped {@link AmazonEC2} using reference + * counting. 
+ */ +public class AmazonEc2Reference extends AbstractRefCounted implements Releasable { + + private final AmazonEC2 client; + + AmazonEc2Reference(AmazonEC2 client) { + super("AWS_EC2_CLIENT"); + this.client = client; + } + + /** + * Call when the client is not needed anymore. + */ + @Override + public void close() { + decRef(); + } + + /** + * Returns the underlying `AmazonEC2` client. All method calls are permitted BUT + * NOT shutdown. Shutdown is called when reference count reaches 0. + */ + public AmazonEC2 client() { + return client; + } + + @Override + protected void closeInternal() { + client.shutdown(); + } + +} diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java index db3164fe9007a..976f1db26d173 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2Service.java @@ -19,22 +19,17 @@ package org.elasticsearch.discovery.ec2; -import com.amazonaws.ClientConfiguration; -import com.amazonaws.Protocol; -import com.amazonaws.services.ec2.AmazonEC2; -import org.elasticsearch.common.settings.SecureSetting; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.unit.TimeValue; +import java.io.Closeable; import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.function.Function; -interface AwsEc2Service { +interface AwsEc2Service extends Closeable { Setting AUTO_ATTRIBUTE_SETTING = Setting.boolSetting("cloud.node.auto_attributes", false, Property.NodeScope); class HostType { @@ -45,36 +40,6 @@ class HostType { public static final String TAG_PREFIX = "tag:"; } - /** The access key (ie login id) for connecting to ec2. */ - Setting ACCESS_KEY_SETTING = SecureSetting.secureString("discovery.ec2.access_key", null); - - /** The secret key (ie password) for connecting to ec2. */ - Setting SECRET_KEY_SETTING = SecureSetting.secureString("discovery.ec2.secret_key", null); - - /** An override for the ec2 endpoint to connect to. */ - Setting ENDPOINT_SETTING = new Setting<>("discovery.ec2.endpoint", "", - s -> s.toLowerCase(Locale.ROOT), Property.NodeScope); - - /** The protocol to use to connect to to ec2. */ - Setting PROTOCOL_SETTING = new Setting<>("discovery.ec2.protocol", "https", - s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); - - /** The host name of a proxy to connect to ec2 through. */ - Setting PROXY_HOST_SETTING = Setting.simpleString("discovery.ec2.proxy.host", Property.NodeScope); - - /** The port of a proxy to connect to ec2 through. */ - Setting PROXY_PORT_SETTING = Setting.intSetting("discovery.ec2.proxy.port", 80, 0, 1<<16, Property.NodeScope); - - /** The username of a proxy to connect to s3 through. */ - Setting PROXY_USERNAME_SETTING = SecureSetting.secureString("discovery.ec2.proxy.username", null); - - /** The password of a proxy to connect to s3 through. */ - Setting PROXY_PASSWORD_SETTING = SecureSetting.secureString("discovery.ec2.proxy.password", null); - - /** The socket timeout for connecting to s3. 
*/ - Setting READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout", - TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope); - /** * discovery.ec2.host_type: The type of host type to use to communicate with other instances. * Can be one of private_ip, public_ip, private_dns, public_dns or tag:XXXX where @@ -87,26 +52,24 @@ class HostType { * discovery.ec2.any_group: If set to false, will require all security groups to be present for the instance to be used for the * discovery. Defaults to true. */ - Setting ANY_GROUP_SETTING = - Setting.boolSetting("discovery.ec2.any_group", true, Property.NodeScope); + Setting ANY_GROUP_SETTING = Setting.boolSetting("discovery.ec2.any_group", true, Property.NodeScope); /** * discovery.ec2.groups: Either a comma separated list or array based list of (security) groups. Only instances with the provided * security groups will be used in the cluster discovery. (NOTE: You could provide either group NAME or group ID.) */ - Setting> GROUPS_SETTING = - Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), Property.NodeScope); + Setting> GROUPS_SETTING = Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), + Property.NodeScope); /** * discovery.ec2.availability_zones: Either a comma separated list or array based list of availability zones. Only instances within * the provided availability zones will be used in the cluster discovery. */ - Setting> AVAILABILITY_ZONES_SETTING = - Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), s -> s.toString(), - Property.NodeScope); + Setting> AVAILABILITY_ZONES_SETTING = Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), + s -> s.toString(), Property.NodeScope); /** * discovery.ec2.node_cache_time: How long the list of hosts is cached to prevent further requests to the AWS API. Defaults to 10s. */ - Setting NODE_CACHE_TIME_SETTING = - Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), Property.NodeScope); + Setting NODE_CACHE_TIME_SETTING = Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), + Property.NodeScope); /** * discovery.ec2.tag.*: The ec2 discovery can filter machines to include in the cluster based on tags (and not just groups). @@ -115,7 +78,22 @@ class HostType { * instance to be included. */ Setting.AffixSetting> TAG_SETTING = Setting.prefixKeySetting("discovery.ec2.tag.", - key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope)); + key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope)); + + /** + * Builds then caches an {@code AmazonEC2} client using the current client + * settings. Returns an {@code AmazonEc2Reference} wrapper which should be + * released as soon as it is not required anymore. + */ + AmazonEc2Reference client(); + + /** + * Updates the settings for building the client and releases the cached one. + * Future client requests will use the new settings to lazily built the new + * client. 
+ * + * @param clientSettings the new refreshed settings + */ + void refreshAndClearCache(Ec2ClientSettings clientSettings); - AmazonEC2 client(); } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java index b53dc7a876301..67902174630ea 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImpl.java @@ -19,12 +19,9 @@ package org.elasticsearch.discovery.ec2; -import java.io.Closeable; -import java.io.IOException; import java.util.Random; +import java.util.concurrent.atomic.AtomicReference; -import com.amazonaws.AmazonClientException; -import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; @@ -35,112 +32,117 @@ import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.AmazonEC2Client; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.LazyInitializable; -class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service, Closeable { +class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service { public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/"; - private AmazonEC2Client client; + private final AtomicReference> lazyClientReference = + new AtomicReference<>(); AwsEc2ServiceImpl(Settings settings) { super(settings); } - @Override - public synchronized AmazonEC2 client() { - if (client != null) { - return client; - } - - this.client = new AmazonEC2Client(buildCredentials(logger, settings), buildConfiguration(logger, settings)); - String endpoint = findEndpoint(logger, settings); - if (endpoint != null) { - client.setEndpoint(endpoint); + private AmazonEC2 buildClient(Ec2ClientSettings clientSettings) { + final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings); + final ClientConfiguration configuration = buildConfiguration(logger, clientSettings); + final AmazonEC2 client = buildClient(credentials, configuration); + if (Strings.hasText(clientSettings.endpoint)) { + logger.debug("using explicit ec2 endpoint [{}]", clientSettings.endpoint); + client.setEndpoint(clientSettings.endpoint); } - - return this.client; + return client; } - protected static AWSCredentialsProvider buildCredentials(Logger logger, Settings settings) { - AWSCredentialsProvider credentials; - - try (SecureString key = ACCESS_KEY_SETTING.get(settings); - SecureString secret = SECRET_KEY_SETTING.get(settings)) { - if (key.length() == 0 && secret.length() == 0) { - logger.debug("Using either environment variables, system properties or instance profile credentials"); - credentials = new DefaultAWSCredentialsProviderChain(); - } else { - logger.debug("Using basic key/secret credentials"); - credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key.toString(), secret.toString())); - } - } - - return credentials; + // proxy for testing + AmazonEC2 buildClient(AWSCredentialsProvider 
credentials, ClientConfiguration configuration) { + final AmazonEC2 client = new AmazonEC2Client(credentials, configuration); + return client; } - protected static ClientConfiguration buildConfiguration(Logger logger, Settings settings) { - ClientConfiguration clientConfiguration = new ClientConfiguration(); + // pkg private for tests + static ClientConfiguration buildConfiguration(Logger logger, Ec2ClientSettings clientSettings) { + final ClientConfiguration clientConfiguration = new ClientConfiguration(); // the response metadata cache is only there for diagnostics purposes, // but can force objects from every response to the old generation. clientConfiguration.setResponseMetadataCacheSize(0); - clientConfiguration.setProtocol(PROTOCOL_SETTING.get(settings)); - - if (PROXY_HOST_SETTING.exists(settings)) { - String proxyHost = PROXY_HOST_SETTING.get(settings); - Integer proxyPort = PROXY_PORT_SETTING.get(settings); - try (SecureString proxyUsername = PROXY_USERNAME_SETTING.get(settings); - SecureString proxyPassword = PROXY_PASSWORD_SETTING.get(settings)) { - - clientConfiguration - .withProxyHost(proxyHost) - .withProxyPort(proxyPort) - .withProxyUsername(proxyUsername.toString()) - .withProxyPassword(proxyPassword.toString()); - } + clientConfiguration.setProtocol(clientSettings.protocol); + if (Strings.hasText(clientSettings.proxyHost)) { + // TODO: remove this leniency, these settings should exist together and be validated + clientConfiguration.setProxyHost(clientSettings.proxyHost); + clientConfiguration.setProxyPort(clientSettings.proxyPort); + clientConfiguration.setProxyUsername(clientSettings.proxyUsername); + clientConfiguration.setProxyPassword(clientSettings.proxyPassword); } - // Increase the number of retries in case of 5xx API responses final Random rand = Randomness.get(); - RetryPolicy retryPolicy = new RetryPolicy( + final RetryPolicy retryPolicy = new RetryPolicy( RetryPolicy.RetryCondition.NO_RETRY_CONDITION, - new RetryPolicy.BackoffStrategy() { - @Override - public long delayBeforeNextRetry(AmazonWebServiceRequest originalRequest, - AmazonClientException exception, - int retriesAttempted) { - // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000) - logger.warn("EC2 API request failed, retry again. Reason was:", exception); - return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble())); - } + (originalRequest, exception, retriesAttempted) -> { + // with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000) + logger.warn("EC2 API request failed, retry again. 
Reason was:", exception); + return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble())); }, 10, false); clientConfiguration.setRetryPolicy(retryPolicy); - clientConfiguration.setSocketTimeout((int) READ_TIMEOUT_SETTING.get(settings).millis()); - + clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis); return clientConfiguration; } - protected static String findEndpoint(Logger logger, Settings settings) { - String endpoint = null; - if (ENDPOINT_SETTING.exists(settings)) { - endpoint = ENDPOINT_SETTING.get(settings); - logger.debug("using explicit ec2 endpoint [{}]", endpoint); + // pkg private for tests + static AWSCredentialsProvider buildCredentials(Logger logger, Ec2ClientSettings clientSettings) { + final BasicAWSCredentials credentials = clientSettings.credentials; + if (credentials == null) { + logger.debug("Using either environment variables, system properties or instance profile credentials"); + return new DefaultAWSCredentialsProviderChain(); + } else { + logger.debug("Using basic key/secret credentials"); + return new StaticCredentialsProvider(credentials); } - return endpoint; } @Override - public void close() throws IOException { - if (client != null) { - client.shutdown(); + public AmazonEc2Reference client() { + final LazyInitializable clientReference = this.lazyClientReference.get(); + if (clientReference == null) { + throw new IllegalStateException("Missing ec2 client configs"); } + return clientReference.getOrCompute(); + } - // Ensure that IdleConnectionReaper is shutdown + /** + * Refreshes the settings for the AmazonEC2 client. The new client will be build + * using these new settings. The old client is usable until released. On release it + * will be destroyed instead of being returned to the cache. 
+ */ + @Override + public void refreshAndClearCache(Ec2ClientSettings clientSettings) { + final LazyInitializable newClient = new LazyInitializable<>( + () -> new AmazonEc2Reference(buildClient(clientSettings)), clientReference -> clientReference.incRef(), + clientReference -> clientReference.decRef()); + final LazyInitializable oldClient = this.lazyClientReference.getAndSet(newClient); + if (oldClient != null) { + oldClient.reset(); + } + } + + @Override + public void close() { + final LazyInitializable clientReference = this.lazyClientReference.getAndSet(null); + if (clientReference != null) { + clientReference.reset(); + } + // shutdown IdleConnectionReaper background thread + // it will be restarted on new client usage IdleConnectionReaper.shutdown(); } + } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index f291413d408ed..2c536981b04c5 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -20,7 +20,6 @@ package org.elasticsearch.discovery.ec2; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.DescribeInstancesRequest; import com.amazonaws.services.ec2.model.DescribeInstancesResult; import com.amazonaws.services.ec2.model.Filter; @@ -59,7 +58,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos private final TransportService transportService; - private final AmazonEC2 client; + private final AwsEc2Service awsEc2Service; private final boolean bindAnyGroup; @@ -76,7 +75,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) { super(settings); this.transportService = transportService; - this.client = awsEc2Service.client(); + this.awsEc2Service = awsEc2Service; this.hostType = AwsEc2Service.HOST_TYPE_SETTING.get(settings); this.discoNodes = new DiscoNodesCache(AwsEc2Service.NODE_CACHE_TIME_SETTING.get(settings)); @@ -103,31 +102,31 @@ public List buildDynamicNodes() { protected List fetchDynamicNodes() { - List discoNodes = new ArrayList<>(); + final List discoNodes = new ArrayList<>(); - DescribeInstancesResult descInstances; - try { + final DescribeInstancesResult descInstances; + try (AmazonEc2Reference clientReference = awsEc2Service.client()) { // Query EC2 API based on AZ, instance state, and tag. // NOTE: we don't filter by security group during the describe instances request for two reasons: // 1. differences in VPCs require different parameters during query (ID vs Name) // 2. We want to use two different strategies: (all security groups vs. 
any security groups) - descInstances = SocketAccess.doPrivileged(() -> client.describeInstances(buildDescribeInstancesRequest())); - } catch (AmazonClientException e) { + descInstances = SocketAccess.doPrivileged(() -> clientReference.client().describeInstances(buildDescribeInstancesRequest())); + } catch (final AmazonClientException e) { logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage()); logger.debug("Full exception:", e); return discoNodes; } logger.trace("building dynamic unicast discovery nodes..."); - for (Reservation reservation : descInstances.getReservations()) { - for (Instance instance : reservation.getInstances()) { + for (final Reservation reservation : descInstances.getReservations()) { + for (final Instance instance : reservation.getInstances()) { // lets see if we can filter based on groups if (!groups.isEmpty()) { - List instanceSecurityGroups = instance.getSecurityGroups(); - List securityGroupNames = new ArrayList<>(instanceSecurityGroups.size()); - List securityGroupIds = new ArrayList<>(instanceSecurityGroups.size()); - for (GroupIdentifier sg : instanceSecurityGroups) { + final List instanceSecurityGroups = instance.getSecurityGroups(); + final List securityGroupNames = new ArrayList<>(instanceSecurityGroups.size()); + final List securityGroupIds = new ArrayList<>(instanceSecurityGroups.size()); + for (final GroupIdentifier sg : instanceSecurityGroups) { securityGroupNames.add(sg.getGroupName()); securityGroupIds.add(sg.getGroupId()); } @@ -162,10 +161,10 @@ && disjoint(securityGroupIds, groups)) { address = instance.getPublicIpAddress(); } else if (hostType.startsWith(TAG_PREFIX)) { // Reading the node host from its metadata - String tagName = hostType.substring(TAG_PREFIX.length()); + final String tagName = hostType.substring(TAG_PREFIX.length()); logger.debug("reading hostname from [{}] instance tag", tagName); - List tags = instance.getTags(); - for (Tag tag : tags) { + final List tags = instance.getTags(); + for (final Tag tag : tags) { if (tag.getKey().equals(tagName)) { address = tag.getValue(); logger.debug("using [{}] as the instance address", address); @@ -177,13 +176,13 @@ && disjoint(securityGroupIds, groups)) { if (address != null) { try { // we only limit to 1 port per address, makes no sense to ping 100 ports - TransportAddress[] addresses = transportService.addressesFromString(address, 1); + final TransportAddress[] addresses = transportService.addressesFromString(address, 1); for (int i = 0; i < addresses.length; i++) { logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]); discoNodes.add(new DiscoveryNode(instance.getInstanceId(), "#cloud-" + instance.getInstanceId() + "-" + i, addresses[i], emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion())); } - } catch (Exception e) { + } catch (final Exception e) { final String finalAddress = address; logger.warn( (Supplier) @@ -201,12 +200,12 @@ && disjoint(securityGroupIds, groups)) { } private DescribeInstancesRequest buildDescribeInstancesRequest() { - DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest() + final DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest() .withFilters( new Filter("instance-state-name").withValues("running", "pending") ); - for (Map.Entry> tagFilter : tags.entrySet()) { + for (final Map.Entry> tagFilter : tags.entrySet()) { // for a given tag key, OR relationship for multiple different values 
describeInstancesRequest.withFilters( new Filter("tag:" + tagFilter.getKey()).withValues(tagFilter.getValue()) @@ -238,7 +237,7 @@ protected boolean needsRefresh() { @Override protected List refresh() { - List nodes = fetchDynamicNodes(); + final List nodes = fetchDynamicNodes(); empty = nodes.isEmpty(); return nodes; } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java new file mode 100644 index 0000000000000..b42b0d546001a --- /dev/null +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2ClientSettings.java @@ -0,0 +1,145 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.discovery.ec2; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.Protocol; +import com.amazonaws.auth.BasicAWSCredentials; + +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.unit.TimeValue; +import java.util.Locale; + +/** + * A container for settings used to create an EC2 client. + */ +final class Ec2ClientSettings { + + /** The access key (ie login id) for connecting to ec2. */ + static final Setting ACCESS_KEY_SETTING = SecureSetting.secureString("discovery.ec2.access_key", null); + + /** The secret key (ie password) for connecting to ec2. */ + static final Setting SECRET_KEY_SETTING = SecureSetting.secureString("discovery.ec2.secret_key", null); + + /** The host name of a proxy to connect to ec2 through. */ + static final Setting PROXY_HOST_SETTING = Setting.simpleString("discovery.ec2.proxy.host", Property.NodeScope); + + /** The port of a proxy to connect to ec2 through. */ + static final Setting PROXY_PORT_SETTING = Setting.intSetting("discovery.ec2.proxy.port", 80, 0, 1 << 16, Property.NodeScope); + + /** An override for the ec2 endpoint to connect to. */ + static final Setting ENDPOINT_SETTING = new Setting<>("discovery.ec2.endpoint", "", s -> s.toLowerCase(Locale.ROOT), + Property.NodeScope); + + /** The protocol to use to connect to to ec2. */ + static final Setting PROTOCOL_SETTING = new Setting<>("discovery.ec2.protocol", "https", + s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); + + /** The username of a proxy to connect to s3 through. */ + static final Setting PROXY_USERNAME_SETTING = SecureSetting.secureString("discovery.ec2.proxy.username", null); + + /** The password of a proxy to connect to s3 through. 
*/ + static final Setting PROXY_PASSWORD_SETTING = SecureSetting.secureString("discovery.ec2.proxy.password", null); + + /** The socket timeout for connecting to s3. */ + static final Setting READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout", + TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope); + + /** Credentials to authenticate with ec2. */ + final BasicAWSCredentials credentials; + + /** + * The ec2 endpoint the client should talk to, or empty string to use the + * default. + */ + final String endpoint; + + /** The protocol to use to talk to ec2. Defaults to https. */ + final Protocol protocol; + + /** An optional proxy host that requests to ec2 should be made through. */ + final String proxyHost; + + /** The port number the proxy host should be connected on. */ + final int proxyPort; + + // these should be "secure" yet the api for the ec2 client only takes String, so + // storing them + // as SecureString here won't really help with anything + /** An optional username for the proxy host, for basic authentication. */ + final String proxyUsername; + + /** An optional password for the proxy host, for basic authentication. */ + final String proxyPassword; + + /** The read timeout for the ec2 client. */ + final int readTimeoutMillis; + + protected Ec2ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort, + String proxyUsername, String proxyPassword, int readTimeoutMillis) { + this.credentials = credentials; + this.endpoint = endpoint; + this.protocol = protocol; + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; + this.proxyUsername = proxyUsername; + this.proxyPassword = proxyPassword; + this.readTimeoutMillis = readTimeoutMillis; + } + + static BasicAWSCredentials loadCredentials(Settings settings) { + try (SecureString accessKey = ACCESS_KEY_SETTING.get(settings); + SecureString secretKey = SECRET_KEY_SETTING.get(settings);) { + if (accessKey.length() != 0) { + if (secretKey.length() != 0) { + return new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); + } else { + throw new IllegalArgumentException("Missing secret key for ec2 client."); + } + } else if (secretKey.length() != 0) { + throw new IllegalArgumentException("Missing access key for ec2 client."); + } + return null; + } + } + + // pkg private for tests + /** Parse settings for a single client. 
*/ + static Ec2ClientSettings getClientSettings(Settings settings) { + final BasicAWSCredentials credentials = loadCredentials(settings); + try (SecureString proxyUsername = PROXY_USERNAME_SETTING.get(settings); + SecureString proxyPassword = PROXY_PASSWORD_SETTING.get(settings)) { + return new Ec2ClientSettings( + credentials, + ENDPOINT_SETTING.get(settings), + PROTOCOL_SETTING.get(settings), + PROXY_HOST_SETTING.get(settings), + PROXY_PORT_SETTING.get(settings), + proxyUsername.toString(), + proxyPassword.toString(), + (int)READ_TIMEOUT_SETTING.get(settings).millis()); + } + } + +} diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java index 28d563e6a9ca6..9fc32ea306c0e 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java @@ -21,8 +21,6 @@ import com.amazonaws.util.json.Jackson; import org.apache.logging.log4j.Logger; -import org.elasticsearch.core.internal.io.IOUtils; -import org.apache.lucene.util.SetOnce; import org.elasticsearch.SpecialPermission; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.logging.Loggers; @@ -33,10 +31,10 @@ import org.elasticsearch.node.Node; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.transport.TransportService; import java.io.BufferedReader; -import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; @@ -52,7 +50,7 @@ import java.util.Map; import java.util.function.Supplier; -public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Closeable { +public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, ReloadablePlugin { private static Logger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class); public static final String EC2 = "ec2"; @@ -68,22 +66,27 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close // ClientConfiguration clinit has some classloader problems // TODO: fix that Class.forName("com.amazonaws.ClientConfiguration"); - } catch (ClassNotFoundException e) { + } catch (final ClassNotFoundException e) { throw new RuntimeException(e); } return null; }); } - private Settings settings; - // stashed when created in order to properly close - private final SetOnce ec2Service = new SetOnce<>(); + private final Settings settings; + // protected for testing + protected final AwsEc2Service ec2Service; public Ec2DiscoveryPlugin(Settings settings) { - this.settings = settings; + this(settings, new AwsEc2ServiceImpl(settings)); } - + protected Ec2DiscoveryPlugin(Settings settings, AwsEc2ServiceImpl ec2Service) { + this.settings = settings; + this.ec2Service = ec2Service; + // eagerly load client settings when secure settings are accessible + reload(settings); + } @Override public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) { @@ -94,25 +97,22 @@ public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings @Override public Map> getZenHostsProviders(TransportService transportService, NetworkService networkService) { - return Collections.singletonMap(EC2, () -> { - ec2Service.set(new AwsEc2ServiceImpl(settings)); - return new 
AwsEc2UnicastHostsProvider(settings, transportService, ec2Service.get()); - }); + return Collections.singletonMap(EC2, () -> new AwsEc2UnicastHostsProvider(settings, transportService, ec2Service)); } @Override public List> getSettings() { return Arrays.asList( // Register EC2 discovery settings: discovery.ec2 - AwsEc2Service.ACCESS_KEY_SETTING, - AwsEc2Service.SECRET_KEY_SETTING, - AwsEc2Service.ENDPOINT_SETTING, - AwsEc2Service.PROTOCOL_SETTING, - AwsEc2Service.PROXY_HOST_SETTING, - AwsEc2Service.PROXY_PORT_SETTING, - AwsEc2Service.PROXY_USERNAME_SETTING, - AwsEc2Service.PROXY_PASSWORD_SETTING, - AwsEc2Service.READ_TIMEOUT_SETTING, + Ec2ClientSettings.ACCESS_KEY_SETTING, + Ec2ClientSettings.SECRET_KEY_SETTING, + Ec2ClientSettings.ENDPOINT_SETTING, + Ec2ClientSettings.PROTOCOL_SETTING, + Ec2ClientSettings.PROXY_HOST_SETTING, + Ec2ClientSettings.PROXY_PORT_SETTING, + Ec2ClientSettings.PROXY_USERNAME_SETTING, + Ec2ClientSettings.PROXY_PASSWORD_SETTING, + Ec2ClientSettings.READ_TIMEOUT_SETTING, AwsEc2Service.HOST_TYPE_SETTING, AwsEc2Service.ANY_GROUP_SETTING, AwsEc2Service.GROUPS_SETTING, @@ -125,10 +125,10 @@ public List> getSettings() { @Override public Settings additionalSettings() { - Settings.Builder builder = Settings.builder(); + final Settings.Builder builder = Settings.builder(); // Adds a node attribute for the ec2 availability zone - String azMetadataUrl = AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"; + final String azMetadataUrl = AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone"; builder.put(getAvailabilityZoneNodeAttributes(settings, azMetadataUrl)); return builder.build(); } @@ -139,7 +139,7 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe if (AwsEc2Service.AUTO_ATTRIBUTE_SETTING.get(settings) == false) { return Settings.EMPTY; } - Settings.Builder attrs = Settings.builder(); + final Settings.Builder attrs = Settings.builder(); final URL url; final URLConnection urlConnection; @@ -148,7 +148,7 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe logger.debug("obtaining ec2 [placement/availability-zone] from ec2 meta-data url {}", url); urlConnection = SocketAccess.doPrivilegedIOException(url::openConnection); urlConnection.setConnectTimeout(2000); - } catch (IOException e) { + } catch (final IOException e) { // should not happen, we know the url is not malformed, and openConnection does not actually hit network throw new UncheckedIOException(e); } @@ -156,13 +156,13 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe try (InputStream in = SocketAccess.doPrivilegedIOException(urlConnection::getInputStream); BufferedReader urlReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) { - String metadataResult = urlReader.readLine(); - if (metadataResult == null || metadataResult.length() == 0) { + final String metadataResult = urlReader.readLine(); + if ((metadataResult == null) || (metadataResult.length() == 0)) { throw new IllegalStateException("no ec2 metadata returned from " + url); } else { attrs.put(Node.NODE_ATTRIBUTES.getKey() + "aws_availability_zone", metadataResult); } - } catch (IOException e) { + } catch (final IOException e) { // this is lenient so the plugin does not fail when installed outside of ec2 logger.error("failed to get metadata for [placement/availability-zone]", e); } @@ -172,6 +172,13 @@ static Settings getAvailabilityZoneNodeAttributes(Settings settings, String azMe @Override public void close() 
throws IOException { - IOUtils.close(ec2Service.get()); + ec2Service.close(); + } + + @Override + public void reload(Settings settings) { + // secure settings should be readable + final Ec2ClientSettings clientSettings = Ec2ClientSettings.getClientSettings(settings); + ec2Service.refreshAndClearCache(clientSettings); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java index 34ad449d06e8d..aa08447fd208b 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AmazonEC2Mock.java @@ -22,7 +22,9 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.AmazonWebServiceRequest; +import com.amazonaws.ClientConfiguration; import com.amazonaws.ResponseMetadata; +import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.regions.Region; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionRequest; @@ -528,9 +530,12 @@ public class AmazonEC2Mock implements AmazonEC2 { public static final String PREFIX_PRIVATE_DNS = "mock-ip-"; public static final String SUFFIX_PRIVATE_DNS = ".ec2.internal"; - List instances = new ArrayList<>(); + final List instances = new ArrayList<>(); + String endpoint; + final AWSCredentialsProvider credentials; + final ClientConfiguration configuration; - public AmazonEC2Mock(int nodes, List> tagsList) { + public AmazonEC2Mock(int nodes, List> tagsList, AWSCredentialsProvider credentials, ClientConfiguration configuration) { if (tagsList != null) { assert tagsList.size() == nodes; } @@ -552,7 +557,8 @@ public AmazonEC2Mock(int nodes, List> tagsList) { instances.add(instance); } - + this.credentials = credentials; + this.configuration = configuration; } @Override @@ -642,7 +648,7 @@ public DescribeInstancesResult describeInstances(DescribeInstancesRequest descri @Override public void setEndpoint(String endpoint) throws IllegalArgumentException { - throw new UnsupportedOperationException("Not supported in mock"); + this.endpoint = endpoint; } @Override @@ -2110,7 +2116,6 @@ public DryRunResult dryRun(DryRunSupporte @Override public void shutdown() { - throw new UnsupportedOperationException("Not supported in mock"); } @Override diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java index e5841fbc36ff9..a13fe47a632ae 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceImplTests.java @@ -26,31 +26,31 @@ import com.amazonaws.auth.DefaultAWSCredentialsProviderChain; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.discovery.ec2.AwsEc2Service; import org.elasticsearch.discovery.ec2.AwsEc2ServiceImpl; import org.elasticsearch.test.ESTestCase; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; public class AwsEc2ServiceImplTests extends ESTestCase { public void testAWSCredentialsWithSystemProviders() { - AWSCredentialsProvider credentialsProvider = 
AwsEc2ServiceImpl.buildCredentials(logger, Settings.EMPTY); + final AWSCredentialsProvider credentialsProvider = AwsEc2ServiceImpl.buildCredentials(logger, + Ec2ClientSettings.getClientSettings(Settings.EMPTY)); assertThat(credentialsProvider, instanceOf(DefaultAWSCredentialsProviderChain.class)); } public void testAWSCredentialsWithElasticsearchAwsSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("discovery.ec2.access_key", "aws_key"); secureSettings.setString("discovery.ec2.secret_key", "aws_secret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); launchAWSCredentialsWithElasticsearchSettingsTest(settings, "aws_key", "aws_secret"); } protected void launchAWSCredentialsWithElasticsearchSettingsTest(Settings settings, String expectedKey, String expectedSecret) { - AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, settings).getCredentials(); + final AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, Ec2ClientSettings.getClientSettings(settings)) + .getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is(expectedKey)); assertThat(credentials.getAWSSecretKey(), is(expectedSecret)); } @@ -61,10 +61,10 @@ public void testAWSDefaultConfiguration() { } public void testAWSConfigurationWithAwsSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("discovery.ec2.proxy.username", "aws_proxy_username"); secureSettings.setString("discovery.ec2.proxy.password", "aws_proxy_password"); - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put("discovery.ec2.protocol", "http") .put("discovery.ec2.proxy.host", "aws_proxy_host") .put("discovery.ec2.proxy.port", 8080) @@ -81,7 +81,8 @@ protected void launchAWSConfigurationTest(Settings settings, String expectedProxyUsername, String expectedProxyPassword, int expectedReadTimeout) { - ClientConfiguration configuration = AwsEc2ServiceImpl.buildConfiguration(logger, settings); + final ClientConfiguration configuration = AwsEc2ServiceImpl.buildConfiguration(logger, + Ec2ClientSettings.getClientSettings(settings)); assertThat(configuration.getResponseMetadataCacheSize(), is(0)); assertThat(configuration.getProtocol(), is(expectedProtocol)); @@ -92,16 +93,4 @@ protected void launchAWSConfigurationTest(Settings settings, assertThat(configuration.getSocketTimeout(), is(expectedReadTimeout)); } - public void testDefaultEndpoint() { - String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, Settings.EMPTY); - assertThat(endpoint, nullValue()); - } - - public void testSpecificEndpoint() { - Settings settings = Settings.builder() - .put(AwsEc2Service.ENDPOINT_SETTING.getKey(), "ec2.endpoint") - .build(); - String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, settings); - assertThat(endpoint, is("ec2.endpoint")); - } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java index e29821efda223..0596dd697b2eb 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java +++ 
b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/AwsEc2ServiceMock.java @@ -19,18 +19,19 @@ package org.elasticsearch.discovery.ec2; +import com.amazonaws.ClientConfiguration; +import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.Tag; -import org.elasticsearch.common.component.AbstractLifecycleComponent; + import org.elasticsearch.common.settings.Settings; import java.util.List; -public class AwsEc2ServiceMock extends AbstractLifecycleComponent implements AwsEc2Service { +public class AwsEc2ServiceMock extends AwsEc2ServiceImpl { - private int nodes; - private List> tagsList; - private AmazonEC2 client; + private final int nodes; + private final List> tagsList; public AwsEc2ServiceMock(Settings settings, int nodes, List> tagsList) { super(settings); @@ -39,26 +40,8 @@ public AwsEc2ServiceMock(Settings settings, int nodes, List> tagsList) } @Override - public synchronized AmazonEC2 client() { - if (client == null) { - client = new AmazonEC2Mock(nodes, tagsList); - } - - return client; + AmazonEC2 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + return new AmazonEC2Mock(nodes, tagsList, credentials, configuration); } - @Override - protected void doStart() { - - } - - @Override - protected void doStop() { - - } - - @Override - protected void doClose() { - - } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java similarity index 62% rename from plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java rename to plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java index a100079668b54..a92bd243bc9b7 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceDisableException.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginMock.java @@ -17,14 +17,22 @@ * under the License. 
*/ -package org.elasticsearch.repositories.azure; +package org.elasticsearch.discovery.ec2; -public class AzureServiceDisableException extends IllegalStateException { - public AzureServiceDisableException(String msg) { - super(msg); +import com.amazonaws.services.ec2.model.Tag; + +import org.elasticsearch.common.settings.Settings; + +import java.util.List; + +public class Ec2DiscoveryPluginMock extends Ec2DiscoveryPlugin { + + Ec2DiscoveryPluginMock(Settings settings) { + this(settings, 1, null); } - public AzureServiceDisableException(String msg, Throwable cause) { - super(msg, cause); + public Ec2DiscoveryPluginMock(Settings settings, int nodes, List> tagsList) { + super(settings, new AwsEc2ServiceMock(settings, nodes, tagsList)); } + } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java index 9bb75c0b09f97..6001ab56d5042 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPluginTests.java @@ -19,12 +19,17 @@ package org.elasticsearch.discovery.ec2; +import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; + import org.elasticsearch.discovery.ec2.AwsEc2Service; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.ec2.Ec2DiscoveryPlugin; import org.elasticsearch.node.Node; @@ -33,14 +38,14 @@ public class Ec2DiscoveryPluginTests extends ESTestCase { private Settings getNodeAttributes(Settings settings, String url) { - Settings realSettings = Settings.builder() + final Settings realSettings = Settings.builder() .put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), true) .put(settings).build(); return Ec2DiscoveryPlugin.getAvailabilityZoneNodeAttributes(realSettings, url); } private void assertNodeAttributes(Settings settings, String url, String expected) { - Settings additional = getNodeAttributes(settings, url); + final Settings additional = getNodeAttributes(settings, url); if (expected == null) { assertTrue(additional.isEmpty()); } else { @@ -49,36 +54,106 @@ private void assertNodeAttributes(Settings settings, String url, String expected } public void testNodeAttributesDisabled() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), false).build(); assertNodeAttributes(settings, "bogus", null); } public void testNodeAttributes() throws Exception { - Path zoneUrl = createTempFile(); + final Path zoneUrl = createTempFile(); Files.write(zoneUrl, Arrays.asList("us-east-1c")); assertNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString(), "us-east-1c"); } public void testNodeAttributesBogusUrl() { - UncheckedIOException e = expectThrows(UncheckedIOException.class, () -> + final UncheckedIOException e = expectThrows(UncheckedIOException.class, () -> getNodeAttributes(Settings.EMPTY, "bogus") ); assertNotNull(e.getCause()); - String msg = e.getCause().getMessage(); + final String msg = e.getCause().getMessage(); assertTrue(msg, msg.contains("no protocol: bogus")); } public void testNodeAttributesEmpty() throws Exception { - Path zoneUrl 
= createTempFile(); - IllegalStateException e = expectThrows(IllegalStateException.class, () -> + final Path zoneUrl = createTempFile(); + final IllegalStateException e = expectThrows(IllegalStateException.class, () -> getNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString()) ); assertTrue(e.getMessage(), e.getMessage().contains("no ec2 metadata returned")); } public void testNodeAttributesErrorLenient() throws Exception { - Path dne = createTempDir().resolve("dne"); + final Path dne = createTempDir().resolve("dne"); assertNodeAttributes(Settings.EMPTY, dne.toUri().toURL().toString(), null); } + + public void testDefaultEndpoint() throws IOException { + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + assertThat(endpoint, nullValue()); + } + } + + public void testSpecificEndpoint() throws IOException { + final Settings settings = Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2.endpoint").build(); + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings)) { + final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint; + assertThat(endpoint, is("ec2.endpoint")); + } + } + + public void testClientSettingsReInit() throws IOException { + final MockSecureSettings mockSecure1 = new MockSecureSettings(); + mockSecure1.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_1"); + mockSecure1.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_1"); + mockSecure1.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_1"); + mockSecure1.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_1"); + final Settings settings1 = Settings.builder() + .put(Ec2ClientSettings.PROXY_HOST_SETTING.getKey(), "proxy_host_1") + .put(Ec2ClientSettings.PROXY_PORT_SETTING.getKey(), 881) + .put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2_endpoint_1") + .setSecureSettings(mockSecure1) + .build(); + final MockSecureSettings mockSecure2 = new MockSecureSettings(); + mockSecure2.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_2"); + mockSecure2.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_2"); + mockSecure2.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_2"); + mockSecure2.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_2"); + final Settings settings2 = Settings.builder() + .put(Ec2ClientSettings.PROXY_HOST_SETTING.getKey(), "proxy_host_2") + .put(Ec2ClientSettings.PROXY_PORT_SETTING.getKey(), 882) + .put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2_endpoint_2") + .setSecureSettings(mockSecure2) + .build(); + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings1)) { + try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), 
is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + // reload secure settings2 + plugin.reload(settings2); + // client is not released, it is still using the old settings + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1")); + } + try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) { + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_2")); + assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(882)); + assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_2")); + } + } + } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index e7986cb878e41..43cc924fadb10 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -39,6 +39,7 @@ import org.junit.Before; import org.junit.BeforeClass; +import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.ArrayList; @@ -91,11 +92,15 @@ protected List buildDynamicNodes(Settings nodeSettings, int nodes } protected List buildDynamicNodes(Settings nodeSettings, int nodes, List> tagsList) { - AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(nodeSettings, nodes, tagsList); - AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, awsEc2Service); - List discoveryNodes = provider.buildDynamicNodes(); - logger.debug("--> nodes found: {}", discoveryNodes); - return discoveryNodes; + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { + AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service); + List discoveryNodes = provider.buildDynamicNodes(); + logger.debug("--> nodes found: {}", discoveryNodes); + return discoveryNodes; + } catch (IOException e) { + fail("Unexpected IOException"); + return null; + } } 
public void testDefaultSettings() throws InterruptedException { @@ -315,22 +320,23 @@ protected List fetchDynamicNodes() { public void testGetNodeListCached() throws Exception { Settings.Builder builder = Settings.builder() .put(AwsEc2Service.NODE_CACHE_TIME_SETTING.getKey(), "500ms"); - AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null); - DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, awsEc2Service) { - @Override - protected List fetchDynamicNodes() { - fetchCount++; - return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1); + try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) { + DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, plugin.ec2Service) { + @Override + protected List fetchDynamicNodes() { + fetchCount++; + return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1); + } + }; + for (int i=0; i<3; i++) { + provider.buildDynamicNodes(); } - }; - for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); - } - assertThat(provider.fetchCount, is(1)); - Thread.sleep(1_000L); // wait for cache to expire - for (int i=0; i<3; i++) { - provider.buildDynamicNodes(); + assertThat(provider.fetchCount, is(1)); + Thread.sleep(1_000L); // wait for cache to expire + for (int i=0; i<3; i++) { + provider.buildDynamicNodes(); + } + assertThat(provider.fetchCount, is(2)); } - assertThat(provider.fetchCount, is(2)); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java index fc6d9d7e482a8..bcd6b936af1aa 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureBlobStore.java @@ -20,46 +20,44 @@ package org.elasticsearch.repositories.azure; import com.microsoft.azure.storage.LocationMode; + import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cluster.metadata.RepositoryMetaData; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; - import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.nio.file.FileAlreadyExistsException; -import java.util.Locale; import java.util.Map; +import static java.util.Collections.emptyMap; + import static org.elasticsearch.repositories.azure.AzureRepository.Repository; public class AzureBlobStore extends AbstractComponent implements BlobStore { - private final AzureStorageService client; + private final AzureStorageService service; private final String clientName; - private final LocationMode locMode; private final String container; + private final LocationMode locationMode; - public AzureBlobStore(RepositoryMetaData metadata, Settings settings, - AzureStorageService client) throws URISyntaxException, StorageException { + public AzureBlobStore(RepositoryMetaData metadata, Settings settings, AzureStorageService service) + throws URISyntaxException, StorageException { super(settings); - this.client = client; this.container = 
Repository.CONTAINER_SETTING.get(metadata.settings()); this.clientName = Repository.CLIENT_NAME.get(metadata.settings()); - - String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); - if (Strings.hasLength(modeStr)) { - this.locMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); - } else { - this.locMode = LocationMode.PRIMARY_ONLY; - } + this.service = service; + // locationMode is set per repository, not per client + this.locationMode = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); + final Map prevSettings = this.service.refreshAndClearCache(emptyMap()); + final Map newSettings = AzureStorageSettings.overrideLocationMode(prevSettings, this.locationMode); + this.service.refreshAndClearCache(newSettings); } @Override @@ -71,7 +69,11 @@ public String toString() { * Gets the configured {@link LocationMode} for the Azure storage requests. */ public LocationMode getLocationMode() { - return locMode; + return locationMode; + } + + public String getClientName() { + return clientName; } @Override @@ -80,12 +82,13 @@ public BlobContainer blobContainer(BlobPath path) { } @Override - public void delete(BlobPath path) { - String keyPath = path.buildAsString(); + public void delete(BlobPath path) throws IOException { + final String keyPath = path.buildAsString(); try { - this.client.deleteFiles(this.clientName, this.locMode, container, keyPath); + service.deleteFiles(clientName, container, keyPath); } catch (URISyntaxException | StorageException e) { - logger.warn("can not remove [{}] in container {{}}: {}", keyPath, container, e.getMessage()); + logger.warn("cannot access [{}] in container {{}}: {}", keyPath, container, e.getMessage()); + throw new IOException(e); } } @@ -93,30 +96,29 @@ public void delete(BlobPath path) { public void close() { } - public boolean doesContainerExist() - { - return this.client.doesContainerExist(this.clientName, this.locMode, container); + public boolean containerExist() throws URISyntaxException, StorageException { + return service.doesContainerExist(clientName, container); } public boolean blobExists(String blob) throws URISyntaxException, StorageException { - return this.client.blobExists(this.clientName, this.locMode, container, blob); + return service.blobExists(clientName, container, blob); } public void deleteBlob(String blob) throws URISyntaxException, StorageException { - this.client.deleteBlob(this.clientName, this.locMode, container, blob); + service.deleteBlob(clientName, container, blob); } public InputStream getInputStream(String blob) throws URISyntaxException, StorageException, IOException { - return this.client.getInputStream(this.clientName, this.locMode, container, blob); + return service.getInputStream(clientName, container, blob); } public Map listBlobsByPrefix(String keyPath, String prefix) throws URISyntaxException, StorageException { - return this.client.listBlobsByPrefix(this.clientName, this.locMode, container, keyPath, prefix); + return service.listBlobsByPrefix(clientName, container, keyPath, prefix); } public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException, FileAlreadyExistsException { - this.client.writeBlob(this.clientName, this.locMode, container, blobName, inputStream, blobSize); + service.writeBlob(this.clientName, container, blobName, inputStream, blobSize); } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java 
b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 06bf10fb2e292..47b398a4c2fd3 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -21,6 +21,8 @@ import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; + +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; @@ -33,6 +35,7 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import org.elasticsearch.snapshots.SnapshotCreationException; import org.elasticsearch.snapshots.SnapshotId; import java.io.IOException; @@ -60,19 +63,19 @@ public class AzureRepository extends BlobStoreRepository { public static final String TYPE = "azure"; public static final class Repository { - @Deprecated // Replaced by client public static final Setting ACCOUNT_SETTING = new Setting<>("account", "default", Function.identity(), Property.NodeScope, Property.Deprecated); public static final Setting CLIENT_NAME = new Setting<>("client", ACCOUNT_SETTING, Function.identity()); - public static final Setting CONTAINER_SETTING = new Setting<>("container", "elasticsearch-snapshots", Function.identity(), Property.NodeScope); public static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path", Property.NodeScope); - public static final Setting LOCATION_MODE_SETTING = Setting.simpleString("location_mode", Property.NodeScope); + public static final Setting LOCATION_MODE_SETTING = new Setting<>("location_mode", + s -> LocationMode.PRIMARY_ONLY.toString(), s -> LocationMode.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope); public static final Setting CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE, Property.NodeScope); public static final Setting COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope); + public static final Setting READONLY_SETTING = Setting.boolSetting("readonly", false, Property.NodeScope); } private final AzureBlobStore blobStore; @@ -81,45 +84,32 @@ public static final class Repository { private final boolean compress; private final boolean readonly; - public AzureRepository(RepositoryMetaData metadata, Environment environment, - NamedXContentRegistry namedXContentRegistry, AzureStorageService storageService) - throws IOException, URISyntaxException, StorageException { + public AzureRepository(RepositoryMetaData metadata, Environment environment, NamedXContentRegistry namedXContentRegistry, + AzureStorageService storageService) throws IOException, URISyntaxException, StorageException { super(metadata, environment.settings(), namedXContentRegistry); - - blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); - String container = Repository.CONTAINER_SETTING.get(metadata.settings()); + this.blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); this.chunkSize = Repository.CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = Repository.COMPRESS_SETTING.get(metadata.settings()); - String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); - Boolean 
forcedReadonly = metadata.settings().getAsBoolean("readonly", null); // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. // For secondary_only setting, the repository should be read only - if (forcedReadonly == null) { - if (Strings.hasLength(modeStr)) { - LocationMode locationMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT)); - this.readonly = locationMode == LocationMode.SECONDARY_ONLY; - } else { - this.readonly = false; - } + if (Repository.READONLY_SETTING.exists(metadata.settings())) { + this.readonly = Repository.READONLY_SETTING.get(metadata.settings()); } else { - readonly = forcedReadonly; + this.readonly = this.blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY; } - - String basePath = Repository.BASE_PATH_SETTING.get(metadata.settings()); - + final String basePath = Strings.trimLeadingCharacter(Repository.BASE_PATH_SETTING.get(metadata.settings()), '/'); if (Strings.hasLength(basePath)) { // Remove starting / if any - basePath = Strings.trimLeadingCharacter(basePath, '/'); BlobPath path = new BlobPath(); - for(String elem : basePath.split("/")) { + for(final String elem : basePath.split("/")) { path = path.add(elem); } this.basePath = path; } else { this.basePath = BlobPath.cleanPath(); } - logger.debug("using container [{}], chunk_size [{}], compress [{}], base_path [{}]", - container, chunkSize, compress, basePath); + logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( + "using container [{}], chunk_size [{}], compress [{}], base_path [{}]", blobStore, chunkSize, compress, basePath)); } /** @@ -153,9 +143,13 @@ protected ByteSizeValue chunkSize() { @Override public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData clusterMetadata) { - if (blobStore.doesContainerExist() == false) { - throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. Please create it before " + - " creating an azure snapshot repository backed by it."); + try { + if (blobStore.containerExist() == false) { + throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. 
Please create it before " + + " creating an azure snapshot repository backed by it."); + } + } catch (URISyntaxException | StorageException e) { + throw new SnapshotCreationException(metadata.name(), snapshotId, e); } super.initializeSnapshot(snapshotId, indices, clusterMetadata); } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java index c0126cb8df065..f2702b139a69d 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepositoryPlugin.java @@ -21,12 +21,13 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; - import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -35,24 +36,20 @@ /** * A plugin to add a repository type that writes to and from the Azure cloud storage service. */ -public class AzureRepositoryPlugin extends Plugin implements RepositoryPlugin { - - private final Map clientsSettings; +public class AzureRepositoryPlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { - // overridable for tests - protected AzureStorageService createStorageService(Settings settings) { - return new AzureStorageServiceImpl(settings, clientsSettings); - } + // protected for testing + final AzureStorageService azureStoreService; public AzureRepositoryPlugin(Settings settings) { // eagerly load client settings so that secure settings are read - clientsSettings = AzureStorageSettings.load(settings); + this.azureStoreService = new AzureStorageServiceImpl(settings); } @Override public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) { return Collections.singletonMap(AzureRepository.TYPE, - (metadata) -> new AzureRepository(metadata, env, namedXContentRegistry, createStorageService(env.settings()))); + (metadata) -> new AzureRepository(metadata, env, namedXContentRegistry, azureStoreService)); } @Override @@ -67,4 +64,14 @@ public List> getSettings() { AzureStorageSettings.PROXY_PORT_SETTING ); } + + @Override + public void reload(Settings settings) { + // secure settings should be readable + final Map clientsSettings = AzureStorageSettings.load(settings); + if (clientsSettings.isEmpty()) { + throw new SettingsException("If you want to use an azure repository, you need to define a client configuration."); + } + azureStoreService.refreshAndClearCache(clientsSettings); + } } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java index 1c2ca71fe7887..272c550f1d723 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageService.java @@ -19,9 +19,12 @@ package org.elasticsearch.repositories.azure; -import 
com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.OperationContext; import com.microsoft.azure.storage.StorageException; +import com.microsoft.azure.storage.blob.CloudBlobClient; + import org.elasticsearch.common.blobstore.BlobMetaData; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -30,6 +33,7 @@ import java.net.URISyntaxException; import java.nio.file.FileAlreadyExistsException; import java.util.Map; +import java.util.function.Supplier; /** * Azure Storage Service interface @@ -37,29 +41,46 @@ */ public interface AzureStorageService { + /** + * Creates a {@code CloudBlobClient} on each invocation using the current client + * settings. CloudBlobClient is not thread safe and the settings can change, + * therefore the instance is not cache-able and should only be reused inside a + * thread for logically coupled ops. The {@code OperationContext} is used to + * specify the proxy, but a new context is *required* for each call. + */ + Tuple> client(String clientName); + + /** + * Updates settings for building clients. Any client cache is cleared. Future + * client requests will use the new refreshed settings. + * + * @param clientsSettings the settings for new clients + * @return the old settings + */ + Map refreshAndClearCache(Map clientsSettings); + ByteSizeValue MIN_CHUNK_SIZE = new ByteSizeValue(1, ByteSizeUnit.BYTES); ByteSizeValue MAX_CHUNK_SIZE = new ByteSizeValue(64, ByteSizeUnit.MB); - boolean doesContainerExist(String account, LocationMode mode, String container); + boolean doesContainerExist(String account, String container) throws URISyntaxException, StorageException; - void removeContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException; + void removeContainer(String account, String container) throws URISyntaxException, StorageException; - void createContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException; + void createContainer(String account, String container) throws URISyntaxException, StorageException; - void deleteFiles(String account, LocationMode mode, String container, String path) throws URISyntaxException, StorageException; + void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException; - boolean blobExists(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; + boolean blobExists(String account, String container, String blob) throws URISyntaxException, StorageException; - void deleteBlob(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException; + void deleteBlob(String account, String container, String blob) throws URISyntaxException, StorageException; - InputStream getInputStream(String account, LocationMode mode, String container, String blob) - throws URISyntaxException, StorageException, IOException; + InputStream getInputStream(String account, String container, String blob) throws URISyntaxException, StorageException, IOException; - Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) - throws URISyntaxException, StorageException; + Map listBlobsByPrefix(String account, String container, String keyPath, String prefix) + throws URISyntaxException, StorageException; - void writeBlob(String account, LocationMode mode, String 
container, String blobName, InputStream inputStream, long blobSize) throws - URISyntaxException, StorageException, FileAlreadyExistsException; + void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) + throws URISyntaxException, StorageException, FileAlreadyExistsException; static InputStream giveSocketPermissionsToStream(InputStream stream) { return new InputStream() { diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java index 9f059eaca11c6..e3f56323f3cbf 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageServiceImpl.java @@ -21,7 +21,6 @@ import com.microsoft.azure.storage.AccessCondition; import com.microsoft.azure.storage.CloudStorageAccount; -import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.OperationContext; import com.microsoft.azure.storage.RetryExponentialRetry; import com.microsoft.azure.storage.RetryPolicy; @@ -36,164 +35,133 @@ import com.microsoft.azure.storage.blob.DeleteSnapshotsOption; import com.microsoft.azure.storage.blob.ListBlobItem; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.repositories.RepositoryException; import java.io.InputStream; import java.net.HttpURLConnection; import java.net.URI; import java.net.URISyntaxException; +import java.security.InvalidKeyException; import java.nio.file.FileAlreadyExistsException; -import java.util.Collections; import java.util.EnumSet; -import java.util.HashMap; import java.util.Map; +import java.util.function.Supplier; + +import static java.util.Collections.emptyMap; public class AzureStorageServiceImpl extends AbstractComponent implements AzureStorageService { - final Map storageSettings; - final Map clients; + // 'package' for testing + volatile Map storageSettings = emptyMap(); - public AzureStorageServiceImpl(Settings settings, Map storageSettings) { + public AzureStorageServiceImpl(Settings settings) { super(settings); - if (storageSettings.isEmpty()) { - // If someone did not register any settings, they basically can't use the plugin - throw new IllegalArgumentException("If you want to use an azure repository, you need to define a client configuration."); - } - this.storageSettings = storageSettings; - this.clients = createClients(storageSettings); + // eagerly load client settings so that secure settings are read + final Map clientsSettings = AzureStorageSettings.load(settings); + refreshAndClearCache(clientsSettings); } - private Map createClients(final Map storageSettings) { - final Map clients = new HashMap<>(); - for (Map.Entry azureStorageEntry : storageSettings.entrySet()) { - final String clientName = azureStorageEntry.getKey(); - final AzureStorageSettings clientSettings = 
azureStorageEntry.getValue(); - try { - logger.trace("creating new Azure storage client with name [{}]", clientName); - String storageConnectionString = - "DefaultEndpointsProtocol=https;" - + "AccountName=" + clientSettings.getAccount() + ";" - + "AccountKey=" + clientSettings.getKey(); - - final String endpointSuffix = clientSettings.getEndpointSuffix(); - if (Strings.hasLength(endpointSuffix)) { - storageConnectionString += ";EndpointSuffix=" + endpointSuffix; - } - // Retrieve storage account from connection-string. - CloudStorageAccount storageAccount = CloudStorageAccount.parse(storageConnectionString); - - // Create the blob client. - CloudBlobClient client = storageAccount.createCloudBlobClient(); - - // Register the client - clients.put(clientSettings.getAccount(), client); - } catch (Exception e) { - logger.error(() -> new ParameterizedMessage("Can not create azure storage client [{}]", clientName), e); - } - } - return Collections.unmodifiableMap(clients); - } - - CloudBlobClient getSelectedClient(String clientName, LocationMode mode) { - logger.trace("selecting a client named [{}], mode [{}]", clientName, mode.name()); - AzureStorageSettings azureStorageSettings = this.storageSettings.get(clientName); + @Override + public Tuple> client(String clientName) { + final AzureStorageSettings azureStorageSettings = this.storageSettings.get(clientName); if (azureStorageSettings == null) { - throw new IllegalArgumentException("Unable to find client with name [" + clientName + "]"); + throw new SettingsException("Unable to find client with name [" + clientName + "]"); } - - CloudBlobClient client = this.clients.get(azureStorageSettings.getAccount()); - if (client == null) { - throw new IllegalArgumentException("No account defined for client with name [" + clientName + "]"); + try { + return new Tuple<>(buildClient(azureStorageSettings), () -> buildOperationContext(azureStorageSettings)); + } catch (InvalidKeyException | URISyntaxException | IllegalArgumentException e) { + throw new SettingsException("Invalid azure client settings with name [" + clientName + "]", e); } + } - // NOTE: for now, just set the location mode in case it is different; - // only one mode per storage clientName can be active at a time - client.getDefaultRequestOptions().setLocationMode(mode); - - // Set timeout option if the user sets cloud.azure.storage.timeout or cloud.azure.storage.xxx.timeout (it's negative by default) - if (azureStorageSettings.getTimeout().getSeconds() > 0) { - try { - int timeout = (int) azureStorageSettings.getTimeout().getMillis(); - client.getDefaultRequestOptions().setTimeoutIntervalInMs(timeout); - } catch (ClassCastException e) { - throw new IllegalArgumentException("Can not convert [" + azureStorageSettings.getTimeout() + - "]. 
It can not be longer than 2,147,483,647ms."); + protected CloudBlobClient buildClient(AzureStorageSettings azureStorageSettings) throws InvalidKeyException, URISyntaxException { + final CloudBlobClient client = createClient(azureStorageSettings); + // Set timeout option if the user sets cloud.azure.storage.timeout or + // cloud.azure.storage.xxx.timeout (it's negative by default) + final long timeout = azureStorageSettings.getTimeout().getMillis(); + if (timeout > 0) { + if (timeout > Integer.MAX_VALUE) { + throw new IllegalArgumentException("Timeout [" + azureStorageSettings.getTimeout() + "] exceeds 2,147,483,647ms."); } + client.getDefaultRequestOptions().setTimeoutIntervalInMs((int) timeout); } - // We define a default exponential retry policy - client.getDefaultRequestOptions().setRetryPolicyFactory( - new RetryExponentialRetry(RetryPolicy.DEFAULT_CLIENT_BACKOFF, azureStorageSettings.getMaxRetries())); - + client.getDefaultRequestOptions() + .setRetryPolicyFactory(new RetryExponentialRetry(RetryPolicy.DEFAULT_CLIENT_BACKOFF, azureStorageSettings.getMaxRetries())); + client.getDefaultRequestOptions().setLocationMode(azureStorageSettings.getLocationMode()); return client; } - private OperationContext generateOperationContext(String clientName) { - OperationContext context = new OperationContext(); - AzureStorageSettings azureStorageSettings = this.storageSettings.get(clientName); - - if (azureStorageSettings.getProxy() != null) { - context.setProxy(azureStorageSettings.getProxy()); - } + protected CloudBlobClient createClient(AzureStorageSettings azureStorageSettings) throws InvalidKeyException, URISyntaxException { + final String connectionString = azureStorageSettings.buildConnectionString(); + return CloudStorageAccount.parse(connectionString).createCloudBlobClient(); + } + protected OperationContext buildOperationContext(AzureStorageSettings azureStorageSettings) { + final OperationContext context = new OperationContext(); + context.setProxy(azureStorageSettings.getProxy()); return context; } @Override - public boolean doesContainerExist(String account, LocationMode mode, String container) { - try { - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - return SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, generateOperationContext(account))); - } catch (Exception e) { - logger.error("can not access container [{}]", container); - } - return false; + public Map refreshAndClearCache(Map clientsSettings) { + final Map prevSettings = this.storageSettings; + this.storageSettings = MapBuilder.newMapBuilder(clientsSettings).immutableMap(); + // clients are built lazily by {@link client(String)} + return prevSettings; + } + + @Override + public boolean doesContainerExist(String account, String container) throws URISyntaxException, StorageException { + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + return SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, client.v2().get())); } @Override - public void removeContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException { - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - logger.trace("removing container [{}]", container); - SocketAccess.doPrivilegedException(() -> 
blobContainer.deleteIfExists(null, null, generateOperationContext(account))); + public void removeContainer(String account, String container) throws URISyntaxException, StorageException { + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("removing container [{}]", container)); + SocketAccess.doPrivilegedException(() -> blobContainer.deleteIfExists(null, null, client.v2().get())); } @Override - public void createContainer(String account, LocationMode mode, String container) throws URISyntaxException, StorageException { + public void createContainer(String account, String container) throws URISyntaxException, StorageException { try { - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - logger.trace("creating container [{}]", container); - SocketAccess.doPrivilegedException(() -> blobContainer.createIfNotExists(null, null, generateOperationContext(account))); - } catch (IllegalArgumentException e) { - logger.trace((Supplier) () -> new ParameterizedMessage("fails creating container [{}]", container), e); + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("creating container [{}]", container)); + SocketAccess.doPrivilegedException(() -> blobContainer.createIfNotExists(null, null, client.v2().get())); + } catch (final IllegalArgumentException e) { + logger.trace(() -> new ParameterizedMessage("failed creating container [{}]", container), e); throw new RepositoryException(container, e.getMessage(), e); } } @Override - public void deleteFiles(String account, LocationMode mode, String container, String path) throws URISyntaxException, StorageException { - logger.trace("delete files container [{}], path [{}]", container, path); - - // Container name must be lower case. - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); + public void deleteFiles(String account, String container, String path) throws URISyntaxException, StorageException { + final Tuple> client = client(account); + // container name must be lower case. 
+ final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("delete files container [{}], path [{}]", container, path)); SocketAccess.doPrivilegedVoidException(() -> { if (blobContainer.exists()) { - // We list the blobs using a flat blob listing mode - for (ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null, - generateOperationContext(account))) { - String blobName = blobNameFromUri(blobItem.getUri()); - logger.trace("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri()); - deleteBlob(account, mode, container, blobName); + // list the blobs using a flat blob listing mode + for (final ListBlobItem blobItem : blobContainer.listBlobs(path, true, EnumSet.noneOf(BlobListingDetails.class), null, + client.v2().get())) { + final String blobName = blobNameFromUri(blobItem.getUri()); + logger.trace(() -> new ParameterizedMessage("removing blob [{}] full URI was [{}]", blobName, blobItem.getUri())); + // don't call {@code #deleteBlob}, use the same client + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blobName); + azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); } } }); @@ -205,85 +173,82 @@ public void deleteFiles(String account, LocationMode mode, String container, Str * @param uri URI to parse * @return The blob name relative to the container */ - public static String blobNameFromUri(URI uri) { - String path = uri.getPath(); - + static String blobNameFromUri(URI uri) { + final String path = uri.getPath(); // We remove the container name from the path // The 3 magic number cames from the fact if path is /container/path/to/myfile // First occurrence is empty "/" // Second occurrence is "container // Last part contains "path/to/myfile" which is what we want to get - String[] splits = path.split("/", 3); - + final String[] splits = path.split("/", 3); // We return the remaining end of the string return splits[2]; } @Override - public boolean blobExists(String account, LocationMode mode, String container, String blob) - throws URISyntaxException, StorageException { + public boolean blobExists(String account, String container, String blob) + throws URISyntaxException, StorageException { // Container name must be lower case. 
- CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - if (SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, generateOperationContext(account)))) { - CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); - return SocketAccess.doPrivilegedException(() -> azureBlob.exists(null, null, generateOperationContext(account))); - } - - return false; + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + return SocketAccess.doPrivilegedException(() -> { + if (blobContainer.exists(null, null, client.v2().get())) { + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); + return azureBlob.exists(null, null, client.v2().get()); + } + return false; + }); } @Override - public void deleteBlob(String account, LocationMode mode, String container, String blob) throws URISyntaxException, StorageException { - logger.trace("delete blob for container [{}], blob [{}]", container, blob); - + public void deleteBlob(String account, String container, String blob) throws URISyntaxException, StorageException { + final Tuple> client = client(account); // Container name must be lower case. - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - if (SocketAccess.doPrivilegedException(() -> blobContainer.exists(null, null, generateOperationContext(account)))) { - logger.trace("container [{}]: blob [{}] found. removing.", container, blob); - CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); - SocketAccess.doPrivilegedVoidException(() -> azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, - generateOperationContext(account))); - } + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("delete blob for container [{}], blob [{}]", container, blob)); + SocketAccess.doPrivilegedVoidException(() -> { + if (blobContainer.exists(null, null, client.v2().get())) { + final CloudBlockBlob azureBlob = blobContainer.getBlockBlobReference(blob); + logger.trace(() -> new ParameterizedMessage("container [{}]: blob [{}] found. 
removing.", container, blob)); + azureBlob.delete(DeleteSnapshotsOption.NONE, null, null, client.v2().get()); + } + }); } @Override - public InputStream getInputStream(String account, LocationMode mode, String container, String blob) throws URISyntaxException, + public InputStream getInputStream(String account, String container, String blob) throws URISyntaxException, StorageException { - logger.trace("reading container [{}], blob [{}]", container, blob); - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlockBlob blockBlobReference = client.getContainerReference(container).getBlockBlobReference(blob); - BlobInputStream is = SocketAccess.doPrivilegedException(() -> - blockBlobReference.openInputStream(null, null, generateOperationContext(account))); + final Tuple> client = client(account); + final CloudBlockBlob blockBlobReference = client.v1().getContainerReference(container).getBlockBlobReference(blob); + logger.trace(() -> new ParameterizedMessage("reading container [{}], blob [{}]", container, blob)); + final BlobInputStream is = SocketAccess.doPrivilegedException(() -> + blockBlobReference.openInputStream(null, null, client.v2().get())); return AzureStorageService.giveSocketPermissionsToStream(is); } @Override - public Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) + public Map listBlobsByPrefix(String account, String container, String keyPath, String prefix) throws URISyntaxException, StorageException { // NOTE: this should be here: if (prefix == null) prefix = ""; // however, this is really inefficient since deleteBlobsByPrefix enumerates everything and // then does a prefix match on the result; it should just call listBlobsByPrefix with the prefix! - - logger.debug("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix); - MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); - EnumSet enumBlobListingDetails = EnumSet.of(BlobListingDetails.METADATA); - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); + final MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); + final EnumSet enumBlobListingDetails = EnumSet.of(BlobListingDetails.METADATA); + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + logger.trace(() -> new ParameterizedMessage("listing container [{}], keyPath [{}], prefix [{}]", container, keyPath, prefix)); SocketAccess.doPrivilegedVoidException(() -> { if (blobContainer.exists()) { - for (ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? "" : prefix), false, - enumBlobListingDetails, null, generateOperationContext(account))) { - URI uri = blobItem.getUri(); - logger.trace("blob url [{}]", uri); - + for (final ListBlobItem blobItem : blobContainer.listBlobs(keyPath + (prefix == null ? 
"" : prefix), false, + enumBlobListingDetails, null, client.v2().get())) { + final URI uri = blobItem.getUri(); + logger.trace(() -> new ParameterizedMessage("blob url [{}]", uri)); // uri.getPath is of the form /container/keyPath.* and we want to strip off the /container/ // this requires 1 + container.length() + 1, with each 1 corresponding to one of the / - String blobPath = uri.getPath().substring(1 + container.length() + 1); - BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties(); - String name = blobPath.substring(keyPath.length()); - logger.trace("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength()); + final String blobPath = uri.getPath().substring(1 + container.length() + 1); + final BlobProperties properties = ((CloudBlockBlob) blobItem).getProperties(); + final String name = blobPath.substring(keyPath.length()); + logger.trace(() -> new ParameterizedMessage("blob url [{}], name [{}], size [{}]", uri, name, properties.getLength())); blobsBuilder.put(name, new PlainBlobMetaData(name, properties.getLength())); } } @@ -292,22 +257,23 @@ enumBlobListingDetails, null, generateOperationContext(account))) { } @Override - public void writeBlob(String account, LocationMode mode, String container, String blobName, InputStream inputStream, long blobSize) + public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException, FileAlreadyExistsException { - logger.trace("writeBlob({}, stream, {})", blobName, blobSize); - CloudBlobClient client = this.getSelectedClient(account, mode); - CloudBlobContainer blobContainer = client.getContainerReference(container); - CloudBlockBlob blob = blobContainer.getBlockBlobReference(blobName); + logger.trace(() -> new ParameterizedMessage("writeBlob({}, stream, {})", blobName, blobSize)); + final Tuple> client = client(account); + final CloudBlobContainer blobContainer = client.v1().getContainerReference(container); + final CloudBlockBlob blob = blobContainer.getBlockBlobReference(blobName); try { SocketAccess.doPrivilegedVoidException(() -> blob.upload(inputStream, blobSize, AccessCondition.generateIfNotExistsCondition(), - null, generateOperationContext(account))); - } catch (StorageException se) { + null, client.v2().get())); + } catch (final StorageException se) { if (se.getHttpStatusCode() == HttpURLConnection.HTTP_CONFLICT && StorageErrorCodeStrings.BLOB_ALREADY_EXISTS.equals(se.getErrorCode())) { throw new FileAlreadyExistsException(blobName, null, se.getMessage()); } throw se; } - logger.trace("writeBlob({}, stream, {}) - done", blobName, blobSize); + logger.trace(() -> new ParameterizedMessage("writeBlob({}, stream, {}) - done", blobName, blobSize)); } + } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java index e360558933cc1..c4e4c1439e45f 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java @@ -19,8 +19,10 @@ package org.elasticsearch.repositories.azure; +import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.RetryPolicy; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.collect.MapBuilder; import 
org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -29,7 +31,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; - import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Proxy; @@ -39,7 +40,7 @@ import java.util.Locale; import java.util.Map; -public final class AzureStorageSettings { +final class AzureStorageSettings { // prefix for azure client settings private static final String AZURE_CLIENT_PREFIX_KEY = "azure.client."; @@ -86,22 +87,33 @@ public final class AzureStorageSettings { private final TimeValue timeout; private final int maxRetries; private final Proxy proxy; + private final LocationMode locationMode; + // copy-constructor + private AzureStorageSettings(String account, String key, String endpointSuffix, TimeValue timeout, int maxRetries, Proxy proxy, + LocationMode locationMode) { + this.account = account; + this.key = key; + this.endpointSuffix = endpointSuffix; + this.timeout = timeout; + this.maxRetries = maxRetries; + this.proxy = proxy; + this.locationMode = locationMode; + } - public AzureStorageSettings(String account, String key, String endpointSuffix, TimeValue timeout, int maxRetries, + AzureStorageSettings(String account, String key, String endpointSuffix, TimeValue timeout, int maxRetries, Proxy.Type proxyType, String proxyHost, Integer proxyPort) { this.account = account; this.key = key; this.endpointSuffix = endpointSuffix; this.timeout = timeout; this.maxRetries = maxRetries; - // Register the proxy if we have any // Validate proxy settings - if (proxyType.equals(Proxy.Type.DIRECT) && (proxyPort != 0 || Strings.hasText(proxyHost))) { + if (proxyType.equals(Proxy.Type.DIRECT) && ((proxyPort != 0) || Strings.hasText(proxyHost))) { throw new SettingsException("Azure Proxy port or host have been set but proxy type is not defined."); } - if (proxyType.equals(Proxy.Type.DIRECT) == false && (proxyPort == 0 || Strings.isEmpty(proxyHost))) { + if ((proxyType.equals(Proxy.Type.DIRECT) == false) && ((proxyPort == 0) || Strings.isEmpty(proxyHost))) { throw new SettingsException("Azure Proxy type has been set but proxy host or port is not defined."); } @@ -110,10 +122,11 @@ public AzureStorageSettings(String account, String key, String endpointSuffix, T } else { try { proxy = new Proxy(proxyType, new InetSocketAddress(InetAddress.getByName(proxyHost), proxyPort)); - } catch (UnknownHostException e) { + } catch (final UnknownHostException e) { throw new SettingsException("Azure proxy host is unknown.", e); } } + this.locationMode = LocationMode.PRIMARY_ONLY; } public String getKey() { @@ -140,37 +153,55 @@ public Proxy getProxy() { return proxy; } + public String buildConnectionString() { + final StringBuilder connectionStringBuilder = new StringBuilder(); + connectionStringBuilder.append("DefaultEndpointsProtocol=https") + .append(";AccountName=") + .append(account) + .append(";AccountKey=") + .append(key); + if (Strings.hasText(endpointSuffix)) { + connectionStringBuilder.append(";EndpointSuffix=").append(endpointSuffix); + } + return connectionStringBuilder.toString(); + } + + public LocationMode getLocationMode() { + return locationMode; + } + @Override public String toString() { final StringBuilder sb = new StringBuilder("AzureStorageSettings{"); - sb.append(", account='").append(account).append('\''); + 
sb.append("account='").append(account).append('\''); sb.append(", key='").append(key).append('\''); sb.append(", timeout=").append(timeout); sb.append(", endpointSuffix='").append(endpointSuffix).append('\''); sb.append(", maxRetries=").append(maxRetries); sb.append(", proxy=").append(proxy); + sb.append(", locationMode='").append(locationMode).append('\''); sb.append('}'); return sb.toString(); } /** - * Parses settings and read all settings available under azure.client.* + * Parse and read all settings available under the azure.client.* namespace * @param settings settings to parse * @return All the named configurations */ public static Map load(Settings settings) { // Get the list of existing named configurations - Map storageSettings = new HashMap<>(); - for (String clientName : ACCOUNT_SETTING.getNamespaces(settings)) { + final Map storageSettings = new HashMap<>(); + for (final String clientName : ACCOUNT_SETTING.getNamespaces(settings)) { storageSettings.put(clientName, getClientSettings(settings, clientName)); } - - if (storageSettings.containsKey("default") == false && storageSettings.isEmpty() == false) { + if (false == storageSettings.containsKey("default") && false == storageSettings.isEmpty()) { // in case no setting named "default" has been set, let's define our "default" // as the first named config we get - AzureStorageSettings defaultSettings = storageSettings.values().iterator().next(); + final AzureStorageSettings defaultSettings = storageSettings.values().iterator().next(); storageSettings.put("default", defaultSettings); } + assert storageSettings.containsKey("default") || storageSettings.isEmpty() : "always have 'default' if any"; return Collections.unmodifiableMap(storageSettings); } @@ -191,13 +222,25 @@ static AzureStorageSettings getClientSettings(Settings settings, String clientNa private static T getConfigValue(Settings settings, String clientName, Setting.AffixSetting clientSetting) { - Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); } public static T getValue(Settings settings, String groupName, Setting setting) { - Setting.AffixKey k = (Setting.AffixKey) setting.getRawKey(); - String fullKey = k.toConcreteKey(groupName).toString(); + final Setting.AffixKey k = (Setting.AffixKey) setting.getRawKey(); + final String fullKey = k.toConcreteKey(groupName).toString(); return setting.getConcreteSetting(fullKey).get(settings); } + + static Map overrideLocationMode(Map clientsSettings, + LocationMode locationMode) { + final MapBuilder mapBuilder = new MapBuilder<>(); + for (final Map.Entry entry : clientsSettings.entrySet()) { + final AzureStorageSettings azureSettings = new AzureStorageSettings(entry.getValue().account, entry.getValue().key, + entry.getValue().endpointSuffix, entry.getValue().timeout, entry.getValue().maxRetries, entry.getValue().proxy, + locationMode); + mapBuilder.put(entry.getKey(), azureSettings); + } + return mapBuilder.immutableMap(); + } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index 26b02278eddc0..639905042cf87 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ 
b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -34,6 +34,7 @@ import java.net.URISyntaxException; import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; public class AzureRepositorySettingsTests extends ESTestCase { @@ -44,7 +45,7 @@ private AzureRepository azureRepository(Settings settings) throws StorageExcepti .put(settings) .build(); return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), - TestEnvironment.newEnvironment(internalSettings), NamedXContentRegistry.EMPTY, null); + TestEnvironment.newEnvironment(internalSettings), NamedXContentRegistry.EMPTY, mock(AzureStorageService.class)); } public void testReadonlyDefault() throws StorageException, IOException, URISyntaxException { diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java index 439a9d567f1a4..10163bb2f31df 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureSnapshotRestoreTests.java @@ -19,9 +19,7 @@ package org.elasticsearch.repositories.azure; - import com.carrotsearch.randomizedtesting.RandomizedTest; -import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.StorageException; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; @@ -77,9 +75,9 @@ private static Settings.Builder generateMockSettings() { return Settings.builder().setSecureSettings(generateMockSecureSettings()); } + @SuppressWarnings("resource") private static AzureStorageService getAzureStorageService() { - return new AzureStorageServiceImpl(generateMockSettings().build(), - AzureStorageSettings.load(generateMockSettings().build())); + return new AzureRepositoryPlugin(generateMockSettings().build()).azureStoreService; } @Override @@ -94,7 +92,7 @@ private static String getContainerName() { * there mustn't be a hyphen between the 2 concatenated numbers * (can't have 2 consecutives hyphens on Azure containers) */ - String testName = "snapshot-itest-" + final String testName = "snapshot-itest-" .concat(RandomizedTest.getContext().getRunnerSeedAsString().toLowerCase(Locale.ROOT)); return testName.contains(" ") ? 
Strings.split(testName, " ")[0] : testName; } @@ -123,7 +121,7 @@ private static void createTestContainer(String containerName) throws Exception { // It could happen that we run this test really close to a previous one // so we might need some time to be able to create the container assertBusy(() -> { - getAzureStorageService().createContainer("default", LocationMode.PRIMARY_ONLY, containerName); + getAzureStorageService().createContainer("default", containerName); }, 30, TimeUnit.SECONDS); } @@ -132,7 +130,7 @@ private static void createTestContainer(String containerName) throws Exception { * @param containerName container name to use */ private static void removeTestContainer(String containerName) throws URISyntaxException, StorageException { - getAzureStorageService().removeContainer("default", LocationMode.PRIMARY_ONLY, containerName); + getAzureStorageService().removeContainer("default", containerName); } @Override @@ -141,7 +139,7 @@ protected Collection> nodePlugins() { } private String getRepositoryPath() { - String testName = "it-" + getTestName(); + final String testName = "it-" + getTestName(); return testName.contains(" ") ? Strings.split(testName, " ")[0] : testName; } @@ -159,21 +157,21 @@ public Settings indexSettings() { public final void wipeAzureRepositories() { try { client().admin().cluster().prepareDeleteRepository("*").get(); - } catch (RepositoryMissingException ignored) { + } catch (final RepositoryMissingException ignored) { } } public void testMultipleRepositories() { - Client client = client(); + final Client client = client(); logger.info("--> creating azure repository with path [{}]", getRepositoryPath()); - PutRepositoryResponse putRepositoryResponse1 = client.admin().cluster().preparePutRepository("test-repo1") + final PutRepositoryResponse putRepositoryResponse1 = client.admin().cluster().preparePutRepository("test-repo1") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName().concat("-1")) .put(Repository.BASE_PATH_SETTING.getKey(), getRepositoryPath()) .put(Repository.CHUNK_SIZE_SETTING.getKey(), randomIntBetween(1000, 10000), ByteSizeUnit.BYTES) ).get(); assertThat(putRepositoryResponse1.isAcknowledged(), equalTo(true)); - PutRepositoryResponse putRepositoryResponse2 = client.admin().cluster().preparePutRepository("test-repo2") + final PutRepositoryResponse putRepositoryResponse2 = client.admin().cluster().preparePutRepository("test-repo2") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName().concat("-2")) .put(Repository.BASE_PATH_SETTING.getKey(), getRepositoryPath()) @@ -194,14 +192,14 @@ public void testMultipleRepositories() { assertThat(client.prepareSearch("test-idx-2").setSize(0).get().getHits().getTotalHits(), equalTo(100L)); logger.info("--> snapshot 1"); - CreateSnapshotResponse createSnapshotResponse1 = client.admin().cluster().prepareCreateSnapshot("test-repo1", "test-snap") + final CreateSnapshotResponse createSnapshotResponse1 = client.admin().cluster().prepareCreateSnapshot("test-repo1", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-1").get(); assertThat(createSnapshotResponse1.getSnapshotInfo().successfulShards(), greaterThan(0)); assertThat(createSnapshotResponse1.getSnapshotInfo().successfulShards(), equalTo(createSnapshotResponse1.getSnapshotInfo().totalShards())); logger.info("--> snapshot 2"); - CreateSnapshotResponse createSnapshotResponse2 = client.admin().cluster().prepareCreateSnapshot("test-repo2", 
"test-snap") + final CreateSnapshotResponse createSnapshotResponse2 = client.admin().cluster().prepareCreateSnapshot("test-repo2", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-2").get(); assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), greaterThan(0)); assertThat(createSnapshotResponse2.getSnapshotInfo().successfulShards(), @@ -216,7 +214,7 @@ public void testMultipleRepositories() { logger.info("--> delete indices"); cluster().wipeIndices("test-idx-1", "test-idx-2"); logger.info("--> restore one index after deletion from snapshot 1"); - RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin().cluster().prepareRestoreSnapshot("test-repo1", "test-snap") + final RestoreSnapshotResponse restoreSnapshotResponse1 = client.admin().cluster().prepareRestoreSnapshot("test-repo1", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-1").get(); assertThat(restoreSnapshotResponse1.getRestoreInfo().totalShards(), greaterThan(0)); ensureGreen(); @@ -226,7 +224,7 @@ public void testMultipleRepositories() { assertThat(clusterState.getMetaData().hasIndex("test-idx-2"), equalTo(false)); logger.info("--> restore other index after deletion from snapshot 2"); - RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin().cluster().prepareRestoreSnapshot("test-repo2", "test-snap") + final RestoreSnapshotResponse restoreSnapshotResponse2 = client.admin().cluster().prepareRestoreSnapshot("test-repo2", "test-snap") .setWaitForCompletion(true).setIndices("test-idx-2").get(); assertThat(restoreSnapshotResponse2.getRestoreInfo().totalShards(), greaterThan(0)); ensureGreen(); @@ -252,7 +250,7 @@ public void testListBlobs_26() throws StorageException, URISyntaxException { } refresh(); - ClusterAdminClient client = client().admin().cluster(); + final ClusterAdminClient client = client().admin().cluster(); logger.info("--> creating azure repository without any path"); PutRepositoryResponse putRepositoryResponse = client.preparePutRepository(repositoryName).setType("azure") .setSettings(Settings.builder() @@ -300,9 +298,9 @@ public void testListBlobs_26() throws StorageException, URISyntaxException { */ public void testGetDeleteNonExistingSnapshot_28() throws StorageException, URISyntaxException { final String repositoryName="test-repo-28"; - ClusterAdminClient client = client().admin().cluster(); + final ClusterAdminClient client = client().admin().cluster(); logger.info("--> creating azure repository without any path"); - PutRepositoryResponse putRepositoryResponse = client.preparePutRepository(repositoryName).setType("azure") + final PutRepositoryResponse putRepositoryResponse = client.preparePutRepository(repositoryName).setType("azure") .setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) ).get(); @@ -311,14 +309,14 @@ public void testGetDeleteNonExistingSnapshot_28() throws StorageException, URISy try { client.prepareGetSnapshots(repositoryName).addSnapshots("nonexistingsnapshotname").get(); fail("Shouldn't be here"); - } catch (SnapshotMissingException ex) { + } catch (final SnapshotMissingException ex) { // Expected } try { client.prepareDeleteSnapshot(repositoryName, "nonexistingsnapshotname").get(); fail("Shouldn't be here"); - } catch (SnapshotMissingException ex) { + } catch (final SnapshotMissingException ex) { // Expected } } @@ -328,9 +326,9 @@ public void testGetDeleteNonExistingSnapshot_28() throws StorageException, URISy */ public void testNonExistingRepo_23() { final String repositoryName = 
"test-repo-test23"; - Client client = client(); + final Client client = client(); logger.info("--> creating azure repository with path [{}]", getRepositoryPath()); - PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository(repositoryName) + final PutRepositoryResponse putRepositoryResponse = client.admin().cluster().preparePutRepository(repositoryName) .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) .put(Repository.BASE_PATH_SETTING.getKey(), getRepositoryPath()) @@ -342,7 +340,7 @@ public void testNonExistingRepo_23() { try { client.admin().cluster().prepareRestoreSnapshot(repositoryName, "no-existing-snapshot").setWaitForCompletion(true).get(); fail("Shouldn't be here"); - } catch (SnapshotRestoreException ex) { + } catch (final SnapshotRestoreException ex) { // Expected } } @@ -356,7 +354,7 @@ public void testRemoveAndCreateContainer() throws Exception { createTestContainer(container); removeTestContainer(container); - ClusterAdminClient client = client().admin().cluster(); + final ClusterAdminClient client = client().admin().cluster(); logger.info("--> creating azure repository while container is being removed"); try { client.preparePutRepository("test-repo").setType("azure") @@ -364,7 +362,7 @@ public void testRemoveAndCreateContainer() throws Exception { .put(Repository.CONTAINER_SETTING.getKey(), container) ).get(); fail("we should get a RepositoryVerificationException"); - } catch (RepositoryVerificationException e) { + } catch (final RepositoryVerificationException e) { // Fine we expect that } } @@ -378,9 +376,9 @@ public void testRemoveAndCreateContainer() throws Exception { * @throws Exception If anything goes wrong */ public void testGeoRedundantStorage() throws Exception { - Client client = client(); + final Client client = client(); logger.info("--> creating azure primary repository"); - PutRepositoryResponse putRepositoryResponsePrimary = client.admin().cluster().preparePutRepository("primary") + final PutRepositoryResponse putRepositoryResponsePrimary = client.admin().cluster().preparePutRepository("primary") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) ).get(); @@ -394,7 +392,7 @@ public void testGeoRedundantStorage() throws Exception { assertThat(endWait - startWait, lessThanOrEqualTo(30000L)); logger.info("--> creating azure secondary repository"); - PutRepositoryResponse putRepositoryResponseSecondary = client.admin().cluster().preparePutRepository("secondary") + final PutRepositoryResponse putRepositoryResponseSecondary = client.admin().cluster().preparePutRepository("secondary") .setType("azure").setSettings(Settings.builder() .put(Repository.CONTAINER_SETTING.getKey(), getContainerName()) .put(Repository.LOCATION_MODE_SETTING.getKey(), "secondary_only") diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java index 4b111e549476c..a680af06fc655 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceMock.java @@ -19,11 +19,14 @@ package org.elasticsearch.repositories.azure; -import com.microsoft.azure.storage.LocationMode; +import com.microsoft.azure.storage.OperationContext; 
import com.microsoft.azure.storage.StorageException; +import com.microsoft.azure.storage.blob.CloudBlobClient; + import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.internal.io.Streams; @@ -40,6 +43,9 @@ import java.util.Locale; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Supplier; + +import static java.util.Collections.emptyMap; /** * In memory storage for unit tests @@ -53,44 +59,44 @@ public AzureStorageServiceMock() { } @Override - public boolean doesContainerExist(String account, LocationMode mode, String container) { + public boolean doesContainerExist(String account, String container) { return true; } @Override - public void removeContainer(String account, LocationMode mode, String container) { + public void removeContainer(String account, String container) { } @Override - public void createContainer(String account, LocationMode mode, String container) { + public void createContainer(String account, String container) { } @Override - public void deleteFiles(String account, LocationMode mode, String container, String path) { - final Map blobs = listBlobsByPrefix(account, mode, container, path, null); - blobs.keySet().forEach(key -> deleteBlob(account, mode, container, key)); + public void deleteFiles(String account, String container, String path) { + final Map blobs = listBlobsByPrefix(account, container, path, null); + blobs.keySet().forEach(key -> deleteBlob(account, container, key)); } @Override - public boolean blobExists(String account, LocationMode mode, String container, String blob) { + public boolean blobExists(String account, String container, String blob) { return blobs.containsKey(blob); } @Override - public void deleteBlob(String account, LocationMode mode, String container, String blob) { + public void deleteBlob(String account, String container, String blob) { blobs.remove(blob); } @Override - public InputStream getInputStream(String account, LocationMode mode, String container, String blob) throws IOException { - if (!blobExists(account, mode, container, blob)) { + public InputStream getInputStream(String account, String container, String blob) throws IOException { + if (!blobExists(account, container, blob)) { throw new NoSuchFileException("missing blob [" + blob + "]"); } return AzureStorageService.giveSocketPermissionsToStream(new PermissionRequiringInputStream(blobs.get(blob).toByteArray())); } @Override - public Map listBlobsByPrefix(String account, LocationMode mode, String container, String keyPath, String prefix) { + public Map listBlobsByPrefix(String account, String container, String keyPath, String prefix) { MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); blobs.forEach((String blobName, ByteArrayOutputStream bos) -> { final String checkBlob; @@ -108,7 +114,7 @@ public Map listBlobsByPrefix(String account, LocationMode } @Override - public void writeBlob(String account, LocationMode mode, String container, String blobName, InputStream inputStream, long blobSize) + public void writeBlob(String account, String container, String blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException, FileAlreadyExistsException { if (blobs.containsKey(blobName)) { throw new 
FileAlreadyExistsException(blobName); @@ -168,4 +174,14 @@ public synchronized int read(byte[] b, int off, int len) { return super.read(b, off, len); } } + + @Override + public Tuple> client(String clientName) { + return null; + } + + @Override + public Map refreshAndClearCache(Map clientsSettings) { + return emptyMap(); + } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java index 447826dbf833f..3308db682fece 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureStorageServiceTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.azure; -import com.microsoft.azure.storage.LocationMode; import com.microsoft.azure.storage.RetryExponentialRetry; import com.microsoft.azure.storage.blob.CloudBlobClient; import com.microsoft.azure.storage.core.Base64; @@ -28,6 +27,7 @@ import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.Proxy; @@ -35,7 +35,6 @@ import java.net.URISyntaxException; import java.net.UnknownHostException; import java.nio.charset.StandardCharsets; -import java.util.Collections; import java.util.Map; import static org.elasticsearch.repositories.azure.AzureStorageServiceImpl.blobNameFromUri; @@ -50,17 +49,10 @@ public class AzureStorageServiceTests extends ESTestCase { public void testReadSecuredSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("azure.client.azure1.account", "myaccount1"); - secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1")); - secureSettings.setString("azure.client.azure2.account", "myaccount2"); - secureSettings.setString("azure.client.azure2.key", encodeKey("mykey2")); - secureSettings.setString("azure.client.azure3.account", "myaccount3"); - secureSettings.setString("azure.client.azure3.key", encodeKey("mykey3")); - Settings settings = Settings.builder().setSecureSettings(secureSettings) + final Settings settings = Settings.builder().setSecureSettings(buildSecureSettings()) .put("azure.client.azure3.endpoint_suffix", "my_endpoint_suffix").build(); - Map loadedSettings = AzureStorageSettings.load(settings); + final Map loadedSettings = AzureStorageSettings.load(settings); assertThat(loadedSettings.keySet(), containsInAnyOrder("azure1","azure2","azure3","default")); assertThat(loadedSettings.get("azure1").getEndpointSuffix(), isEmptyString()); @@ -68,95 +60,161 @@ public void testReadSecuredSettings() { assertThat(loadedSettings.get("azure3").getEndpointSuffix(), equalTo("my_endpoint_suffix")); } - public void testCreateClientWithEndpointSuffix() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("azure.client.azure1.account", "myaccount1"); - secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1")); - secureSettings.setString("azure.client.azure2.account", "myaccount2"); - secureSettings.setString("azure.client.azure2.key", encodeKey("mykey2")); - Settings settings = Settings.builder().setSecureSettings(secureSettings) + public void testCreateClientWithEndpointSuffix() throws IOException { + final Settings settings = 
Settings.builder().setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.endpoint_suffix", "my_endpoint_suffix").build(); - AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(settings, AzureStorageSettings.load(settings)); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); - assertThat(client1.getEndpoint().toString(), equalTo("https://myaccount1.blob.my_endpoint_suffix")); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); + assertThat(client1.getEndpoint().toString(), equalTo("https://myaccount1.blob.my_endpoint_suffix")); + final CloudBlobClient client2 = azureStorageService.client("azure2").v1(); + assertThat(client2.getEndpoint().toString(), equalTo("https://myaccount2.blob.core.windows.net")); + } + } - CloudBlobClient client2 = azureStorageService.getSelectedClient("azure2", LocationMode.PRIMARY_ONLY); - assertThat(client2.getEndpoint().toString(), equalTo("https://myaccount2.blob.core.windows.net")); + public void testReinitClientSettings() throws IOException { + final MockSecureSettings secureSettings1 = new MockSecureSettings(); + secureSettings1.setString("azure.client.azure1.account", "myaccount11"); + secureSettings1.setString("azure.client.azure1.key", encodeKey("mykey11")); + secureSettings1.setString("azure.client.azure2.account", "myaccount12"); + secureSettings1.setString("azure.client.azure2.key", encodeKey("mykey12")); + final Settings settings1 = Settings.builder().setSecureSettings(secureSettings1).build(); + final MockSecureSettings secureSettings2 = new MockSecureSettings(); + secureSettings2.setString("azure.client.azure1.account", "myaccount21"); + secureSettings2.setString("azure.client.azure1.key", encodeKey("mykey21")); + secureSettings2.setString("azure.client.azure3.account", "myaccount23"); + secureSettings2.setString("azure.client.azure3.key", encodeKey("mykey23")); + final Settings settings2 = Settings.builder().setSecureSettings(secureSettings2).build(); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings1)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client11 = azureStorageService.client("azure1").v1(); + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount11.blob.core.windows.net")); + final CloudBlobClient client12 = azureStorageService.client("azure2").v1(); + assertThat(client12.getEndpoint().toString(), equalTo("https://myaccount12.blob.core.windows.net")); + // client 3 is missing + final SettingsException e1 = expectThrows(SettingsException.class, () -> azureStorageService.client("azure3")); + assertThat(e1.getMessage(), is("Unable to find client with name [azure3]")); + // update client settings + plugin.reload(settings2); + // old client 1 not changed + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount11.blob.core.windows.net")); + // new client 1 is changed + final CloudBlobClient client21 = azureStorageService.client("azure1").v1(); + assertThat(client21.getEndpoint().toString(), equalTo("https://myaccount21.blob.core.windows.net")); + // old client 2 not changed + assertThat(client12.getEndpoint().toString(), equalTo("https://myaccount12.blob.core.windows.net")); + // new client2 is gone + final 
SettingsException e2 = expectThrows(SettingsException.class, () -> azureStorageService.client("azure2")); + assertThat(e2.getMessage(), is("Unable to find client with name [azure2]")); + // client 3 emerged + final CloudBlobClient client23 = azureStorageService.client("azure3").v1(); + assertThat(client23.getEndpoint().toString(), equalTo("https://myaccount23.blob.core.windows.net")); + } } - public void testGetSelectedClientWithNoPrimaryAndSecondary() { - try { - new AzureStorageServiceImpl(Settings.EMPTY, Collections.emptyMap()); - fail("we should have raised an IllegalArgumentException"); - } catch (IllegalArgumentException e) { + public void testReinitClientEmptySettings() throws IOException { + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("azure.client.azure1.account", "myaccount1"); + secureSettings.setString("azure.client.azure1.key", encodeKey("mykey11")); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client11 = azureStorageService.client("azure1").v1(); + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + // reinit with empty settings + final SettingsException e = expectThrows(SettingsException.class, () -> plugin.reload(Settings.EMPTY)); assertThat(e.getMessage(), is("If you want to use an azure repository, you need to define a client configuration.")); + // existing client untouched + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + // new client also untouched + final CloudBlobClient client21 = azureStorageService.client("azure1").v1(); + assertThat(client21.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + } + } + + public void testReinitClientWrongSettings() throws IOException { + final MockSecureSettings secureSettings1 = new MockSecureSettings(); + secureSettings1.setString("azure.client.azure1.account", "myaccount1"); + secureSettings1.setString("azure.client.azure1.key", encodeKey("mykey11")); + final Settings settings1 = Settings.builder().setSecureSettings(secureSettings1).build(); + final MockSecureSettings secureSettings2 = new MockSecureSettings(); + secureSettings2.setString("azure.client.azure1.account", "myaccount1"); + // missing key + final Settings settings2 = Settings.builder().setSecureSettings(secureSettings2).build(); + try (AzureRepositoryPlugin plugin = new AzureRepositoryPlugin(settings1)) { + final AzureStorageServiceImpl azureStorageService = (AzureStorageServiceImpl) plugin.azureStoreService; + final CloudBlobClient client11 = azureStorageService.client("azure1").v1(); + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + plugin.reload(settings2); + // existing client untouched + assertThat(client11.getEndpoint().toString(), equalTo("https://myaccount1.blob.core.windows.net")); + final SettingsException e = expectThrows(SettingsException.class, () -> azureStorageService.client("azure1")); + assertThat(e.getMessage(), is("Invalid azure client settings with name [azure1]")); } } public void testGetSelectedClientNonExisting() { - AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings()); - IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> { - azureStorageService.getSelectedClient("azure4", LocationMode.PRIMARY_ONLY); - }); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(buildSettings()); + final SettingsException e = expectThrows(SettingsException.class, () -> azureStorageService.client("azure4")); assertThat(e.getMessage(), is("Unable to find client with name [azure4]")); } public void testGetSelectedClientDefaultTimeout() { - Settings timeoutSettings = Settings.builder() + final Settings timeoutSettings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure3.timeout", "30s") .build(); - AzureStorageServiceImpl azureStorageService = createAzureService(timeoutSettings); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(timeoutSettings); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), nullValue()); - CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY); + final CloudBlobClient client3 = azureStorageService.client("azure3").v1(); assertThat(client3.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(30 * 1000)); } public void testGetSelectedClientNoTimeout() { - AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings()); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(buildSettings()); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(nullValue())); } public void testGetSelectedClientBackoffPolicy() { - AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings()); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(buildSettings()); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue())); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class)); } public void testGetSelectedClientBackoffPolicyNbRetries() { - Settings timeoutSettings = Settings.builder() + final Settings timeoutSettings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.max_retries", 7) .build(); - AzureStorageServiceImpl azureStorageService = createAzureService(timeoutSettings); - CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY); + final AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(timeoutSettings); + final CloudBlobClient client1 = azureStorageService.client("azure1").v1(); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue())); assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class)); } public void testNoProxy() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .build(); - AzureStorageServiceImpl 
mock = createAzureService(settings); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); assertThat(mock.storageSettings.get("azure1").getProxy(), nullValue()); assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue()); assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue()); } public void testProxyHttp() throws UnknownHostException { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) .put("azure.client.azure1.proxy.type", "http") .build(); - AzureStorageServiceImpl mock = createAzureService(settings); - Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); + final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); assertThat(azure1Proxy, notNullValue()); assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP)); @@ -166,7 +224,7 @@ public void testProxyHttp() throws UnknownHostException { } public void testMultipleProxies() throws UnknownHostException { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) @@ -175,12 +233,12 @@ public void testMultipleProxies() throws UnknownHostException { .put("azure.client.azure2.proxy.port", 8081) .put("azure.client.azure2.proxy.type", "http") .build(); - AzureStorageServiceImpl mock = createAzureService(settings); - Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); + final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); assertThat(azure1Proxy, notNullValue()); assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP)); assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); - Proxy azure2Proxy = mock.storageSettings.get("azure2").getProxy(); + final Proxy azure2Proxy = mock.storageSettings.get("azure2").getProxy(); assertThat(azure2Proxy, notNullValue()); assertThat(azure2Proxy.type(), is(Proxy.Type.HTTP)); assertThat(azure2Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8081))); @@ -188,14 +246,14 @@ public void testMultipleProxies() throws UnknownHostException { } public void testProxySocks() throws UnknownHostException { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) .put("azure.client.azure1.proxy.type", "socks") .build(); - AzureStorageServiceImpl mock = createAzureService(settings); - Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); + final AzureStorageServiceImpl mock = new AzureStorageServiceImpl(settings); + final Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy(); assertThat(azure1Proxy, notNullValue()); assertThat(azure1Proxy.type(), is(Proxy.Type.SOCKS)); assertThat(azure1Proxy.address(), is(new InetSocketAddress(InetAddress.getByName("127.0.0.1"), 8080))); @@ -204,47 +262,46 @@ public void testProxySocks() throws UnknownHostException { } public void testProxyNoHost() { - Settings settings = Settings.builder() + final Settings settings = 
Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.port", 8080) .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) .build(); - - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage()); } public void testProxyNoPort() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) .build(); - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage()); } public void testProxyNoType() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.host", "127.0.0.1") .put("azure.client.azure1.proxy.port", 8080) .build(); - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure Proxy port or host have been set but proxy type is not defined.", e.getMessage()); } public void testProxyWrongHost() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(buildSecureSettings()) .put("azure.client.azure1.proxy.type", randomFrom("socks", "http")) .put("azure.client.azure1.proxy.host", "thisisnotavalidhostorwehavebeensuperunlucky") .put("azure.client.azure1.proxy.port", 8080) .build(); - SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings)); + final SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceImpl(settings)); assertEquals("Azure proxy host is unknown.", e.getMessage()); } @@ -260,7 +317,7 @@ public void testBlobNameFromUri() throws URISyntaxException { } private static MockSecureSettings buildSecureSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("azure.client.azure1.account", "myaccount1"); secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1")); secureSettings.setString("azure.client.azure2.account", "myaccount2"); @@ -274,10 +331,6 @@ private static Settings buildSettings() { return Settings.builder().setSecureSettings(buildSecureSettings()).build(); } - private static AzureStorageServiceImpl createAzureService(final Settings settings) { - return new AzureStorageServiceImpl(settings, AzureStorageSettings.load(settings)); - } - private static String encodeKey(final String value) { return Base64.encode(value.getBytes(StandardCharsets.UTF_8)); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 78fd9461ad54d..c20b99790088e 100644 --- 
a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -64,18 +64,24 @@ class GoogleCloudStorageBlobStore extends AbstractComponent implements BlobStore // https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload private static final int LARGE_BLOB_THRESHOLD_BYTE_SIZE = 5 * 1024 * 1024; - private final Storage storage; - private final String bucket; + private final String bucketName; + private final String clientName; + private final GoogleCloudStorageService storageService; - GoogleCloudStorageBlobStore(Settings settings, String bucket, Storage storage) { + GoogleCloudStorageBlobStore(Settings settings, String bucketName, String clientName, GoogleCloudStorageService storageService) { super(settings); - this.bucket = bucket; - this.storage = storage; - if (doesBucketExist(bucket) == false) { - throw new BlobStoreException("Bucket [" + bucket + "] does not exist"); + this.bucketName = bucketName; + this.clientName = clientName; + this.storageService = storageService; + if (doesBucketExist(bucketName) == false) { + throw new BlobStoreException("Bucket [" + bucketName + "] does not exist"); } } + private Storage client() throws IOException { + return storageService.client(clientName); + } + @Override public BlobContainer blobContainer(BlobPath path) { return new GoogleCloudStorageBlobContainer(path, this); @@ -91,14 +97,14 @@ public void close() { } /** - * Return true if the given bucket exists + * Return true iff the given bucket exists * * @param bucketName name of the bucket - * @return true if the bucket exists, false otherwise + * @return true iff the bucket exists */ boolean doesBucketExist(String bucketName) { try { - final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> storage.get(bucketName)); + final Bucket bucket = SocketAccess.doPrivilegedIOException(() -> client().get(bucketName)); return bucket != null; } catch (final Exception e) { throw new BlobStoreException("Unable to check if bucket [" + bucketName + "] exists", e); @@ -106,10 +112,9 @@ boolean doesBucketExist(String bucketName) { } /** - * List blobs in the bucket under the specified path. The path root is removed. + * List blobs in the specific bucket under the specified path. The path root is removed. * - * @param path - * base path of the blobs to list + * @param path base path of the blobs to list * @return a map of blob names and their metadata */ Map listBlobs(String path) throws IOException { @@ -117,20 +122,19 @@ Map listBlobs(String path) throws IOException { } /** - * List all blobs in the bucket which have a prefix + * List all blobs in the specific bucket with names prefixed * * @param path * base path of the blobs to list. This path is removed from the * names of the blobs returned. - * @param prefix - * prefix of the blobs to list. + * @param prefix prefix of the blobs to list. * @return a map of blob names and their metadata. 
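
The constructor change at the top of this hunk is the central move in this refactoring: the blob store no longer pins a Storage instance at construction time but keeps only the bucket name, the client name and a handle to the service, and looks the client up again on every operation. A minimal, self-contained sketch of that shape (the types below are stand-ins, not the plugin's real classes):

    import java.io.IOException;

    // Stand-ins for the storage service and the SDK client; only the
    // lookup-per-operation shape matters here.
    interface StorageClient {
        boolean bucketExists(String bucket);
    }

    interface StorageService {
        StorageClient client(String clientName) throws IOException;
    }

    class BlobStoreSketch {
        private final String bucketName;
        private final String clientName;
        private final StorageService storageService;

        BlobStoreSketch(String bucketName, String clientName, StorageService storageService) {
            this.bucketName = bucketName;
            this.clientName = clientName;
            this.storageService = storageService;
        }

        // Resolved on every call, so a client rebuilt after a settings reload is picked
        // up without recreating the blob store or the repository that owns it.
        private StorageClient client() throws IOException {
            return storageService.client(clientName);
        }

        boolean doesBucketExist() throws IOException {
            return client().bucketExists(bucketName);
        }
    }
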
*/ Map listBlobsByPrefix(String path, String prefix) throws IOException { final String pathPrefix = buildKey(path, prefix); final MapBuilder mapBuilder = MapBuilder.newMapBuilder(); SocketAccess.doPrivilegedVoidIOException(() -> { - storage.get(bucket).list(BlobListOption.prefix(pathPrefix)).iterateAll().forEach(blob -> { + client().get(bucketName).list(BlobListOption.prefix(pathPrefix)).iterateAll().forEach(blob -> { assert blob.getName().startsWith(path); final String suffixName = blob.getName().substring(path.length()); mapBuilder.put(suffixName, new PlainBlobMetaData(suffixName, blob.getSize())); @@ -140,26 +144,26 @@ Map listBlobsByPrefix(String path, String prefix) throws I } /** - * Returns true if the blob exists in the bucket + * Returns true if the blob exists in the specific bucket * * @param blobName name of the blob - * @return true if the blob exists, false otherwise + * @return true iff the blob exists */ boolean blobExists(String blobName) throws IOException { - final BlobId blobId = BlobId.of(bucket, blobName); - final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + final BlobId blobId = BlobId.of(bucketName, blobName); + final Blob blob = SocketAccess.doPrivilegedIOException(() -> client().get(blobId)); return blob != null; } /** - * Returns an {@link java.io.InputStream} for a given blob + * Returns an {@link java.io.InputStream} for the given blob name * * @param blobName name of the blob - * @return an InputStream + * @return the InputStream used to read the blob's content */ InputStream readBlob(String blobName) throws IOException { - final BlobId blobId = BlobId.of(bucket, blobName); - final Blob blob = SocketAccess.doPrivilegedIOException(() -> storage.get(blobId)); + final BlobId blobId = BlobId.of(bucketName, blobName); + final Blob blob = SocketAccess.doPrivilegedIOException(() -> client().get(blobId)); if (blob == null) { throw new NoSuchFileException("Blob [" + blobName + "] does not exit"); } @@ -184,13 +188,13 @@ public void close() throws IOException { } /** - * Writes a blob in the bucket. 
+ * Writes a blob in the specific bucket * * @param inputStream content of the blob to be written * @param blobSize expected size of the blob to be written */ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws IOException { - final BlobInfo blobInfo = BlobInfo.newBuilder(bucket, blobName).build(); + final BlobInfo blobInfo = BlobInfo.newBuilder(bucketName, blobName).build(); if (blobSize > LARGE_BLOB_THRESHOLD_BYTE_SIZE) { writeBlobResumable(blobInfo, inputStream); } else { @@ -208,8 +212,8 @@ void writeBlob(String blobName, InputStream inputStream, long blobSize) throws I */ private void writeBlobResumable(BlobInfo blobInfo, InputStream inputStream) throws IOException { try { - final WriteChannel writeChannel = SocketAccess.doPrivilegedIOException( - () -> storage.writer(blobInfo, Storage.BlobWriteOption.doesNotExist())); + final WriteChannel writeChannel = SocketAccess + .doPrivilegedIOException(() -> client().writer(blobInfo, Storage.BlobWriteOption.doesNotExist())); Streams.copy(inputStream, Channels.newOutputStream(new WritableByteChannel() { @Override public boolean isOpen() { @@ -227,7 +231,7 @@ public int write(ByteBuffer src) throws IOException { return SocketAccess.doPrivilegedIOException(() -> writeChannel.write(src)); } })); - } catch (StorageException se) { + } catch (final StorageException se) { if (se.getCode() == HTTP_PRECON_FAILED) { throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); } @@ -249,45 +253,43 @@ private void writeBlobMultipart(BlobInfo blobInfo, InputStream inputStream, long assert blobSize <= LARGE_BLOB_THRESHOLD_BYTE_SIZE : "large blob uploads should use the resumable upload method"; final ByteArrayOutputStream baos = new ByteArrayOutputStream(Math.toIntExact(blobSize)); Streams.copy(inputStream, baos); - SocketAccess.doPrivilegedVoidIOException( - () -> { - try { - storage.create(blobInfo, baos.toByteArray(), Storage.BlobTargetOption.doesNotExist()); - } catch (StorageException se) { - if (se.getCode() == HTTP_PRECON_FAILED) { - throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); - } - throw se; - } - }); + try { + SocketAccess.doPrivilegedVoidIOException( + () -> client().create(blobInfo, baos.toByteArray(), Storage.BlobTargetOption.doesNotExist())); + } catch (final StorageException se) { + if (se.getCode() == HTTP_PRECON_FAILED) { + throw new FileAlreadyExistsException(blobInfo.getBlobId().getName(), null, se.getMessage()); + } + throw se; + } } /** - * Deletes a blob in the bucket + * Deletes the blob from the specific bucket * * @param blobName name of the blob */ void deleteBlob(String blobName) throws IOException { - final BlobId blobId = BlobId.of(bucket, blobName); - final boolean deleted = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobId)); + final BlobId blobId = BlobId.of(bucketName, blobName); + final boolean deleted = SocketAccess.doPrivilegedIOException(() -> client().delete(blobId)); if (deleted == false) { throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } } /** - * Deletes multiple blobs in the bucket that have a given prefix + * Deletes multiple blobs from the specific bucket all of which have prefixed names * - * @param prefix prefix of the buckets to delete + * @param prefix prefix of the blobs to delete */ void deleteBlobsByPrefix(String prefix) throws IOException { deleteBlobs(listBlobsByPrefix("", prefix).keySet()); } /** - * Deletes multiple blobs in the given bucket (uses a batch 
request to perform this) + * Deletes multiple blobs from the specific bucket using a batch request * - * @param blobNames names of the bucket to delete + * @param blobNames names of the blobs to delete */ void deleteBlobs(Collection blobNames) throws IOException { if (blobNames.isEmpty()) { @@ -298,13 +300,13 @@ void deleteBlobs(Collection blobNames) throws IOException { deleteBlob(blobNames.iterator().next()); return; } - final List blobIdsToDelete = blobNames.stream().map(blobName -> BlobId.of(bucket, blobName)).collect(Collectors.toList()); - final List deletedStatuses = SocketAccess.doPrivilegedIOException(() -> storage.delete(blobIdsToDelete)); + final List blobIdsToDelete = blobNames.stream().map(blob -> BlobId.of(bucketName, blob)).collect(Collectors.toList()); + final List deletedStatuses = SocketAccess.doPrivilegedIOException(() -> client().delete(blobIdsToDelete)); assert blobIdsToDelete.size() == deletedStatuses.size(); boolean failed = false; for (int i = 0; i < blobIdsToDelete.size(); i++) { if (deletedStatuses.get(i) == false) { - logger.error("Failed to delete blob [{}] in bucket [{}]", blobIdsToDelete.get(i).getName(), bucket); + logger.error("Failed to delete blob [{}] in bucket [{}]", blobIdsToDelete.get(i).getName(), bucketName); failed = true; } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java index 1d2d70584adf9..12e7fd26ff565 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStoragePlugin.java @@ -24,35 +24,34 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; - import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; -public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin { +public class GoogleCloudStoragePlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { - private final Map clientsSettings; + // package-private for tests + final GoogleCloudStorageService storageService; public GoogleCloudStoragePlugin(final Settings settings) { - clientsSettings = GoogleCloudStorageClientSettings.load(settings); - } - - protected Map getClientsSettings() { - return clientsSettings; + this.storageService = createStorageService(settings); + // eagerly load client settings so that secure settings are readable (not closed) + reload(settings); } // overridable for tests - protected GoogleCloudStorageService createStorageService(Environment environment) { - return new GoogleCloudStorageService(environment, clientsSettings); + protected GoogleCloudStorageService createStorageService(Settings settings) { + return new GoogleCloudStorageService(settings); } @Override public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) { return Collections.singletonMap(GoogleCloudStorageRepository.TYPE, - (metadata) -> new GoogleCloudStorageRepository(metadata, env, namedXContentRegistry, createStorageService(env))); + (metadata) -> new GoogleCloudStorageRepository(metadata, env, namedXContentRegistry, 
this.storageService)); } @Override @@ -66,4 +65,15 @@ public List> getSettings() { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING, GoogleCloudStorageClientSettings.TOKEN_URI_SETTING); } + + @Override + public void reload(Settings settings) { + // Secure settings should be readable inside this method. Duplicate client + // settings in a format (`GoogleCloudStorageClientSettings`) that does not + // require for the `SecureSettings` to be open. Pass that around (the + // `GoogleCloudStorageClientSettings` instance) instead of the `Settings` + // instance. + final Map clientsSettings = GoogleCloudStorageClientSettings.load(settings); + this.storageService.refreshAndClearCache(clientsSettings); + } } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index 976befae0a269..83d48eeda20aa 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -38,8 +38,6 @@ import static org.elasticsearch.common.settings.Setting.byteSizeSetting; import static org.elasticsearch.common.settings.Setting.simpleString; -import com.google.cloud.storage.Storage; - class GoogleCloudStorageRepository extends BlobStoreRepository { // package private for testing @@ -86,8 +84,7 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath, chunkSize, compress); - Storage client = SocketAccess.doPrivilegedIOException(() -> storageService.createClient(clientName)); - this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, client); + this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 57bcc4b131356..b24674da174c3 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -28,11 +28,13 @@ import com.google.cloud.storage.Storage; import com.google.cloud.storage.StorageOptions; +import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; +import org.elasticsearch.common.util.LazyInitializable; import java.io.IOException; import java.net.HttpURLConnection; @@ -40,30 +42,74 @@ import java.net.URISyntaxException; import java.net.URL; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + +import static java.util.Collections.emptyMap; public class GoogleCloudStorageService extends AbstractComponent { - /** Clients settings identified by client name. */ - private final Map clientsSettings; + /** + * Dictionary of client instances. Client instances are built lazily from the + * latest settings. 
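
The eager reload(settings) call in the plugin constructor above, together with refreshAndClearCache, is the pattern that makes reloadable secure settings work here: secure settings are only readable while reload() runs, so they are copied into plain per-client settings objects that outlive the call, and clients are then rebuilt lazily from that snapshot. A self-contained sketch under those assumptions (stand-in names, not the plugin's classes):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    class StorageServiceSketch {
        private volatile Map<String, String> clientSettings = Collections.emptyMap();

        void refreshAndClearCache(Map<String, String> newSettings) {
            // keep an immutable snapshot; clients are rebuilt lazily from it on demand
            this.clientSettings = Collections.unmodifiableMap(new HashMap<>(newSettings));
        }
    }

    class PluginSketch {
        final StorageServiceSketch storageService = new StorageServiceSketch();

        PluginSketch(Map<String, String> initialSettings) {
            // eagerly capture client settings while the secure settings are still readable
            reload(initialSettings);
        }

        public void reload(Map<String, String> settingsSnapshot) {
            storageService.refreshAndClearCache(settingsSnapshot);
        }
    }

The S3 changes later in this series follow the same shape with S3ClientSettings.
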
+ */ + private final AtomicReference>> clientsCache = new AtomicReference<>(emptyMap()); - public GoogleCloudStorageService(final Environment environment, final Map clientsSettings) { - super(environment.settings()); - this.clientsSettings = clientsSettings; + public GoogleCloudStorageService(final Settings settings) { + super(settings); } /** - * Creates a client that can be used to manage Google Cloud Storage objects. + * Refreshes the client settings and clears the client cache. Subsequent calls to + * {@code GoogleCloudStorageService#client} will return new clients constructed + * using the parameter settings. * - * @param clientName name of client settings to use, including secure settings - * @return a Client instance that can be used to manage Storage objects + * @param clientsSettings the new settings used for building clients for subsequent requests + */ + public synchronized void refreshAndClearCache(Map clientsSettings) { + // build the new lazy clients + final MapBuilder> newClientsCache = MapBuilder.newMapBuilder(); + for (final Map.Entry entry : clientsSettings.entrySet()) { + newClientsCache.put(entry.getKey(), + new LazyInitializable(() -> createClient(entry.getKey(), entry.getValue()))); + } + // make the new clients available + final Map> oldClientCache = clientsCache.getAndSet(newClientsCache.immutableMap()); + // release old clients + oldClientCache.values().forEach(LazyInitializable::reset); + } + + /** + * Attempts to retrieve a client from the cache. If the client does not exist it + * will be created from the latest settings and will populate the cache. The + * returned instance should not be cached by the calling code. Instead, for each + * use, the (possibly updated) instance should be requested by calling this + * method. + * + * @param clientName name of the client settings used to create the client + * @return a cached client storage instance that can be used to manage objects + * (blobs) */ - public Storage createClient(final String clientName) throws Exception { - final GoogleCloudStorageClientSettings clientSettings = clientsSettings.get(clientName); - if (clientSettings == null) { + public Storage client(final String clientName) throws IOException { + final LazyInitializable lazyClient = clientsCache.get().get(clientName); + if (lazyClient == null) { throw new IllegalArgumentException("Unknown client name [" + clientName + "]. Existing client configs: " - + Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + + Strings.collectionToDelimitedString(clientsCache.get().keySet(), ",")); } - final HttpTransport httpTransport = createHttpTransport(clientSettings.getHost()); + return lazyClient.getOrCompute(); + } + + /** + * Creates a client that can be used to manage Google Cloud Storage objects. The client is thread-safe. 
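
LazyInitializable is an Elasticsearch utility class; the only behaviour the cache above relies on, as visible in this diff, is a supplier-based constructor plus getOrCompute() and reset(). A simplified, self-contained stand-in for that behaviour, to make the laziness and thread-safety argument concrete (a sketch, not the real implementation):

    import java.util.function.Supplier;

    final class Lazy<T> {
        private final Supplier<T> supplier;
        private volatile T value;

        Lazy(Supplier<T> supplier) {
            this.supplier = supplier;
        }

        // Double-checked locking: at most one thread runs the supplier per cache generation,
        // and every other caller of the same entry gets that single shared instance.
        T getOrCompute() {
            T result = value;
            if (result == null) {
                synchronized (this) {
                    result = value;
                    if (result == null) {
                        result = supplier.get();
                        value = result;
                    }
                }
            }
            return result;
        }

        // Called when the cache is refreshed so the stale instance can be dropped.
        synchronized void reset() {
            value = null;
        }
    }

Because each cached entry hands out one shared Storage instance, that instance must be safe for concurrent use, which is why the connection-per-request comment added further down in this file matters.
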
+ * + * @param clientName name of client settings to use, including secure settings + * @param clientSettings name of client settings to use, including secure settings + * @return a new client storage instance that can be used to manage objects + * (blobs) + */ + private Storage createClient(final String clientName, final GoogleCloudStorageClientSettings clientSettings) throws IOException { + logger.debug(() -> new ParameterizedMessage("creating GCS client with client_name [{}], endpoint [{}]", clientName, + clientSettings.getHost())); + final HttpTransport httpTransport = SocketAccess.doPrivilegedIOException(() -> createHttpTransport(clientSettings.getHost())); final HttpTransportOptions httpTransportOptions = HttpTransportOptions.newBuilder() .setConnectTimeout(toTimeout(clientSettings.getConnectTimeout())) .setReadTimeout(toTimeout(clientSettings.getReadTimeout())) @@ -114,6 +160,9 @@ private static HttpTransport createHttpTransport(final String endpoint) throws E builder.trustCertificates(GoogleUtils.getCertificateTrustStore()); if (Strings.hasLength(endpoint)) { final URL endpointUrl = URI.create(endpoint).toURL(); + // it is crucial to open a connection for each URL (see {@code + // DefaultConnectionFactory#openConnection}) instead of reusing connections, + // because the storage instance has to be thread-safe as it is cached. builder.setConnectionFactory(new DefaultConnectionFactory() { @Override public HttpURLConnection openConnection(final URL originalUrl) throws IOException { diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java index 27736e24dbf51..0cc1243f28311 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -26,11 +26,22 @@ import java.util.Locale; import java.util.concurrent.ConcurrentHashMap; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class GoogleCloudStorageBlobStoreContainerTests extends ESBlobStoreContainerTestCase { @Override protected BlobStore newBlobStore() { - String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>())); + final String bucketName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String clientName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final GoogleCloudStorageService storageService = mock(GoogleCloudStorageService.class); + try { + when(storageService.client(any(String.class))).thenReturn(new MockStorage(bucketName, new ConcurrentHashMap<>())); + } catch (final Exception e) { + throw new RuntimeException(e); + } + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index c4d9b67899672..3692b26f2bbb7 100644 --- 
a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -24,14 +24,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; import org.junit.AfterClass; import java.util.Collection; import java.util.Collections; -import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; @@ -73,19 +71,19 @@ public MockGoogleCloudStoragePlugin(final Settings settings) { } @Override - protected GoogleCloudStorageService createStorageService(Environment environment) { - return new MockGoogleCloudStorageService(environment, getClientsSettings()); + protected GoogleCloudStorageService createStorageService(Settings settings) { + return new MockGoogleCloudStorageService(settings); } } public static class MockGoogleCloudStorageService extends GoogleCloudStorageService { - MockGoogleCloudStorageService(Environment environment, Map clientsSettings) { - super(environment, clientsSettings); + MockGoogleCloudStorageService(Settings settings) { + super(settings); } @Override - public Storage createClient(String clientName) { + public Storage client(String clientName) { return new MockStorage(BUCKET, blobs); } } @@ -97,7 +95,7 @@ public void testChunkSize() { assertEquals(GoogleCloudStorageRepository.MAX_CHUNK_SIZE, chunkSize); // chunk size in settings - int size = randomIntBetween(1, 100); + final int size = randomIntBetween(1, 100); repositoryMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", size + "mb").build()); chunkSize = GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repositoryMetaData); @@ -105,7 +103,7 @@ public void testChunkSize() { // zero bytes is not allowed IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> { - RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, + final RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", "0").build()); GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repoMetaData); }); @@ -113,7 +111,7 @@ public void testChunkSize() { // negative bytes not allowed e = expectThrows(IllegalArgumentException.class, () -> { - RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, + final RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", "-1").build()); GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repoMetaData); }); @@ -121,7 +119,7 @@ public void testChunkSize() { // greater than max chunk size not allowed e = expectThrows(IllegalArgumentException.class, () -> { - RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, + final RepositoryMetaData repoMetaData = new RepositoryMetaData("repo", GoogleCloudStorageRepository.TYPE, Settings.builder().put("chunk_size", "101mb").build()); 
GoogleCloudStorageRepository.getSetting(GoogleCloudStorageRepository.CHUNK_SIZE, repoMetaData); }); diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java index 5e25307805235..4634bd3274a70 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreTests.java @@ -26,11 +26,22 @@ import java.util.Locale; import java.util.concurrent.ConcurrentHashMap; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class GoogleCloudStorageBlobStoreTests extends ESBlobStoreTestCase { @Override protected BlobStore newBlobStore() { - String bucket = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); - return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucket, new MockStorage(bucket, new ConcurrentHashMap<>())); + final String bucketName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String clientName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final GoogleCloudStorageService storageService = mock(GoogleCloudStorageService.class); + try { + when(storageService.client(any(String.class))).thenReturn(new MockStorage(bucketName, new ConcurrentHashMap<>())); + } catch (final Exception e) { + throw new RuntimeException(e); + } + return new GoogleCloudStorageBlobStore(Settings.EMPTY, bucketName, clientName, storageService); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java index a33ae90c549bc..0130d2c576cd5 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageServiceTests.java @@ -23,28 +23,36 @@ import com.google.cloud.http.HttpTransportOptions; import com.google.cloud.storage.Storage; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.env.Environment; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import java.util.Collections; + +import java.security.KeyPair; +import java.security.KeyPairGenerator; +import java.util.Base64; import java.util.Locale; +import java.util.UUID; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.containsString; public class GoogleCloudStorageServiceTests extends ESTestCase { public void testClientInitializer() throws Exception { - final String clientName = randomAlphaOfLength(4).toLowerCase(Locale.ROOT); - final Environment environment = mock(Environment.class); + final String clientName = randomAlphaOfLength(randomIntBetween(1, 
10)).toLowerCase(Locale.ROOT); final TimeValue connectTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); final TimeValue readTimeValue = TimeValue.timeValueNanos(randomIntBetween(0, 2000000)); - final String applicationName = randomAlphaOfLength(4); - final String hostName = randomFrom("http://", "https://") + randomAlphaOfLength(4) + ":" + randomIntBetween(1, 65535); - final String projectIdName = randomAlphaOfLength(4); + final String applicationName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); + final String endpoint = randomFrom("http://", "https://") + + randomFrom("www.elastic.co", "www.googleapis.com", "localhost/api", "google.com/oauth") + + ":" + randomIntBetween(1, 65535); + final String projectIdName = randomAlphaOfLength(randomIntBetween(1, 10)).toLowerCase(Locale.ROOT); final Settings settings = Settings.builder() .put(GoogleCloudStorageClientSettings.CONNECT_TIMEOUT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), connectTimeValue.getStringRep()) @@ -52,20 +60,18 @@ public void testClientInitializer() throws Exception { readTimeValue.getStringRep()) .put(GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName).getKey(), applicationName) - .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), hostName) + .put(GoogleCloudStorageClientSettings.ENDPOINT_SETTING.getConcreteSettingForNamespace(clientName).getKey(), endpoint) .put(GoogleCloudStorageClientSettings.PROJECT_ID_SETTING.getConcreteSettingForNamespace(clientName).getKey(), projectIdName) .build(); - when(environment.settings()).thenReturn(settings); - final GoogleCloudStorageClientSettings clientSettings = GoogleCloudStorageClientSettings.getClientSettings(settings, clientName); - final GoogleCloudStorageService service = new GoogleCloudStorageService(environment, - Collections.singletonMap(clientName, clientSettings)); - final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.createClient("another_client")); + final GoogleCloudStorageService service = new GoogleCloudStorageService(settings); + service.refreshAndClearCache(GoogleCloudStorageClientSettings.load(settings)); + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> service.client("another_client")); assertThat(e.getMessage(), Matchers.startsWith("Unknown client name")); assertSettingDeprecationsAndWarnings( new Setting[] { GoogleCloudStorageClientSettings.APPLICATION_NAME_SETTING.getConcreteSettingForNamespace(clientName) }); - final Storage storage = service.createClient(clientName); + final Storage storage = service.client(clientName); assertThat(storage.getOptions().getApplicationName(), Matchers.containsString(applicationName)); - assertThat(storage.getOptions().getHost(), Matchers.is(hostName)); + assertThat(storage.getOptions().getHost(), Matchers.is(endpoint)); assertThat(storage.getOptions().getProjectId(), Matchers.is(projectIdName)); assertThat(storage.getOptions().getTransportOptions(), Matchers.instanceOf(HttpTransportOptions.class)); assertThat(((HttpTransportOptions) storage.getOptions().getTransportOptions()).getConnectTimeout(), @@ -75,6 +81,58 @@ public void testClientInitializer() throws Exception { assertThat(storage.getOptions().getCredentials(), Matchers.nullValue(Credentials.class)); } + public void testReinitClientSettings() throws Exception { + final MockSecureSettings secureSettings1 = new MockSecureSettings(); + 
secureSettings1.setFile("gcs.client.gcs1.credentials_file", serviceAccountFileContent("project_gcs11")); + secureSettings1.setFile("gcs.client.gcs2.credentials_file", serviceAccountFileContent("project_gcs12")); + final Settings settings1 = Settings.builder().setSecureSettings(secureSettings1).build(); + final MockSecureSettings secureSettings2 = new MockSecureSettings(); + secureSettings2.setFile("gcs.client.gcs1.credentials_file", serviceAccountFileContent("project_gcs21")); + secureSettings2.setFile("gcs.client.gcs3.credentials_file", serviceAccountFileContent("project_gcs23")); + final Settings settings2 = Settings.builder().setSecureSettings(secureSettings2).build(); + try (GoogleCloudStoragePlugin plugin = new GoogleCloudStoragePlugin(settings1)) { + final GoogleCloudStorageService storageService = plugin.storageService; + final Storage client11 = storageService.client("gcs1"); + assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + final Storage client12 = storageService.client("gcs2"); + assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + // client 3 is missing + final IllegalArgumentException e1 = expectThrows(IllegalArgumentException.class, () -> storageService.client("gcs3")); + assertThat(e1.getMessage(), containsString("Unknown client name [gcs3].")); + // update client settings + plugin.reload(settings2); + // old client 1 not changed + assertThat(client11.getOptions().getProjectId(), equalTo("project_gcs11")); + // new client 1 is changed + final Storage client21 = storageService.client("gcs1"); + assertThat(client21.getOptions().getProjectId(), equalTo("project_gcs21")); + // old client 2 not changed + assertThat(client12.getOptions().getProjectId(), equalTo("project_gcs12")); + // new client2 is gone + final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, () -> storageService.client("gcs2")); + assertThat(e2.getMessage(), containsString("Unknown client name [gcs2].")); + // client 3 emerged + final Storage client23 = storageService.client("gcs3"); + assertThat(client23.getOptions().getProjectId(), equalTo("project_gcs23")); + } + } + + private byte[] serviceAccountFileContent(String projectId) throws Exception { + final KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA"); + keyPairGenerator.initialize(1024); + final KeyPair keyPair = keyPairGenerator.generateKeyPair(); + final String encodedKey = Base64.getEncoder().encodeToString(keyPair.getPrivate().getEncoded()); + final XContentBuilder serviceAccountBuilder = jsonBuilder().startObject() + .field("type", "service_account") + .field("project_id", projectId) + .field("private_key_id", UUID.randomUUID().toString()) + .field("private_key", "-----BEGIN PRIVATE KEY-----\n" + encodedKey + "\n-----END PRIVATE KEY-----\n") + .field("client_email", "integration_test@appspot.gserviceaccount.com") + .field("client_id", "client_id") + .endObject(); + return BytesReference.toBytes(BytesReference.bytes(serviceAccountBuilder)); + } + public void testToTimeout() { assertEquals(-1, GoogleCloudStorageService.toTimeout(null).intValue()); assertEquals(-1, GoogleCloudStorageService.toTimeout(TimeValue.ZERO).intValue()); diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 23252881cd75f..8448b2ab9e1ac 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -55,7 +55,7 @@ bundlePlugin { } additionalTest('testRepositoryCreds'){ - include 
'**/RepositorySettingsCredentialsTests.class' + include '**/RepositoryCredentialsTests.class' systemProperty 'es.allow_insecure_settings', 'true' } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java new file mode 100644 index 0000000000000..6734fcfb56df5 --- /dev/null +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AmazonS3Reference.java @@ -0,0 +1,63 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories.s3; + +import org.elasticsearch.common.util.concurrent.AbstractRefCounted; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3Client; + +import org.elasticsearch.common.lease.Releasable; + +/** + * Handles the shutdown of the wrapped {@link AmazonS3Client} using reference + * counting. + */ +public class AmazonS3Reference extends AbstractRefCounted implements Releasable { + + private final AmazonS3 client; + + AmazonS3Reference(AmazonS3 client) { + super("AWS_S3_CLIENT"); + this.client = client; + } + + /** + * Call when the client is not needed anymore. + */ + @Override + public void close() { + decRef(); + } + + /** + * Returns the underlying `AmazonS3` client. All method calls are permitted BUT + * NOT shutdown. Shutdown is called when reference count reaches 0. + */ + public AmazonS3 client() { + return client; + } + + @Override + protected void closeInternal() { + client.shutdown(); + } + +} \ No newline at end of file diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java index dbffe293a43b1..03b06c5b1bd34 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/AwsS3Service.java @@ -19,14 +19,25 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; -import org.elasticsearch.common.component.LifecycleComponent; -import org.elasticsearch.common.settings.Settings; +import java.io.Closeable; +import java.util.Map; -interface AwsS3Service extends LifecycleComponent { +interface AwsS3Service extends Closeable { /** - * Creates an {@code AmazonS3} client from the given repository metadata and node settings. + * Creates then caches an {@code AmazonS3} client using the current client + * settings. Returns an {@code AmazonS3Reference} wrapper which has to be + * released as soon as it is not needed anymore. 
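
To make that acquire/use/release contract concrete, here is a small self-contained sketch with stand-in names (the real wrapper is the AmazonS3Reference class added above, built on Elasticsearch's AbstractRefCounted): a caller takes a reference for the duration of a single operation and releases it through try-with-resources, so the wrapped client can only be shut down once the last user is done with it.

    import java.util.concurrent.atomic.AtomicInteger;

    final class ClientRef implements AutoCloseable {
        private final AtomicInteger refs = new AtomicInteger(1); // one reference is held by the cache
        private final Object client = new Object();              // placeholder for the real SDK client

        // Returns false if every reference was already released (count reached zero),
        // in which case the cache has to build a fresh client.
        boolean tryIncRef() {
            int current;
            do {
                current = refs.get();
                if (current == 0) {
                    return false;
                }
            } while (refs.compareAndSet(current, current + 1) == false);
            return true;
        }

        Object client() {
            return client;
        }

        void decRef() {
            if (refs.decrementAndGet() == 0) {
                // last reference gone: this is where the real wrapper calls shutdown() on the client
            }
        }

        @Override
        public void close() {
            decRef();
        }
    }

    class UsageSketch {
        static void doOneOperation(ClientRef cached) {
            if (cached.tryIncRef() == false) {
                return; // stale entry; the cache would create and register a new reference here
            }
            try (ClientRef acquired = cached) {
                acquired.client(); // perform the blob operation with the wrapped client
            } // close() releases exactly the reference taken by tryIncRef() above
        }
    }
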
     */
-    AmazonS3 client(Settings repositorySettings);
+    AmazonS3Reference client(String clientName);
+
+    /**
+     * Updates settings for building clients and clears the client cache. Future
+     * client requests will use the new settings to lazily build new clients.
+     *
+     * @param clientsSettings the new refreshed settings
+     * @return the old stale settings
+     */
+    Map refreshAndClearCache(Map clientsSettings);
+
 }
diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java
index d70ed9ea9aa8b..a54320f1fbd19 100644
--- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java
+++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/InternalAwsS3Service.java
@@ -28,66 +28,91 @@ import com.amazonaws.internal.StaticCredentialsProvider;
 import com.amazonaws.services.s3.AmazonS3;
 import com.amazonaws.services.s3.AmazonS3Client;
+
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.component.AbstractLifecycleComponent;
-import org.elasticsearch.common.logging.DeprecationLogger;
-import org.elasticsearch.common.settings.SecureString;
-import org.elasticsearch.common.settings.Setting;
+import org.elasticsearch.common.collect.MapBuilder;
+import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
-import java.util.HashMap;
+import java.io.IOException;
 import java.util.Map;
-import java.util.function.Function;
-
+import static java.util.Collections.emptyMap;
-class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Service {
-    // pkg private for tests
-    static final Setting CLIENT_NAME = new Setting<>("client", "default", Function.identity());
+class InternalAwsS3Service extends AbstractComponent implements AwsS3Service {
-    private final Map clientsSettings;
+    private volatile Map clientsCache = emptyMap();
+    private volatile Map clientsSettings = emptyMap();
-    private final Map clientsCache = new HashMap<>();
-
-    InternalAwsS3Service(Settings settings, Map clientsSettings) {
+    InternalAwsS3Service(Settings settings) {
         super(settings);
-        this.clientsSettings = clientsSettings;
     }
+    /**
+     * Refreshes the settings for the AmazonS3 clients and clears the cache of
+     * existing clients. New clients will be built using these new settings. Old
+     * clients are usable until released. On release they will be destroyed instead
+     * of being returned to the cache.
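
Reusing the ClientRef stand-in from the sketch above, the release-on-refresh behaviour described in this comment plays out roughly as follows: the cache drops its own reference when the settings are refreshed, but an operation that already holds a reference keeps a working client until it closes it, and only then is the client shut down.

    class RefreshSketch {
        public static void main(String[] args) {
            ClientRef cached = new ClientRef(); // count = 1, held by the client cache
            cached.tryIncRef();                 // a snapshot operation starts, count = 2

            // refreshAndClearCache(...): the cache releases every client it holds
            cached.decRef();                    // count = 1, the in-flight operation is unaffected

            cached.close();                     // operation finished, count = 0, shutdown happens now
            // the next client(name) lookup builds a new client from the refreshed settings
        }
    }
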
+ */ @Override - public synchronized AmazonS3 client(Settings repositorySettings) { - String clientName = CLIENT_NAME.get(repositorySettings); - AmazonS3Client client = clientsCache.get(clientName); - if (client != null) { - return client; - } + public synchronized Map refreshAndClearCache(Map clientsSettings) { + // shutdown all unused clients + // others will shutdown on their respective release + releaseCachedClients(); + final Map prevSettings = this.clientsSettings; + this.clientsSettings = MapBuilder.newMapBuilder(clientsSettings).immutableMap(); + assert this.clientsSettings.containsKey("default") : "always at least have 'default'"; + // clients are built lazily by {@link client(String)} + return prevSettings; + } - S3ClientSettings clientSettings = clientsSettings.get(clientName); - if (clientSettings == null) { - throw new IllegalArgumentException("Unknown s3 client name [" + clientName + "]. Existing client configs: " + - Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + /** + * Attempts to retrieve a client by name from the cache. If the client does not + * exist it will be created. + */ + @Override + public AmazonS3Reference client(String clientName) { + AmazonS3Reference clientReference = clientsCache.get(clientName); + if ((clientReference != null) && clientReference.tryIncRef()) { + return clientReference; } + synchronized (this) { + clientReference = clientsCache.get(clientName); + if ((clientReference != null) && clientReference.tryIncRef()) { + return clientReference; + } + final S3ClientSettings clientSettings = clientsSettings.get(clientName); + if (clientSettings == null) { + throw new IllegalArgumentException("Unknown s3 client name [" + clientName + "]. Existing client configs: " + + Strings.collectionToDelimitedString(clientsSettings.keySet(), ",")); + } + logger.debug("creating S3 client with client_name [{}], endpoint [{}]", clientName, clientSettings.endpoint); + clientReference = new AmazonS3Reference(buildClient(clientSettings)); + clientReference.incRef(); + clientsCache = MapBuilder.newMapBuilder(clientsCache).put(clientName, clientReference).immutableMap(); + return clientReference; + } + } - logger.debug("creating S3 client with client_name [{}], endpoint [{}]", clientName, clientSettings.endpoint); - - AWSCredentialsProvider credentials = buildCredentials(logger, deprecationLogger, clientSettings, repositorySettings); - ClientConfiguration configuration = buildConfiguration(clientSettings); - - client = new AmazonS3Client(credentials, configuration); - + private AmazonS3 buildClient(S3ClientSettings clientSettings) { + final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings); + final ClientConfiguration configuration = buildConfiguration(clientSettings); + final AmazonS3 client = buildClient(credentials, configuration); if (Strings.hasText(clientSettings.endpoint)) { client.setEndpoint(clientSettings.endpoint); } - - clientsCache.put(clientName, client); return client; } + // proxy for testing + AmazonS3 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + return new AmazonS3Client(credentials, configuration); + } + // pkg private for tests static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { - ClientConfiguration clientConfiguration = new ClientConfiguration(); + final ClientConfiguration clientConfiguration = new ClientConfiguration(); // the response metadata cache is only there for diagnostics purposes, // but can force objects from every 
response to the old generation. clientConfiguration.setResponseMetadataCacheSize(0); @@ -109,27 +134,8 @@ static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) { } // pkg private for tests - static AWSCredentialsProvider buildCredentials(Logger logger, DeprecationLogger deprecationLogger, - S3ClientSettings clientSettings, Settings repositorySettings) { - - - BasicAWSCredentials credentials = clientSettings.credentials; - if (S3Repository.ACCESS_KEY_SETTING.exists(repositorySettings)) { - if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings) == false) { - throw new IllegalArgumentException("Repository setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + - " must be accompanied by setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + "]"); - } - try (SecureString key = S3Repository.ACCESS_KEY_SETTING.get(repositorySettings); - SecureString secret = S3Repository.SECRET_KEY_SETTING.get(repositorySettings)) { - credentials = new BasicAWSCredentials(key.toString(), secret.toString()); - } - // backcompat for reading keys out of repository settings - deprecationLogger.deprecated("Using s3 access/secret key from repository settings. Instead " + - "store these in named clients and the elasticsearch keystore for secure settings."); - } else if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings)) { - throw new IllegalArgumentException("Repository setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + - " must be accompanied by setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + "]"); - } + static AWSCredentialsProvider buildCredentials(Logger logger, S3ClientSettings clientSettings) { + final BasicAWSCredentials credentials = clientSettings.credentials; if (credentials == null) { logger.debug("Using instance profile credentials"); return new PrivilegedInstanceProfileCredentialsProvider(); @@ -139,21 +145,15 @@ static AWSCredentialsProvider buildCredentials(Logger logger, DeprecationLogger } } - @Override - protected void doStart() throws ElasticsearchException { - } - - @Override - protected void doStop() throws ElasticsearchException { - } - - @Override - protected void doClose() throws ElasticsearchException { - for (AmazonS3Client client : clientsCache.values()) { - client.shutdown(); + protected synchronized void releaseCachedClients() { + // the clients will shutdown when they will not be used anymore + for (final AmazonS3Reference clientReference : clientsCache.values()) { + clientReference.decRef(); } - - // Ensure that IdleConnectionReaper is shutdown + // clear previously cached clients, they will be build lazily + clientsCache = emptyMap(); + // shutdown IdleConnectionReaper background thread + // it will be restarted on new client usage IdleConnectionReaper.shutdown(); } @@ -174,4 +174,10 @@ public void refresh() { SocketAccess.doPrivilegedVoid(credentials::refresh); } } + + @Override + public void close() throws IOException { + releaseCachedClients(); + } + } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index 222802ae30437..102af6f9f5b4c 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.AmazonS3; import 
com.amazonaws.services.s3.model.AbortMultipartUploadRequest; import com.amazonaws.services.s3.model.AmazonS3Exception; import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; @@ -47,8 +46,6 @@ import java.io.IOException; import java.io.InputStream; import java.nio.file.NoSuchFileException; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.List; import java.util.Map; @@ -70,19 +67,20 @@ class S3BlobContainer extends AbstractBlobContainer { @Override public boolean blobExists(String blobName) { - try { - return SocketAccess.doPrivileged(() -> blobStore.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); - } catch (Exception e) { + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + return SocketAccess.doPrivileged(() -> clientReference.client().doesObjectExist(blobStore.bucket(), buildKey(blobName))); + } catch (final Exception e) { throw new BlobStoreException("Failed to check if blob [" + blobName +"] exists", e); } } @Override public InputStream readBlob(String blobName) throws IOException { - try { - S3Object s3Object = SocketAccess.doPrivileged(() -> blobStore.client().getObject(blobStore.bucket(), buildKey(blobName))); + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + final S3Object s3Object = SocketAccess.doPrivileged(() -> clientReference.client().getObject(blobStore.bucket(), + buildKey(blobName))); return s3Object.getObjectContent(); - } catch (AmazonClientException e) { + } catch (final AmazonClientException e) { if (e instanceof AmazonS3Exception) { if (404 == ((AmazonS3Exception) e).getStatusCode()) { throw new NoSuchFileException("Blob object [" + blobName + "] not found: " + e.getMessage()); @@ -110,44 +108,45 @@ public void deleteBlob(String blobName) throws IOException { throw new NoSuchFileException("Blob [" + blobName + "] does not exist"); } - try { - SocketAccess.doPrivilegedVoid(() -> blobStore.client().deleteObject(blobStore.bucket(), buildKey(blobName))); - } catch (AmazonClientException e) { + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObject(blobStore.bucket(), buildKey(blobName))); + } catch (final AmazonClientException e) { throw new IOException("Exception when deleting blob [" + blobName + "]", e); } } @Override public Map listBlobsByPrefix(@Nullable String blobNamePrefix) throws IOException { - return AccessController.doPrivileged((PrivilegedAction>) () -> { - MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); - AmazonS3 client = blobStore.client(); - SocketAccess.doPrivilegedVoid(() -> { - ObjectListing prevListing = null; - while (true) { - ObjectListing list; - if (prevListing != null) { - list = client.listNextBatchOfObjects(prevListing); - } else { - if (blobNamePrefix != null) { - list = client.listObjects(blobStore.bucket(), buildKey(blobNamePrefix)); - } else { - list = client.listObjects(blobStore.bucket(), keyPath); - } - } - for (S3ObjectSummary summary : list.getObjectSummaries()) { - String name = summary.getKey().substring(keyPath.length()); - blobsBuilder.put(name, new PlainBlobMetaData(name, summary.getSize())); - } - if (list.isTruncated()) { - prevListing = list; + final MapBuilder blobsBuilder = MapBuilder.newMapBuilder(); + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + ObjectListing prevListing = null; + while (true) { + ObjectListing list; + if (prevListing != 
null) { + final ObjectListing finalPrevListing = prevListing; + list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); + } else { + if (blobNamePrefix != null) { + list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(blobStore.bucket(), + buildKey(blobNamePrefix))); } else { - break; + list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(blobStore.bucket(), keyPath)); } } - }); + for (final S3ObjectSummary summary : list.getObjectSummaries()) { + final String name = summary.getKey().substring(keyPath.length()); + blobsBuilder.put(name, new PlainBlobMetaData(name, summary.getSize())); + } + if (list.isTruncated()) { + prevListing = list; + } else { + break; + } + } return blobsBuilder.immutableMap(); - }); + } catch (final AmazonClientException e) { + throw new IOException("Exception when listing blobs by prefix [" + blobNamePrefix + "]", e); + } } @Override @@ -175,19 +174,20 @@ void executeSingleUpload(final S3BlobStore blobStore, throw new IllegalArgumentException("Upload request size [" + blobSize + "] can't be larger than buffer size"); } - try { - final ObjectMetadata md = new ObjectMetadata(); - md.setContentLength(blobSize); - if (blobStore.serverSideEncryption()) { - md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); - } - - final PutObjectRequest putRequest = new PutObjectRequest(blobStore.bucket(), blobName, input, md); - putRequest.setStorageClass(blobStore.getStorageClass()); - putRequest.setCannedAcl(blobStore.getCannedACL()); + final ObjectMetadata md = new ObjectMetadata(); + md.setContentLength(blobSize); + if (blobStore.serverSideEncryption()) { + md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + } + final PutObjectRequest putRequest = new PutObjectRequest(blobStore.bucket(), blobName, input, md); + putRequest.setStorageClass(blobStore.getStorageClass()); + putRequest.setCannedAcl(blobStore.getCannedACL()); - blobStore.client().putObject(putRequest); - } catch (AmazonClientException e) { + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + SocketAccess.doPrivilegedVoid(() -> { + clientReference.client().putObject(putRequest); + }); + } catch (final AmazonClientException e) { throw new IOException("Unable to upload object [" + blobName + "] using a single upload", e); } } @@ -218,23 +218,23 @@ void executeMultipartUpload(final S3BlobStore blobStore, final int nbParts = multiparts.v1().intValue(); final long lastPartSize = multiparts.v2(); - assert blobSize == (nbParts - 1) * partSize + lastPartSize : "blobSize does not match multipart sizes"; + assert blobSize == (((nbParts - 1) * partSize) + lastPartSize) : "blobSize does not match multipart sizes"; final SetOnce uploadId = new SetOnce<>(); final String bucketName = blobStore.bucket(); boolean success = false; - try { - final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, blobName); - initRequest.setStorageClass(blobStore.getStorageClass()); - initRequest.setCannedACL(blobStore.getCannedACL()); - if (blobStore.serverSideEncryption()) { - final ObjectMetadata md = new ObjectMetadata(); - md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); - initRequest.setObjectMetadata(md); - } + final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, blobName); + initRequest.setStorageClass(blobStore.getStorageClass()); + initRequest.setCannedACL(blobStore.getCannedACL()); + if 
(blobStore.serverSideEncryption()) { + final ObjectMetadata md = new ObjectMetadata(); + md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + initRequest.setObjectMetadata(md); + } + try (AmazonS3Reference clientReference = blobStore.clientReference()) { - uploadId.set(blobStore.client().initiateMultipartUpload(initRequest).getUploadId()); + uploadId.set(SocketAccess.doPrivileged(() -> clientReference.client().initiateMultipartUpload(initRequest).getUploadId())); if (Strings.isEmpty(uploadId.get())) { throw new IOException("Failed to initialize multipart upload " + blobName); } @@ -259,7 +259,7 @@ void executeMultipartUpload(final S3BlobStore blobStore, } bytesCount += uploadRequest.getPartSize(); - final UploadPartResult uploadResponse = blobStore.client().uploadPart(uploadRequest); + final UploadPartResult uploadResponse = SocketAccess.doPrivileged(() -> clientReference.client().uploadPart(uploadRequest)); parts.add(uploadResponse.getPartETag()); } @@ -268,16 +268,19 @@ void executeMultipartUpload(final S3BlobStore blobStore, + "bytes sent but got " + bytesCount); } - CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId.get(), parts); - blobStore.client().completeMultipartUpload(complRequest); + final CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId.get(), + parts); + SocketAccess.doPrivilegedVoid(() -> clientReference.client().completeMultipartUpload(complRequest)); success = true; - } catch (AmazonClientException e) { + } catch (final AmazonClientException e) { throw new IOException("Unable to upload object [" + blobName + "] using multipart upload", e); } finally { - if (success == false && Strings.hasLength(uploadId.get())) { + if ((success == false) && Strings.hasLength(uploadId.get())) { final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(bucketName, blobName, uploadId.get()); - blobStore.client().abortMultipartUpload(abortRequest); + try (AmazonS3Reference clientReference = blobStore.clientReference()) { + SocketAccess.doPrivilegedVoid(() -> clientReference.client().abortMultipartUpload(abortRequest)); + } } } } @@ -296,7 +299,7 @@ static Tuple numberOfMultiparts(final long totalSize, final long par throw new IllegalArgumentException("Part size must be greater than zero"); } - if (totalSize == 0L || totalSize <= partSize) { + if ((totalSize == 0L) || (totalSize <= partSize)) { return Tuple.tuple(1L, totalSize); } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index 27349f12135ed..c0f61e4d07828 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -19,13 +19,13 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; import com.amazonaws.services.s3.model.StorageClass; + import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import 
org.elasticsearch.common.blobstore.BlobStore; @@ -34,14 +34,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.io.IOException; import java.util.ArrayList; import java.util.Locale; class S3BlobStore extends AbstractComponent implements BlobStore { - private final AmazonS3 client; + private final AwsS3Service service; + + private final String clientName; private final String bucket; @@ -53,10 +54,11 @@ class S3BlobStore extends AbstractComponent implements BlobStore { private final StorageClass storageClass; - S3BlobStore(Settings settings, AmazonS3 client, String bucket, boolean serverSideEncryption, + S3BlobStore(Settings settings, AwsS3Service service, String clientName, String bucket, boolean serverSideEncryption, ByteSizeValue bufferSize, String cannedACL, String storageClass) { super(settings); - this.client = client; + this.service = service; + this.clientName = clientName; this.bucket = bucket; this.serverSideEncryption = serverSideEncryption; this.bufferSize = bufferSize; @@ -68,12 +70,14 @@ class S3BlobStore extends AbstractComponent implements BlobStore { // Also, if invalid security credentials are used to execute this method, the // client is not able to distinguish between bucket permission errors and // invalid credential errors, and this method could return an incorrect result. - SocketAccess.doPrivilegedVoid(() -> { - if (client.doesBucketExist(bucket) == false) { - throw new IllegalArgumentException("The bucket [" + bucket + "] does not exist. Please create it before " + - " creating an s3 snapshot repository backed by it."); - } - }); + try (AmazonS3Reference clientReference = clientReference()) { + SocketAccess.doPrivilegedVoid(() -> { + if (clientReference.client().doesBucketExist(bucket) == false) { + throw new IllegalArgumentException("The bucket [" + bucket + "] does not exist. 
Please create it before " + + " creating an s3 snapshot repository backed by it."); + } + }); + } } @Override @@ -81,8 +85,8 @@ public String toString() { return bucket; } - public AmazonS3 client() { - return client; + public AmazonS3Reference clientReference() { + return service.client(clientName); } public String bucket() { @@ -104,27 +108,30 @@ public BlobContainer blobContainer(BlobPath path) { @Override public void delete(BlobPath path) { - AccessController.doPrivileged((PrivilegedAction) () -> { + try (AmazonS3Reference clientReference = clientReference()) { ObjectListing prevListing = null; - //From http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html - //we can do at most 1K objects per delete - //We don't know the bucket name until first object listing + // From + // http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html + // we can do at most 1K objects per delete + // We don't know the bucket name until first object listing DeleteObjectsRequest multiObjectDeleteRequest = null; - ArrayList keys = new ArrayList<>(); + final ArrayList keys = new ArrayList<>(); while (true) { ObjectListing list; if (prevListing != null) { - list = client.listNextBatchOfObjects(prevListing); + final ObjectListing finalPrevListing = prevListing; + list = SocketAccess.doPrivileged(() -> clientReference.client().listNextBatchOfObjects(finalPrevListing)); } else { - list = client.listObjects(bucket, path.buildAsString()); + list = SocketAccess.doPrivileged(() -> clientReference.client().listObjects(bucket, path.buildAsString())); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); } - for (S3ObjectSummary summary : list.getObjectSummaries()) { + for (final S3ObjectSummary summary : list.getObjectSummaries()) { keys.add(new KeyVersion(summary.getKey())); - //Every 500 objects batch the delete request + // Every 500 objects batch the delete request if (keys.size() > 500) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + final DeleteObjectsRequest finalMultiObjectDeleteRequest = multiObjectDeleteRequest; + SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(finalMultiObjectDeleteRequest)); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); keys.clear(); } @@ -137,14 +144,15 @@ public void delete(BlobPath path) { } if (!keys.isEmpty()) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + final DeleteObjectsRequest finalMultiObjectDeleteRequest = multiObjectDeleteRequest; + SocketAccess.doPrivilegedVoid(() -> clientReference.client().deleteObjects(finalMultiObjectDeleteRequest)); } - return null; - }); + } } @Override - public void close() { + public void close() throws IOException { + this.service.close(); } public CannedAccessControlList getCannedACL() { @@ -154,18 +162,18 @@ public CannedAccessControlList getCannedACL() { public StorageClass getStorageClass() { return storageClass; } public static StorageClass initStorageClass(String storageClass) { - if (storageClass == null || storageClass.equals("")) { + if ((storageClass == null) || storageClass.equals("")) { return StorageClass.Standard; } try { - StorageClass _storageClass = StorageClass.fromValue(storageClass.toUpperCase(Locale.ENGLISH)); + final StorageClass _storageClass = StorageClass.fromValue(storageClass.toUpperCase(Locale.ENGLISH)); if (_storageClass.equals(StorageClass.Glacier)) { throw new 
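// [Editor's sketch, an assumption] AmazonS3Reference itself is introduced elsewhere in this patch;
// the class below only illustrates the shape implied by its usage here (try-with-resources,
// client(), incRef() in the tests) and is not the actual implementation. It is modelled on
// Elasticsearch's AbstractRefCounted so the wrapped AmazonS3 client is shut down only once the
// last in-flight user has released its reference.
import com.amazonaws.services.s3.AmazonS3;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.util.concurrent.AbstractRefCounted;

class AmazonS3ReferenceSketch extends AbstractRefCounted implements Releasable {
    private final AmazonS3 client;

    AmazonS3ReferenceSketch(AmazonS3 client) {
        super("AWS_S3_CLIENT");
        this.client = client;
    }

    public AmazonS3 client() {
        return client;          // callers must hold a reference while using the client
    }

    @Override
    public void close() {
        decRef();               // try-with-resources releases the caller's reference
    }

    @Override
    protected void closeInternal() {
        client.shutdown();      // runs only after every reference has been released
    }
}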
BlobStoreException("Glacier storage class is not supported"); } return _storageClass; - } catch (IllegalArgumentException illegalArgumentException) { + } catch (final IllegalArgumentException illegalArgumentException) { throw new BlobStoreException("`" + storageClass + "` is not a valid S3 Storage Class."); } } @@ -174,11 +182,11 @@ public static StorageClass initStorageClass(String storageClass) { * Constructs canned acl from string */ public static CannedAccessControlList initCannedACL(String cannedACL) { - if (cannedACL == null || cannedACL.equals("")) { + if ((cannedACL == null) || cannedACL.equals("")) { return CannedAccessControlList.Private; } - for (CannedAccessControlList cur : CannedAccessControlList.values()) { + for (final CannedAccessControlList cur : CannedAccessControlList.values()) { if (cur.toString().equalsIgnoreCase(cannedACL)) { return cur; } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java index 4d32d2518fff1..ef6088fe154bf 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3ClientSettings.java @@ -24,10 +24,11 @@ import java.util.Locale; import java.util.Map; import java.util.Set; - import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.BasicAWSCredentials; + +import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; @@ -38,7 +39,7 @@ /** * A container for settings used to create an S3 client. */ -class S3ClientSettings { +final class S3ClientSettings { // prefix for s3 client settings private static final String PREFIX = "s3.client."; @@ -119,7 +120,7 @@ class S3ClientSettings { /** Whether the s3 client should use an exponential backoff retry policy. */ final boolean throttleRetries; - private S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, + protected S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort, String proxyUsername, String proxyPassword, int readTimeoutMillis, int maxRetries, boolean throttleRetries) { this.credentials = credentials; @@ -140,9 +141,9 @@ private S3ClientSettings(BasicAWSCredentials credentials, String endpoint, Proto * Note this will always at least return a client named "default". */ static Map load(Settings settings) { - Set clientNames = settings.getGroups(PREFIX).keySet(); - Map clients = new HashMap<>(); - for (String clientName : clientNames) { + final Set clientNames = settings.getGroups(PREFIX).keySet(); + final Map clients = new HashMap<>(); + for (final String clientName : clientNames) { clients.put(clientName, getClientSettings(settings, clientName)); } if (clients.containsKey("default") == false) { @@ -153,23 +154,64 @@ static Map load(Settings settings) { return Collections.unmodifiableMap(clients); } - // pkg private for tests - /** Parse settings for a single client. 
*/ - static S3ClientSettings getClientSettings(Settings settings, String clientName) { + static Map overrideCredentials(Map clientsSettings, + BasicAWSCredentials credentials) { + final MapBuilder mapBuilder = new MapBuilder<>(); + for (final Map.Entry entry : clientsSettings.entrySet()) { + final S3ClientSettings s3ClientSettings = new S3ClientSettings(credentials, entry.getValue().endpoint, + entry.getValue().protocol, entry.getValue().proxyHost, entry.getValue().proxyPort, entry.getValue().proxyUsername, + entry.getValue().proxyPassword, entry.getValue().readTimeoutMillis, entry.getValue().maxRetries, + entry.getValue().throttleRetries); + mapBuilder.put(entry.getKey(), s3ClientSettings); + } + return mapBuilder.immutableMap(); + } + + static boolean checkDeprecatedCredentials(Settings repositorySettings) { + if (S3Repository.ACCESS_KEY_SETTING.exists(repositorySettings)) { + if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings) == false) { + throw new IllegalArgumentException("Repository setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + + " must be accompanied by setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + "]"); + } + return true; + } else if (S3Repository.SECRET_KEY_SETTING.exists(repositorySettings)) { + throw new IllegalArgumentException("Repository setting [" + S3Repository.SECRET_KEY_SETTING.getKey() + + " must be accompanied by setting [" + S3Repository.ACCESS_KEY_SETTING.getKey() + "]"); + } + return false; + } + + // backcompat for reading keys out of repository settings (clusterState) + static BasicAWSCredentials loadDeprecatedCredentials(Settings repositorySettings) { + assert checkDeprecatedCredentials(repositorySettings); + try (SecureString key = S3Repository.ACCESS_KEY_SETTING.get(repositorySettings); + SecureString secret = S3Repository.SECRET_KEY_SETTING.get(repositorySettings)) { + return new BasicAWSCredentials(key.toString(), secret.toString()); + } + } + + static BasicAWSCredentials loadCredentials(Settings settings, String clientName) { try (SecureString accessKey = getConfigValue(settings, clientName, ACCESS_KEY_SETTING); - SecureString secretKey = getConfigValue(settings, clientName, SECRET_KEY_SETTING); - SecureString proxyUsername = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); - SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING)) { - BasicAWSCredentials credentials = null; + SecureString secretKey = getConfigValue(settings, clientName, SECRET_KEY_SETTING);) { if (accessKey.length() != 0) { if (secretKey.length() != 0) { - credentials = new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); + return new BasicAWSCredentials(accessKey.toString(), secretKey.toString()); } else { throw new IllegalArgumentException("Missing secret key for s3 client [" + clientName + "]"); } } else if (secretKey.length() != 0) { throw new IllegalArgumentException("Missing access key for s3 client [" + clientName + "]"); } + return null; + } + } + + // pkg private for tests + /** Parse settings for a single client. 
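// [Editor's sketch, not part of the patch] After this change every named client's credentials come
// from keystore-backed secure settings and are parsed once by S3ClientSettings.load(). A small
// illustration in the style of the tests later in this patch; the client name "backup" and the key
// values are invented, and the method is assumed to sit in a test class extending ESTestCase:
public void testNamedClientCredentialsSketch() {
    final MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("s3.client.backup.access_key", "my_access_key");
    secureSettings.setString("s3.client.backup.secret_key", "my_secret_key");
    final Settings nodeSettings = Settings.builder().setSecureSettings(secureSettings).build();
    final Map<String, S3ClientSettings> clients = S3ClientSettings.load(nodeSettings);
    assertTrue(clients.containsKey("backup"));     // credentials parsed by loadCredentials()
    assertTrue(clients.containsKey("default"));    // a "default" client always exists
}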
*/ + static S3ClientSettings getClientSettings(Settings settings, String clientName) { + final BasicAWSCredentials credentials = S3ClientSettings.loadCredentials(settings, clientName); + try (SecureString proxyUsername = getConfigValue(settings, clientName, PROXY_USERNAME_SETTING); + SecureString proxyPassword = getConfigValue(settings, clientName, PROXY_PASSWORD_SETTING)) { return new S3ClientSettings( credentials, getConfigValue(settings, clientName, ENDPOINT_SETTING), @@ -187,7 +229,7 @@ static S3ClientSettings getClientSettings(Settings settings, String clientName) private static T getConfigValue(Settings settings, String clientName, Setting.AffixSetting clientSetting) { - Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); + final Setting concreteSetting = clientSetting.getConcreteSettingForNamespace(clientName); return concreteSetting.get(settings); } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index c185027d67f26..063e266837bad 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -19,7 +19,8 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.auth.BasicAWSCredentials; + import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; @@ -35,6 +36,9 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import java.io.IOException; +import java.util.Map; +import java.util.function.Function; /** * Shared file system implementation of the BlobStoreRepository @@ -134,6 +138,8 @@ class S3Repository extends BlobStoreRepository { */ static final Setting CANNED_ACL_SETTING = Setting.simpleString("canned_acl"); + static final Setting CLIENT_NAME = new Setting<>("client", "default", Function.identity()); + /** * Specifies the path within bucket to repository data. Defaults to root directory. 
*/ @@ -143,23 +149,24 @@ class S3Repository extends BlobStoreRepository { private final BlobPath basePath; - private ByteSizeValue chunkSize; + private final ByteSizeValue chunkSize; - private boolean compress; + private final boolean compress; /** * Constructs an s3 backed repository */ - S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, AwsS3Service s3Service) { + S3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry, + AwsS3Service awsService) throws IOException { super(metadata, settings, namedXContentRegistry); - String bucket = BUCKET_SETTING.get(metadata.settings()); + final String bucket = BUCKET_SETTING.get(metadata.settings()); if (bucket == null) { throw new RepositoryException(metadata.name(), "No bucket defined for s3 repository"); } - boolean serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); - ByteSizeValue bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); + final boolean serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); + final ByteSizeValue bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = COMPRESS_SETTING.get(metadata.settings()); @@ -170,17 +177,22 @@ class S3Repository extends BlobStoreRepository { } // Parse and validate the user's S3 Storage Class setting - String storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); - String cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); + final String storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); + final String cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); + final String clientName = CLIENT_NAME.get(metadata.settings()); logger.debug("using bucket [{}], chunk_size [{}], server_side_encryption [{}], " + "buffer_size [{}], cannedACL [{}], storageClass [{}]", bucket, chunkSize, serverSideEncryption, bufferSize, cannedACL, storageClass); - AmazonS3 client = s3Service.client(metadata.settings()); - blobStore = new S3BlobStore(settings, client, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + // deprecated behavior: override client credentials from the cluster state + // (repository settings) + if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) { + overrideCredentialsFromClusterState(awsService); + } + blobStore = new S3BlobStore(settings, awsService, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); - String basePath = BASE_PATH_SETTING.get(metadata.settings()); + final String basePath = BASE_PATH_SETTING.get(metadata.settings()); if (Strings.hasLength(basePath)) { this.basePath = new BlobPath().add(basePath); } else { @@ -207,4 +219,14 @@ protected boolean isCompress() { protected ByteSizeValue chunkSize() { return chunkSize; } + + void overrideCredentialsFromClusterState(AwsS3Service awsService) { + deprecationLogger.deprecated("Using s3 access/secret key from repository settings. 
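// [Editor's sketch, not part of the patch] With the constructor changes above a repository selects
// a named client through the new "client" setting (S3Repository.CLIENT_NAME); inline access_key /
// secret_key repository settings still work but go through checkDeprecatedCredentials() and
// overrideCredentialsFromClusterState(), emitting deprecation warnings. Both variants, with
// invented values, for illustration:
static void repositorySettingsSketch() {
    final Settings preferred = Settings.builder()
        .put("bucket", "my-bucket")
        .put("client", "backup")            // resolved against the keystore-defined named clients
        .build();
    final Settings deprecated = Settings.builder()
        .put("bucket", "my-bucket")
        .put("access_key", "inline_key")    // triggers the deprecated cluster-state override path
        .put("secret_key", "inline_secret")
        .build();
}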
Instead " + + "store these in named clients and the elasticsearch keystore for secure settings."); + final BasicAWSCredentials insecureCredentials = S3ClientSettings.loadDeprecatedCredentials(metadata.settings()); + // hack, but that's ok because the whole if branch should be axed + final Map prevSettings = awsService.refreshAndClearCache(S3ClientSettings.load(Settings.EMPTY)); + final Map newSettings = S3ClientSettings.overrideCredentials(prevSettings, insecureCredentials); + awsService.refreshAndClearCache(newSettings); + } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index e31495efc0eef..93561c94d2b9a 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -19,6 +19,7 @@ package org.elasticsearch.repositories.s3; +import java.io.IOException; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; @@ -28,18 +29,20 @@ import com.amazonaws.util.json.Jackson; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.plugins.RepositoryPlugin; import org.elasticsearch.repositories.Repository; /** * A plugin to add a repository type that writes to and from the AWS S3. 
*/ -public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin { +public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, ReloadablePlugin { static { SpecialPermission.check(); @@ -50,30 +53,40 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin { // ClientConfiguration clinit has some classloader problems // TODO: fix that Class.forName("com.amazonaws.ClientConfiguration"); - } catch (ClassNotFoundException e) { + } catch (final ClassNotFoundException e) { throw new RuntimeException(e); } return null; }); } - private final Map clientsSettings; + private final AwsS3Service awsS3Service; public S3RepositoryPlugin(Settings settings) { + this.awsS3Service = getAwsS3Service(settings); // eagerly load client settings so that secure settings are read - clientsSettings = S3ClientSettings.load(settings); - assert clientsSettings.isEmpty() == false : "always at least have 'default'"; + final Map clientsSettings = S3ClientSettings.load(settings); + this.awsS3Service.refreshAndClearCache(clientsSettings); } - // overridable for tests - protected AwsS3Service createStorageService(Settings settings) { - return new InternalAwsS3Service(settings, clientsSettings); + protected S3RepositoryPlugin(AwsS3Service awsS3Service) { + this.awsS3Service = awsS3Service; + } + + // proxy method for testing + protected S3Repository getS3Repository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry namedXContentRegistry) + throws IOException { + return new S3Repository(metadata, settings, namedXContentRegistry, awsS3Service); + } + + // proxy method for testing + protected AwsS3Service getAwsS3Service(Settings settings) { + return new InternalAwsS3Service(settings); } @Override public Map getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) { - return Collections.singletonMap(S3Repository.TYPE, - (metadata) -> new S3Repository(metadata, env.settings(), namedXContentRegistry, createStorageService(env.settings()))); + return Collections.singletonMap(S3Repository.TYPE, (metadata) -> getS3Repository(metadata, env.settings(), namedXContentRegistry)); } @Override @@ -94,4 +107,16 @@ public List> getSettings() { S3Repository.ACCESS_KEY_SETTING, S3Repository.SECRET_KEY_SETTING); } + + @Override + public void reload(Settings settings) { + // secure settings should be readable + final Map clientsSettings = S3ClientSettings.load(settings); + awsS3Service.refreshAndClearCache(clientsSettings); + } + + @Override + public void close() throws IOException { + awsS3Service.close(); + } } diff --git a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy index d8fca1fc89938..5fd69b4c2fc3f 100644 --- a/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/repository-s3/src/main/plugin-metadata/plugin-security.policy @@ -37,4 +37,7 @@ grant { // s3 client opens socket connections for to access repository permission java.net.SocketPermission "*", "connect"; + + // only for tests : org.elasticsearch.repositories.s3.S3RepositoryPlugin + permission java.util.PropertyPermission "es.allow_insecure_settings", "read,write"; }; diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java index b40dc75c83701..dd829ee90c12f 100644 --- 
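// [Editor's sketch, not part of the patch] Implementing ReloadablePlugin means rotated secure
// settings can be applied without a node restart: reload() re-parses S3ClientSettings and calls
// refreshAndClearCache() on the service. A minimal restatement of that flow; both Settings
// parameters are placeholders built from a keystore, as in testReinitSecureCredentials below:
static void reloadSketch(Settings initialSettings, Settings rotatedSettings) throws IOException {
    try (S3RepositoryPlugin plugin = new S3RepositoryPlugin(initialSettings)) {
        // client references acquired before this point keep the credentials they were built with
        plugin.reload(rotatedSettings);   // loads S3ClientSettings and refreshes the cached clients
        // references acquired after the reload pick up the rotated credentials
    }
}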
a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AbstractS3SnapshotRestoreTest.java @@ -19,7 +19,6 @@ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.DeleteObjectsRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.S3ObjectSummary; @@ -180,13 +179,13 @@ public void testEncryption() { Settings settings = internalCluster().getInstance(Settings.class); Settings bucket = settings.getByPrefix("repositories.s3."); - AmazonS3 s3Client = internalCluster().getInstance(AwsS3Service.class).client(repositorySettings); - - String bucketName = bucket.get("bucket"); - logger.info("--> verify encryption for bucket [{}], prefix [{}]", bucketName, basePath); - List summaries = s3Client.listObjects(bucketName, basePath).getObjectSummaries(); - for (S3ObjectSummary summary : summaries) { - assertThat(s3Client.getObjectMetadata(bucketName, summary.getKey()).getSSEAlgorithm(), equalTo("AES256")); + try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) { + String bucketName = bucket.get("bucket"); + logger.info("--> verify encryption for bucket [{}], prefix [{}]", bucketName, basePath); + List summaries = s3Client.client().listObjects(bucketName, basePath).getObjectSummaries(); + for (S3ObjectSummary summary : summaries) { + assertThat(s3Client.client().getObjectMetadata(bucketName, summary.getKey()).getSSEAlgorithm(), equalTo("AES256")); + } } logger.info("--> delete some data"); @@ -443,8 +442,7 @@ public void cleanRepositoryFiles(String basePath) { // We check that settings has been set in elasticsearch.yml integration test file // as described in README assertThat("Your settings in elasticsearch.yml are incorrect. 
Check README file.", bucketName, notNullValue()); - AmazonS3 client = internalCluster().getInstance(AwsS3Service.class).client(Settings.EMPTY); - try { + try (AmazonS3Reference s3Client = internalCluster().getInstance(AwsS3Service.class).client("default")) { ObjectListing prevListing = null; //From http://docs.amazonwebservices.com/AmazonS3/latest/dev/DeletingMultipleObjectsUsingJava.html //we can do at most 1K objects per delete @@ -454,9 +452,9 @@ public void cleanRepositoryFiles(String basePath) { while (true) { ObjectListing list; if (prevListing != null) { - list = client.listNextBatchOfObjects(prevListing); + list = s3Client.client().listNextBatchOfObjects(prevListing); } else { - list = client.listObjects(bucketName, basePath); + list = s3Client.client().listObjects(bucketName, basePath); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); } for (S3ObjectSummary summary : list.getObjectSummaries()) { @@ -464,7 +462,7 @@ public void cleanRepositoryFiles(String basePath) { //Every 500 objects batch the delete request if (keys.size() > 500) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + s3Client.client().deleteObjects(multiObjectDeleteRequest); multiObjectDeleteRequest = new DeleteObjectsRequest(list.getBucketName()); keys.clear(); } @@ -477,7 +475,7 @@ public void cleanRepositoryFiles(String basePath) { } if (!keys.isEmpty()) { multiObjectDeleteRequest.setKeys(keys); - client.deleteObjects(multiObjectDeleteRequest); + s3Client.client().deleteObjects(multiObjectDeleteRequest); } } catch (Exception ex) { logger.warn((Supplier) () -> new ParameterizedMessage("Failed to delete S3 repository [{}]", bucketName), ex); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java index bcab130e7d531..91b364011b80a 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Wrapper.java @@ -727,4 +727,9 @@ public BucketReplicationConfiguration getBucketReplicationConfiguration(GetBucke public HeadBucketResult headBucket(HeadBucketRequest headBucketRequest) throws AmazonClientException, AmazonServiceException { return delegate.headBucket(headBucketRequest); } + + @Override + public void shutdown() { + delegate.shutdown(); + } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java index 353de31fa1873..6f55f3ed345df 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AwsS3ServiceImplTests.java @@ -21,75 +21,89 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; -import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.internal.StaticCredentialsProvider; + import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; +import java.util.Locale; +import java.util.Map; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class 
AwsS3ServiceImplTests extends ESTestCase { - public void testAWSCredentialsWithSystemProviders() { - S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(Settings.EMPTY, "default"); - AWSCredentialsProvider credentialsProvider = - InternalAwsS3Service.buildCredentials(logger, deprecationLogger, clientSettings, Settings.EMPTY); + public void testAWSCredentialsDefaultToInstanceProviders() { + final String inexistentClientName = randomAlphaOfLength(8).toLowerCase(Locale.ROOT); + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(Settings.EMPTY, inexistentClientName); + final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, clientSettings); assertThat(credentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class)); } - public void testAwsCredsDefaultSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("s3.client.default.access_key", "aws_key"); - secureSettings.setString("s3.client.default.secret_key", "aws_secret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - assertCredentials(Settings.EMPTY, settings, "aws_key", "aws_secret"); - } - - public void testAwsCredsExplicitConfigSettings() { - Settings repositorySettings = Settings.builder().put(InternalAwsS3Service.CLIENT_NAME.getKey(), "myconfig").build(); - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("s3.client.myconfig.access_key", "aws_key"); - secureSettings.setString("s3.client.myconfig.secret_key", "aws_secret"); - secureSettings.setString("s3.client.default.access_key", "wrong_key"); - secureSettings.setString("s3.client.default.secret_key", "wrong_secret"); - Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); - assertCredentials(repositorySettings, settings, "aws_key", "aws_secret"); - } - - public void testRepositorySettingsCredentialsDisallowed() { - Settings repositorySettings = Settings.builder() - .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "aws_key") - .put(S3Repository.SECRET_KEY_SETTING.getKey(), "aws_secret").build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - assertCredentials(repositorySettings, Settings.EMPTY, "aws_key", "aws_secret")); - assertThat(e.getMessage(), containsString("Setting [access_key] is insecure")); - } - - public void testRepositorySettingsCredentialsMissingKey() { - Settings repositorySettings = Settings.builder().put(S3Repository.SECRET_KEY_SETTING.getKey(), "aws_secret").build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - assertCredentials(repositorySettings, Settings.EMPTY, "aws_key", "aws_secret")); - assertThat(e.getMessage(), containsString("must be accompanied by setting [access_key]")); + public void testAWSCredentialsFromKeystore() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + final String clientNamePrefix = "some_client_name_"; + final int clientsCount = randomIntBetween(0, 4); + for (int i = 0; i < clientsCount; i++) { + final String clientName = clientNamePrefix + i; + secureSettings.setString("s3.client." + clientName + ".access_key", clientName + "_aws_access_key"); + secureSettings.setString("s3.client." 
+ clientName + ".secret_key", clientName + "_aws_secret_key"); + } + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Map allClientsSettings = S3ClientSettings.load(settings); + // no less, no more + assertThat(allClientsSettings.size(), is(clientsCount + 1)); // including default + for (int i = 0; i < clientsCount; i++) { + final String clientName = clientNamePrefix + i; + final S3ClientSettings someClientSettings = allClientsSettings.get(clientName); + final AWSCredentialsProvider credentialsProvider = InternalAwsS3Service.buildCredentials(logger, someClientSettings); + assertThat(credentialsProvider, instanceOf(StaticCredentialsProvider.class)); + assertThat(credentialsProvider.getCredentials().getAWSAccessKeyId(), is(clientName + "_aws_access_key")); + assertThat(credentialsProvider.getCredentials().getAWSSecretKey(), is(clientName + "_aws_secret_key")); + } + // test default exists and is an Instance provider + final S3ClientSettings defaultClientSettings = allClientsSettings.get("default"); + final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings); + assertThat(defaultCredentialsProvider, instanceOf(InternalAwsS3Service.PrivilegedInstanceProfileCredentialsProvider.class)); } - public void testRepositorySettingsCredentialsMissingSecret() { - Settings repositorySettings = Settings.builder().put(S3Repository.ACCESS_KEY_SETTING.getKey(), "aws_key").build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - assertCredentials(repositorySettings, Settings.EMPTY, "aws_key", "aws_secret")); - assertThat(e.getMessage(), containsString("must be accompanied by setting [secret_key]")); + public void testSetDefaultCredential() { + final MockSecureSettings secureSettings = new MockSecureSettings(); + final String awsAccessKey = randomAlphaOfLength(8); + final String awsSecretKey = randomAlphaOfLength(8); + secureSettings.setString("s3.client.default.access_key", awsAccessKey); + secureSettings.setString("s3.client.default.secret_key", awsSecretKey); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Map allClientsSettings = S3ClientSettings.load(settings); + assertThat(allClientsSettings.size(), is(1)); + // test default exists and is an Instance provider + final S3ClientSettings defaultClientSettings = allClientsSettings.get("default"); + final AWSCredentialsProvider defaultCredentialsProvider = InternalAwsS3Service.buildCredentials(logger, defaultClientSettings); + assertThat(defaultCredentialsProvider, instanceOf(StaticCredentialsProvider.class)); + assertThat(defaultCredentialsProvider.getCredentials().getAWSAccessKeyId(), is(awsAccessKey)); + assertThat(defaultCredentialsProvider.getCredentials().getAWSSecretKey(), is(awsSecretKey)); } - private void assertCredentials(Settings singleRepositorySettings, Settings settings, - String expectedKey, String expectedSecret) { - String configName = InternalAwsS3Service.CLIENT_NAME.get(singleRepositorySettings); - S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, configName); - AWSCredentials credentials = InternalAwsS3Service.buildCredentials(logger, deprecationLogger, - clientSettings, singleRepositorySettings).getCredentials(); - assertThat(credentials.getAWSAccessKeyId(), is(expectedKey)); - assertThat(credentials.getAWSSecretKey(), is(expectedSecret)); + public void testCredentialsIncomplete() { + final MockSecureSettings 
secureSettings = new MockSecureSettings(); + final String clientName = randomAlphaOfLength(8).toLowerCase(Locale.ROOT); + final boolean missingOrMissing = randomBoolean(); + if (missingOrMissing) { + secureSettings.setString("s3.client." + clientName + ".access_key", "aws_access_key"); + } else { + secureSettings.setString("s3.client." + clientName + ".secret_key", "aws_secret_key"); + } + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + final Exception e = expectThrows(IllegalArgumentException.class, () -> S3ClientSettings.load(settings)); + if (missingOrMissing) { + assertThat(e.getMessage(), containsString("Missing secret key for s3 client [" + clientName + "]")); + } else { + assertThat(e.getMessage(), containsString("Missing access key for s3 client [" + clientName + "]")); + } } public void testAWSDefaultConfiguration() { @@ -98,10 +112,10 @@ public void testAWSDefaultConfiguration() { } public void testAWSConfigurationWithAwsSettings() { - MockSecureSettings secureSettings = new MockSecureSettings(); + final MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("s3.client.default.proxy.username", "aws_proxy_username"); secureSettings.setString("s3.client.default.proxy.password", "aws_proxy_password"); - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .setSecureSettings(secureSettings) .put("s3.client.default.protocol", "http") .put("s3.client.default.proxy.host", "aws_proxy_host") @@ -113,7 +127,7 @@ public void testAWSConfigurationWithAwsSettings() { } public void testRepositoryMaxRetries() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put("s3.client.default.max_retries", 5) .build(); launchAWSConfigurationTest(settings, Protocol.HTTPS, null, -1, null, @@ -123,7 +137,7 @@ public void testRepositoryMaxRetries() { public void testRepositoryThrottleRetries() { final boolean throttling = randomBoolean(); - Settings settings = Settings.builder().put("s3.client.default.use_throttle_retries", throttling).build(); + final Settings settings = Settings.builder().put("s3.client.default.use_throttle_retries", throttling).build(); launchAWSConfigurationTest(settings, Protocol.HTTPS, null, -1, null, null, 3, throttling, 50000); } @@ -137,8 +151,8 @@ private void launchAWSConfigurationTest(Settings settings, boolean expectedUseThrottleRetries, int expectedReadTimeout) { - S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); - ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings); + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default"); + final ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings); assertThat(configuration.getResponseMetadataCacheSize(), is(0)); assertThat(configuration.getProtocol(), is(expectedProtocol)); @@ -152,15 +166,15 @@ private void launchAWSConfigurationTest(Settings settings, } public void testEndpointSetting() { - Settings settings = Settings.builder() + final Settings settings = Settings.builder() .put("s3.client.default.endpoint", "s3.endpoint") .build(); assertEndpoint(Settings.EMPTY, settings, "s3.endpoint"); } private void assertEndpoint(Settings repositorySettings, Settings settings, String expectedEndpoint) { - String configName = InternalAwsS3Service.CLIENT_NAME.get(repositorySettings); - S3ClientSettings clientSettings = 
S3ClientSettings.getClientSettings(settings, configName); + final String configName = S3Repository.CLIENT_NAME.get(repositorySettings); + final S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, configName); assertThat(clientSettings.endpoint, is(expectedEndpoint)); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java index 33d5d5fbc2038..d610e6d74a06d 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockAmazonS3.java @@ -157,6 +157,11 @@ public void deleteObject(final DeleteObjectRequest request) throws AmazonClientE throw exception; } } + + @Override + public void shutdown() { + // TODO check close + } @Override public DeleteObjectsResult deleteObjects(DeleteObjectsRequest request) throws SdkClientException { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java new file mode 100644 index 0000000000000..f3bd894977999 --- /dev/null +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -0,0 +1,211 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.repositories.s3; + +import com.amazonaws.ClientConfiguration; +import com.amazonaws.auth.AWSCredentials; +import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.services.s3.AmazonS3; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.security.AccessController; +import java.security.PrivilegedAction; + +import static org.hamcrest.Matchers.is; + +@SuppressForbidden(reason = "test fixture requires System.setProperty") +public class RepositoryCredentialsTests extends ESTestCase { + + static { + AccessController.doPrivileged((PrivilegedAction) () -> { + // required for client settings overwriting + System.setProperty("es.allow_insecure_settings", "true"); + return null; + }); + } + + static final class ProxyS3RepositoryPlugin extends S3RepositoryPlugin { + + static final class ClientAndCredentials extends AmazonS3Wrapper { + final AWSCredentialsProvider credentials; + + ClientAndCredentials(AmazonS3 delegate, AWSCredentialsProvider credentials) { + super(delegate); + this.credentials = credentials; + } + + @Override + public boolean doesBucketExist(String bucketName) { + return true; + } + } + + static final class ProxyInternalAwsS3Service extends InternalAwsS3Service { + + ProxyInternalAwsS3Service(Settings settings) { + super(settings); + } + + @Override + AmazonS3 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) { + final AmazonS3 client = super.buildClient(credentials, configuration); + return new ClientAndCredentials(client, credentials); + } + + } + + protected ProxyS3RepositoryPlugin(Settings settings) { + super(settings); + } + + @Override + protected AwsS3Service getAwsS3Service(Settings settings) { + return new ProxyInternalAwsS3Service(settings); + } + + } + + public void testRepositoryCredentialsOverrideSecureCredentials() throws IOException { + final int clientsCount = randomIntBetween(0, 4); + final String[] clientNames = new String[clientsCount + 1]; + clientNames[0] = "default"; + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client.default.access_key", "secure_aws_key"); + secureSettings.setString("s3.client.default.secret_key", "secure_aws_secret"); + for (int i = 0; i < clientsCount; i++) { + final String clientName = "client_" + i; + secureSettings.setString("s3.client." + clientName + ".access_key", "secure_aws_key_" + i); + secureSettings.setString("s3.client." 
+ clientName + ".secret_key", "secure_aws_secret_" + i); + clientNames[i + 1] = clientName; + } + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + // repository settings for credentials override node secure settings + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() + .put(S3Repository.CLIENT_NAME.getKey(), randomFrom(clientNames)) + .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key") + .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret").build()); + try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); + S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } + assertWarnings( + "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version.", + "Using s3 access/secret key from repository settings. Instead store these in named clients and" + + " the elasticsearch keystore for secure settings.", + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version."); + } + + public void testRepositoryCredentialsOnly() throws IOException { + // repository settings for credentials override node secure settings + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", + Settings.builder() + .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key") + .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret") + .build()); + try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(Settings.EMPTY); + S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } + assertWarnings( + "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version.", + "Using s3 access/secret key from repository settings. Instead store these in named clients and" + + " the elasticsearch keystore for secure settings.", + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version."); + } + + public void testReinitSecureCredentials() throws IOException { + final String clientName = randomFrom("default", "some_client"); + // initial client node settings + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("s3.client." + clientName + ".access_key", "secure_aws_key"); + secureSettings.setString("s3.client." 
+ clientName + ".secret_key", "secure_aws_secret"); + final Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); + // repository settings + final Settings.Builder builder = Settings.builder().put(S3Repository.CLIENT_NAME.getKey(), clientName); + final boolean repositorySettings = randomBoolean(); + if (repositorySettings) { + builder.put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key"); + builder.put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret"); + } + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", builder.build()); + try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); + S3Repository s3repo = s3Plugin.getS3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) { + try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials + .getCredentials(); + if (repositorySettings) { + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } else { + assertThat(credentials.getAWSAccessKeyId(), is("secure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("secure_aws_secret")); + } + // new settings + final MockSecureSettings newSecureSettings = new MockSecureSettings(); + newSecureSettings.setString("s3.client." + clientName + ".access_key", "new_secret_aws_key"); + newSecureSettings.setString("s3.client." + clientName + ".secret_key", "new_secret_aws_secret"); + final Settings newSettings = Settings.builder().setSecureSettings(newSecureSettings).build(); + // reload S3 plugin settings + s3Plugin.reload(newSettings); + // check the not-yet-closed client reference still has the same credentials + if (repositorySettings) { + assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); + } else { + assertThat(credentials.getAWSAccessKeyId(), is("secure_aws_key")); + assertThat(credentials.getAWSSecretKey(), is("secure_aws_secret")); + } + } + // check credentials have been updated + try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + final AWSCredentials newCredentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials + .getCredentials(); + assertThat(newCredentials.getAWSAccessKeyId(), is("new_secret_aws_key")); + assertThat(newCredentials.getAWSSecretKey(), is("new_secret_aws_secret")); + } + } + if (repositorySettings) { + assertWarnings( + "[secret_key] setting was deprecated in Elasticsearch and will be removed in a future release!" + + " See the breaking changes documentation for the next major version.", + "Using s3 access/secret key from repository settings. Instead store these in named clients and" + + " the elasticsearch keystore for secure settings.", + "[access_key] setting was deprecated in Elasticsearch and will be removed in a future release!" 
+ + " See the breaking changes documentation for the next major version."); + } + } + +} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java deleted file mode 100644 index c3e7069fdfd65..0000000000000 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositorySettingsCredentialsTests.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.repositories.s3; - -import com.amazonaws.auth.AWSCredentials; -import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; - -public class RepositorySettingsCredentialsTests extends ESTestCase { - - public void testRepositorySettingsCredentials() { - Settings repositorySettings = Settings.builder() - .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "aws_key") - .put(S3Repository.SECRET_KEY_SETTING.getKey(), "aws_secret").build(); - AWSCredentials credentials = InternalAwsS3Service.buildCredentials(logger, deprecationLogger, - S3ClientSettings.getClientSettings(Settings.EMPTY, "default"), repositorySettings).getCredentials(); - assertEquals("aws_key", credentials.getAWSAccessKeyId()); - assertEquals("aws_secret", credentials.getAWSSecretKey()); - assertSettingDeprecationsAndWarnings(new Setting[] { S3Repository.ACCESS_KEY_SETTING, S3Repository.SECRET_KEY_SETTING }, - "Using s3 access/secret key from repository settings. 
" + - "Instead store these in named clients and the elasticsearch keystore for secure settings."); - } -} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java index c760e86d1353f..b2afd826c5b8e 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java @@ -57,6 +57,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; +import static org.mockito.Mockito.doAnswer; public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { @@ -74,7 +75,7 @@ public void testExecuteSingleUploadBlobSizeTooLarge() { final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> blobContainer.executeSingleUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize)); assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage()); } @@ -86,7 +87,7 @@ public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() { final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); final String blobName = randomAlphaOfLengthBetween(1, 10); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> blobContainer.executeSingleUpload(blobStore, blobName, new ByteArrayInputStream(new byte[0]), ByteSizeUnit.MB.toBytes(2))); assertEquals("Upload request size [2097152] can't be larger than buffer size", e.getMessage()); } @@ -121,7 +122,8 @@ public void testExecuteSingleUpload() throws IOException { } final AmazonS3 client = mock(AmazonS3.class); - when(blobStore.client()).thenReturn(client); + final AmazonS3Reference clientReference = new AmazonS3Reference(client); + when(blobStore.clientReference()).thenReturn(clientReference); final ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(PutObjectRequest.class); when(client.putObject(argumentCaptor.capture())).thenReturn(new PutObjectResult()); @@ -146,7 +148,7 @@ public void testExecuteMultipartUploadBlobSizeTooLarge() { final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage()); @@ -157,7 +159,7 @@ public void testExecuteMultipartUploadBlobSizeTooSmall() { final S3BlobStore blobStore = mock(S3BlobStore.class); final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> 
blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) ); assertEquals("Multipart upload request size [" + blobSize + "] can't be smaller than 5mb", e.getMessage()); @@ -191,7 +193,8 @@ public void testExecuteMultipartUpload() throws IOException { } final AmazonS3 client = mock(AmazonS3.class); - when(blobStore.client()).thenReturn(client); + final AmazonS3Reference clientReference = new AmazonS3Reference(client); + when(blobStore.clientReference()).thenReturn(clientReference); final ArgumentCaptor initArgCaptor = ArgumentCaptor.forClass(InitiateMultipartUploadRequest.class); final InitiateMultipartUploadResult initResult = new InitiateMultipartUploadResult(); @@ -201,7 +204,7 @@ public void testExecuteMultipartUpload() throws IOException { final ArgumentCaptor uploadArgCaptor = ArgumentCaptor.forClass(UploadPartRequest.class); final List expectedEtags = new ArrayList<>(); - long partSize = Math.min(bufferSize, blobSize); + final long partSize = Math.min(bufferSize, blobSize); long totalBytes = 0; do { expectedEtags.add(randomAlphaOfLength(50)); @@ -238,7 +241,7 @@ public void testExecuteMultipartUpload() throws IOException { assertEquals(numberOfParts.v1().intValue(), uploadRequests.size()); for (int i = 0; i < uploadRequests.size(); i++) { - UploadPartRequest uploadRequest = uploadRequests.get(i); + final UploadPartRequest uploadRequest = uploadRequests.get(i); assertEquals(bucketName, uploadRequest.getBucketName()); assertEquals(blobPath.buildAsString() + blobName, uploadRequest.getKey()); @@ -260,7 +263,7 @@ public void testExecuteMultipartUpload() throws IOException { assertEquals(blobPath.buildAsString() + blobName, compRequest.getKey()); assertEquals(initResult.getUploadId(), compRequest.getUploadId()); - List actualETags = compRequest.getPartETags().stream().map(PartETag::getETag).collect(Collectors.toList()); + final List actualETags = compRequest.getPartETags().stream().map(PartETag::getETag).collect(Collectors.toList()); assertEquals(expectedEtags, actualETags); } @@ -278,7 +281,11 @@ public void testExecuteMultipartUploadAborted() { when(blobStore.getStorageClass()).thenReturn(randomFrom(StorageClass.values())); final AmazonS3 client = mock(AmazonS3.class); - when(blobStore.client()).thenReturn(client); + final AmazonS3Reference clientReference = new AmazonS3Reference(client); + doAnswer(invocation -> { + clientReference.incRef(); + return clientReference; + }).when(blobStore).clientReference(); final String uploadId = randomAlphaOfLength(25); @@ -360,7 +367,7 @@ public void testExecuteMultipartUploadAborted() { } public void testNumberOfMultipartsWithZeroPartSize() { - IllegalArgumentException e = + final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> S3BlobContainer.numberOfMultiparts(randomNonNegativeLong(), 0L)); assertEquals("Part size must be greater than zero", e.getMessage()); } @@ -382,7 +389,7 @@ public void testNumberOfMultiparts() { // Fits in N parts plus a bit more final long remaining = randomIntBetween(1, (size > Integer.MAX_VALUE) ? 
Integer.MAX_VALUE : (int) size - 1); - assertNumberOfMultiparts(factor + 1, remaining, size * factor + remaining, size); + assertNumberOfMultiparts(factor + 1, remaining, (size * factor) + remaining, size); } private static void assertNumberOfMultiparts(final int expectedParts, final long expectedRemaining, long totalSize, long partSize) { diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index e599f84b411e4..2843390f1aa80 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.repositories.s3; -import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.model.CannedAccessControlList; import com.amazonaws.services.s3.model.StorageClass; @@ -50,7 +49,6 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicReference; -import static java.util.Collections.emptyMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.not; @@ -87,11 +85,9 @@ public static void wipeRepository() { @Override protected void createTestRepository(final String name) { - assertAcked(client().admin().cluster().preparePutRepository(name) - .setType(S3Repository.TYPE) - .setSettings(Settings.builder() + assertAcked(client().admin().cluster().preparePutRepository(name).setType(S3Repository.TYPE).setSettings(Settings.builder() .put(S3Repository.BUCKET_SETTING.getKey(), bucket) - .put(InternalAwsS3Service.CLIENT_NAME.getKey(), client) + .put(S3Repository.CLIENT_NAME.getKey(), client) .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), bufferSize) .put(S3Repository.SERVER_SIDE_ENCRYPTION_SETTING.getKey(), serverSideEncryption) .put(S3Repository.CANNED_ACL_SETTING.getKey(), cannedACL) @@ -113,13 +109,17 @@ public TestS3RepositoryPlugin(final Settings settings) { @Override public Map getRepositories(final Environment env, final NamedXContentRegistry registry) { - return Collections.singletonMap(S3Repository.TYPE, (metadata) -> - new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings(), emptyMap()) { - @Override - public synchronized AmazonS3 client(final Settings repositorySettings) { - return new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass); - } - })); + return Collections.singletonMap(S3Repository.TYPE, + (metadata) -> new S3Repository(metadata, env.settings(), registry, new InternalAwsS3Service(env.settings()) { + @Override + public synchronized AmazonS3Reference client(String clientName) { + return new AmazonS3Reference(new MockAmazonS3(blobs, bucket, serverSideEncryption, cannedACL, storageClass)); + } + }) { + @Override + void overrideCredentialsFromClusterState(AwsS3Service awsService) { + } + }); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java index 4a23e4efa9a29..a44946b6b3ffa 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java +++ 
b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreTests.java @@ -115,7 +115,15 @@ public static S3BlobStore randomMockS3BlobStore() { storageClass = randomValueOtherThan(StorageClass.Glacier, () -> randomFrom(StorageClass.values())).toString(); } - AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass); - return new S3BlobStore(Settings.EMPTY, client, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + final String theClientName = randomAlphaOfLength(4); + final AmazonS3 client = new MockAmazonS3(new ConcurrentHashMap<>(), bucket, serverSideEncryption, cannedACL, storageClass); + final AwsS3Service service = new InternalAwsS3Service(Settings.EMPTY) { + @Override + public synchronized AmazonS3Reference client(String clientName) { + assert theClientName.equals(clientName); + return new AmazonS3Reference(client); + } + }; + return new S3BlobStore(Settings.EMPTY, service, theClientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java index 7da65c27d8194..5c0aada66585c 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.s3; import com.amazonaws.services.s3.AbstractAmazonS3; -import com.amazonaws.services.s3.AmazonS3; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.component.AbstractLifecycleComponent; @@ -31,18 +30,25 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; - import java.io.IOException; +import java.util.Collections; +import java.util.Map; import static org.hamcrest.Matchers.containsString; public class S3RepositoryTests extends ESTestCase { private static class DummyS3Client extends AbstractAmazonS3 { + @Override public boolean doesBucketExist(String bucketName) { return true; } + + @Override + public void shutdown() { + // TODO check is closed + } } private static class DummyS3Service extends AbstractLifecycleComponent implements AwsS3Service { @@ -56,53 +62,70 @@ protected void doStop() {} @Override protected void doClose() {} @Override - public AmazonS3 client(Settings settings) { - return new DummyS3Client(); + public AmazonS3Reference client(String clientName) { + return new AmazonS3Reference(new DummyS3Client()); + } + + @Override + public Map refreshAndClearCache(Map clientsSettings) { + return Collections.emptyMap(); + } + + @Override + public void close() { } } public void testInvalidChunkBufferSizeSettings() throws IOException { // chunk < buffer should fail - assertInvalidBuffer(10, 5, RepositoryException.class, "chunk_size (5mb) can't be lower than buffer_size (10mb)."); + final Settings s1 = bufferAndChunkSettings(10, 5); + final Exception e1 = expectThrows(RepositoryException.class, + () -> new S3Repository(getRepositoryMetaData(s1), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())); + assertThat(e1.getMessage(), containsString("chunk_size (5mb) can't be lower than buffer_size (10mb)")); // chunk > buffer should pass - assertValidBuffer(5, 10); + final Settings s2 = 
bufferAndChunkSettings(5, 10); + new S3Repository(getRepositoryMetaData(s2), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close(); // chunk = buffer should pass - assertValidBuffer(5, 5); + final Settings s3 = bufferAndChunkSettings(5, 5); + new S3Repository(getRepositoryMetaData(s3), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close(); // buffer < 5mb should fail - assertInvalidBuffer(4, 10, IllegalArgumentException.class, - "failed to parse value [4mb] for setting [buffer_size], must be >= [5mb]"); - // chunk > 5tb should fail - assertInvalidBuffer(5, 6000000, IllegalArgumentException.class, - "failed to parse value [6000000mb] for setting [chunk_size], must be <= [5tb]"); + final Settings s4 = bufferAndChunkSettings(4, 10); + final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class, + () -> new S3Repository(getRepositoryMetaData(s4), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) + .close()); + assertThat(e2.getMessage(), containsString("failed to parse value [4mb] for setting [buffer_size], must be >= [5mb]")); + final Settings s5 = bufferAndChunkSettings(5, 6000000); + final IllegalArgumentException e3 = expectThrows(IllegalArgumentException.class, + () -> new S3Repository(getRepositoryMetaData(s5), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) + .close()); + assertThat(e3.getMessage(), containsString("failed to parse value [6000000mb] for setting [chunk_size], must be <= [5tb]")); } - private void assertValidBuffer(long bufferMB, long chunkMB) throws IOException { - RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() - .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(bufferMB, ByteSizeUnit.MB).getStringRep()) - .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkMB, ByteSizeUnit.MB).getStringRep()).build()); - new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()); + private Settings bufferAndChunkSettings(long buffer, long chunk) { + return Settings.builder() + .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(buffer, ByteSizeUnit.MB).getStringRep()) + .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunk, ByteSizeUnit.MB).getStringRep()) + .build(); } - private void assertInvalidBuffer(int bufferMB, int chunkMB, Class clazz, String msg) throws IOException { - RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() - .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), new ByteSizeValue(bufferMB, ByteSizeUnit.MB).getStringRep()) - .put(S3Repository.CHUNK_SIZE_SETTING.getKey(), new ByteSizeValue(chunkMB, ByteSizeUnit.MB).getStringRep()).build()); - - Exception e = expectThrows(clazz, () -> new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, - new DummyS3Service())); - assertThat(e.getMessage(), containsString(msg)); + private RepositoryMetaData getRepositoryMetaData(Settings settings) { + return new RepositoryMetaData("dummy-repo", "mock", Settings.builder().put(settings).build()); } public void testBasePathSetting() throws IOException { - RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() - .put(S3Repository.BASE_PATH_SETTING.getKey(), "foo/bar").build()); - S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()); - assertEquals("foo/bar/", s3repo.basePath().buildAsString()); 
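As an aside on the clientReference() calls that these tests now mock: the idea is that a caller acquires a counted reference, uses the wrapped client, and releases the reference when finished, so the service can rebuild clients after a secure-settings reload without pulling a live client out from under anyone. A minimal usage sketch, not part of the patch, assuming AmazonS3Reference releases its count on close() (as the incRef() answer in the mocked test suggests); the helper class and bucket name are made up, and the code is assumed to live in the repository-s3 package alongside S3BlobStore:

```java
final class ClientReferenceUsage {

    // S3BlobStore and AmazonS3Reference come from the patch; everything else is illustrative
    static boolean bucketExists(S3BlobStore blobStore, String bucket) {
        // acquire a counted reference; close() releases it even if the SDK call throws
        try (AmazonS3Reference reference = blobStore.clientReference()) {
            return reference.client().doesBucketExist(bucket);
        }
    }
}
```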
+ final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder() + .put(S3Repository.BASE_PATH_SETTING.getKey(), "foo/bar").build()); + try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) { + assertEquals("foo/bar/", s3repo.basePath().buildAsString()); + } } - public void testDefaultBufferSize() { - ByteSizeValue defaultBufferSize = S3Repository.BUFFER_SIZE_SETTING.get(Settings.EMPTY); - assertThat(defaultBufferSize, Matchers.lessThanOrEqualTo(new ByteSizeValue(100, ByteSizeUnit.MB))); - assertThat(defaultBufferSize, Matchers.greaterThanOrEqualTo(new ByteSizeValue(5, ByteSizeUnit.MB))); + public void testDefaultBufferSize() throws IOException { + final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.EMPTY); + try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) { + final long defaultBufferSize = ((S3BlobStore) s3repo.blobStore()).bufferSizeInBytes(); + assertThat(defaultBufferSize, Matchers.lessThanOrEqualTo(100L * 1024 * 1024)); + assertThat(defaultBufferSize, Matchers.greaterThanOrEqualTo(5L * 1024 * 1024)); + } } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java index 93bf58cc28964..0c762659a5fe0 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAmazonS3.java @@ -51,7 +51,7 @@ public class TestAmazonS3 extends AmazonS3Wrapper { private double writeFailureRate = 0.0; private double readFailureRate = 0.0; - private String randomPrefix; + private final String randomPrefix; ConcurrentMap accessCounts = new ConcurrentHashMap<>(); @@ -76,18 +76,18 @@ public TestAmazonS3(AmazonS3 delegate, Settings settings) { @Override public PutObjectResult putObject(String bucketName, String key, InputStream input, ObjectMetadata metadata) throws AmazonClientException, AmazonServiceException { if (shouldFail(bucketName, key, writeFailureRate)) { - long length = metadata.getContentLength(); - long partToRead = (long) (length * randomDouble()); - byte[] buffer = new byte[1024]; + final long length = metadata.getContentLength(); + final long partToRead = (long) (length * randomDouble()); + final byte[] buffer = new byte[1024]; for (long cur = 0; cur < partToRead; cur += buffer.length) { try { - input.read(buffer, 0, (int) (partToRead - cur > buffer.length ? buffer.length : partToRead - cur)); - } catch (IOException ex) { + input.read(buffer, 0, (int) ((partToRead - cur) > buffer.length ? 
buffer.length : partToRead - cur)); + } catch (final IOException ex) { throw new ElasticsearchException("cannot read input stream", ex); } } logger.info("--> random write failure on putObject method: throwing an exception for [bucket={}, key={}]", bucketName, key); - AmazonS3Exception ex = new AmazonS3Exception("Random S3 exception"); + final AmazonS3Exception ex = new AmazonS3Exception("Random S3 exception"); ex.setStatusCode(400); ex.setErrorCode("RequestTimeout"); throw ex; @@ -99,18 +99,18 @@ public PutObjectResult putObject(String bucketName, String key, InputStream inpu @Override public UploadPartResult uploadPart(UploadPartRequest request) throws AmazonClientException, AmazonServiceException { if (shouldFail(request.getBucketName(), request.getKey(), writeFailureRate)) { - long length = request.getPartSize(); - long partToRead = (long) (length * randomDouble()); - byte[] buffer = new byte[1024]; + final long length = request.getPartSize(); + final long partToRead = (long) (length * randomDouble()); + final byte[] buffer = new byte[1024]; for (long cur = 0; cur < partToRead; cur += buffer.length) { try (InputStream input = request.getInputStream()){ - input.read(buffer, 0, (int) (partToRead - cur > buffer.length ? buffer.length : partToRead - cur)); - } catch (IOException ex) { + input.read(buffer, 0, (int) ((partToRead - cur) > buffer.length ? buffer.length : partToRead - cur)); + } catch (final IOException ex) { throw new ElasticsearchException("cannot read input stream", ex); } } logger.info("--> random write failure on uploadPart method: throwing an exception for [bucket={}, key={}]", request.getBucketName(), request.getKey()); - AmazonS3Exception ex = new AmazonS3Exception("Random S3 write exception"); + final AmazonS3Exception ex = new AmazonS3Exception("Random S3 write exception"); ex.setStatusCode(400); ex.setErrorCode("RequestTimeout"); throw ex; @@ -123,7 +123,7 @@ public UploadPartResult uploadPart(UploadPartRequest request) throws AmazonClien public S3Object getObject(String bucketName, String key) throws AmazonClientException, AmazonServiceException { if (shouldFail(bucketName, key, readFailureRate)) { logger.info("--> random read failure on getObject method: throwing an exception for [bucket={}, key={}]", bucketName, key); - AmazonS3Exception ex = new AmazonS3Exception("Random S3 read exception"); + final AmazonS3Exception ex = new AmazonS3Exception("Random S3 read exception"); ex.setStatusCode(404); throw ex; } else { @@ -135,7 +135,7 @@ private boolean shouldFail(String bucketName, String key, double probability) { if (probability > 0.0) { String path = randomPrefix + "-" + bucketName + "+" + key; path += "/" + incrementAndGet(path); - return Math.abs(hashCode(path)) < Integer.MAX_VALUE * probability; + return Math.abs(hashCode(path)) < (Integer.MAX_VALUE * probability); } else { return false; } @@ -143,14 +143,14 @@ private boolean shouldFail(String bucketName, String key, double probability) { private int hashCode(String path) { try { - MessageDigest digest = MessageDigest.getInstance("MD5"); - byte[] bytes = digest.digest(path.getBytes("UTF-8")); + final MessageDigest digest = MessageDigest.getInstance("MD5"); + final byte[] bytes = digest.digest(path.getBytes("UTF-8")); int i = 0; return ((bytes[i++] & 0xFF) << 24) | ((bytes[i++] & 0xFF) << 16) | ((bytes[i++] & 0xFF) << 8) | (bytes[i++] & 0xFF); - } catch (UnsupportedEncodingException ex) { + } catch (final UnsupportedEncodingException ex) { throw new ElasticsearchException("cannot calculate hashcode", ex); - } 
catch (NoSuchAlgorithmException ex) { + } catch (final NoSuchAlgorithmException ex) { throw new ElasticsearchException("cannot calculate hashcode", ex); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java index c5012d9c68bc7..f376f73820624 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/TestAwsS3Service.java @@ -22,45 +22,39 @@ import java.util.IdentityHashMap; import com.amazonaws.services.s3.AmazonS3; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; public class TestAwsS3Service extends InternalAwsS3Service { public static class TestPlugin extends S3RepositoryPlugin { public TestPlugin(Settings settings) { - super(settings); - } - @Override - protected AwsS3Service createStorageService(Settings settings) { - return new TestAwsS3Service(settings); + super(new TestAwsS3Service(settings)); } } - IdentityHashMap clients = new IdentityHashMap<>(); + IdentityHashMap clients = new IdentityHashMap<>(); public TestAwsS3Service(Settings settings) { - super(settings, S3ClientSettings.load(settings)); + super(settings); } @Override - public synchronized AmazonS3 client(Settings repositorySettings) { - return cachedWrapper(super.client(repositorySettings)); + public synchronized AmazonS3Reference client(String clientName) { + return new AmazonS3Reference(cachedWrapper(super.client(clientName))); } - private AmazonS3 cachedWrapper(AmazonS3 client) { - TestAmazonS3 wrapper = clients.get(client); + private AmazonS3 cachedWrapper(AmazonS3Reference clientReference) { + TestAmazonS3 wrapper = clients.get(clientReference); if (wrapper == null) { - wrapper = new TestAmazonS3(client, settings); - clients.put(client, wrapper); + wrapper = new TestAmazonS3(clientReference.client(), settings); + clients.put(clientReference, wrapper); } return wrapper; } @Override - protected synchronized void doClose() throws ElasticsearchException { - super.doClose(); + protected synchronized void releaseCachedClients() { + super.releaseCachedClients(); clients.clear(); } - } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 17acf7c10f534..7ddb39b6d6225 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -29,6 +29,8 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoAction; import org.elasticsearch.action.admin.cluster.node.info.TransportNodesInfoAction; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsAction; +import org.elasticsearch.action.admin.cluster.node.reload.TransportNodesReloadSecureSettingsAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; @@ -241,6 +243,7 @@ import org.elasticsearch.rest.action.admin.cluster.RestPendingClusterTasksAction; import org.elasticsearch.rest.action.admin.cluster.RestPutRepositoryAction; import 
org.elasticsearch.rest.action.admin.cluster.RestPutStoredScriptAction; +import org.elasticsearch.rest.action.admin.cluster.RestReloadSecureSettingsAction; import org.elasticsearch.rest.action.admin.cluster.RestRemoteClusterInfoAction; import org.elasticsearch.rest.action.admin.cluster.RestRestoreSnapshotAction; import org.elasticsearch.rest.action.admin.cluster.RestSnapshotsStatusAction; @@ -491,6 +494,7 @@ public void reg actions.register(ExplainAction.INSTANCE, TransportExplainAction.class); actions.register(ClearScrollAction.INSTANCE, TransportClearScrollAction.class); actions.register(RecoveryAction.INSTANCE, TransportRecoveryAction.class); + actions.register(NodesReloadSecureSettingsAction.INSTANCE, TransportNodesReloadSecureSettingsAction.class); //Indexed scripts actions.register(PutStoredScriptAction.INSTANCE, TransportPutStoredScriptAction.class); @@ -610,6 +614,8 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestRecoveryAction(settings, restController)); + registerHandler.accept(new RestReloadSecureSettingsAction(settings, restController)); + // Scripts API registerHandler.accept(new RestGetStoredScriptAction(settings, restController)); registerHandler.accept(new RestPutStoredScriptAction(settings, restController)); diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java similarity index 56% rename from plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java rename to server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java index 3f20e29505751..ccaeca8702f0b 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureServiceRemoteException.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java @@ -17,14 +17,23 @@ * under the License. */ -package org.elasticsearch.repositories.azure; +package org.elasticsearch.action.admin.cluster.node.reload; -public class AzureServiceRemoteException extends IllegalStateException { - public AzureServiceRemoteException(String msg) { - super(msg); +import org.elasticsearch.action.Action; + +public class NodesReloadSecureSettingsAction + extends Action { + + public static final NodesReloadSecureSettingsAction INSTANCE = new NodesReloadSecureSettingsAction(); + public static final String NAME = "cluster:admin/nodes/reload_secure_settings"; + + private NodesReloadSecureSettingsAction() { + super(NAME); } - public AzureServiceRemoteException(String msg, Throwable cause) { - super(msg, cause); + @Override + public NodesReloadSecureSettingsResponse newResponse() { + return new NodesReloadSecureSettingsResponse(); } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java new file mode 100644 index 0000000000000..50df7b1bb26e0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequest.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.reload; + + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.SecureString; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +/** + * Request for a reload secure settings action + */ +public class NodesReloadSecureSettingsRequest extends BaseNodesRequest { + + /** + * The password which is broadcasted to all nodes, but is never stored on + * persistent storage. The password is used to reread and decrypt the contents + * of the node's keystore (backing the implementation of + * {@code SecureSettings}). + */ + private SecureString secureSettingsPassword; + + public NodesReloadSecureSettingsRequest() { + } + + /** + * Reload secure settings only on certain nodes, based on the nodes ids + * specified. If none are passed, secure settings will be reloaded on all the + * nodes. + */ + public NodesReloadSecureSettingsRequest(String... nodesIds) { + super(nodesIds); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (secureSettingsPassword == null) { + validationException = addValidationError("secure settings password cannot be null (use empty string instead)", + validationException); + } + return validationException; + } + + public SecureString secureSettingsPassword() { + return secureSettingsPassword; + } + + public NodesReloadSecureSettingsRequest secureStorePassword(SecureString secureStorePassword) { + this.secureSettingsPassword = secureStorePassword; + return this; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + final byte[] passwordBytes = in.readByteArray(); + try { + this.secureSettingsPassword = new SecureString(utf8BytesToChars(passwordBytes)); + } finally { + Arrays.fill(passwordBytes, (byte) 0); + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + final byte[] passwordBytes = charsToUtf8Bytes(this.secureSettingsPassword.getChars()); + try { + out.writeByteArray(passwordBytes); + } finally { + Arrays.fill(passwordBytes, (byte) 0); + } + } + + /** + * Encodes the provided char[] to a UTF-8 byte[]. This is done while avoiding + * conversions to String. The provided char[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. 
+ */ + private static byte[] charsToUtf8Bytes(char[] chars) { + final CharBuffer charBuffer = CharBuffer.wrap(chars); + final ByteBuffer byteBuffer = StandardCharsets.UTF_8.encode(charBuffer); + final byte[] bytes; + if (byteBuffer.hasArray()) { + // there is no guarantee that the byte buffers backing array is the right size + // so we need to make a copy + bytes = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.position(), byteBuffer.limit()); + Arrays.fill(byteBuffer.array(), (byte) 0); // clear sensitive data + } else { + final int length = byteBuffer.limit() - byteBuffer.position(); + bytes = new byte[length]; + byteBuffer.get(bytes); + // if the buffer is not read only we can reset and fill with 0's + if (byteBuffer.isReadOnly() == false) { + byteBuffer.clear(); // reset + for (int i = 0; i < byteBuffer.limit(); i++) { + byteBuffer.put((byte) 0); + } + } + } + return bytes; + } + + /** + * Decodes the provided byte[] to a UTF-8 char[]. This is done while avoiding + * conversions to String. The provided byte[] is not modified by this method, so + * the caller needs to take care of clearing the value if it is sensitive. + */ + public static char[] utf8BytesToChars(byte[] utf8Bytes) { + final ByteBuffer byteBuffer = ByteBuffer.wrap(utf8Bytes); + final CharBuffer charBuffer = StandardCharsets.UTF_8.decode(byteBuffer); + final char[] chars; + if (charBuffer.hasArray()) { + // there is no guarantee that the char buffers backing array is the right size + // so we need to make a copy + chars = Arrays.copyOfRange(charBuffer.array(), charBuffer.position(), charBuffer.limit()); + Arrays.fill(charBuffer.array(), (char) 0); // clear sensitive data + } else { + final int length = charBuffer.limit() - charBuffer.position(); + chars = new char[length]; + charBuffer.get(chars); + // if the buffer is not read only we can reset and fill with 0's + if (charBuffer.isReadOnly() == false) { + charBuffer.clear(); // reset + for (int i = 0; i < charBuffer.limit(); i++) { + charBuffer.put((char) 0); + } + } + } + return chars; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java new file mode 100644 index 0000000000000..b5f2f73e56f51 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsRequestBuilder.java @@ -0,0 +1,84 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
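The two conversion helpers above avoid routing the keystore password through a java.lang.String, which would linger on the heap until garbage collected. A small, self-contained round trip of the same idea, not taken from the patch, assuming heap-backed NIO buffers for brevity (the patch additionally handles read-only and array-less buffers):

```java
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

public class PasswordWireRoundTrip {
    public static void main(String[] args) {
        final char[] password = {'s', '3', 'c', 'r', '3', 't'};

        // char[] -> UTF-8 byte[] (what writeTo() puts on the wire)
        final ByteBuffer encoded = StandardCharsets.UTF_8.encode(CharBuffer.wrap(password));
        final byte[] wireBytes = Arrays.copyOfRange(encoded.array(), encoded.position(), encoded.limit());
        Arrays.fill(encoded.array(), (byte) 0);        // scrub the encoder's backing array

        // UTF-8 byte[] -> char[] (what readFrom() reconstructs)
        final CharBuffer decoded = StandardCharsets.UTF_8.decode(ByteBuffer.wrap(wireBytes));
        final char[] roundTripped = Arrays.copyOfRange(decoded.array(), decoded.position(), decoded.limit());
        Arrays.fill(wireBytes, (byte) 0);              // scrub the transport copy
        Arrays.fill(decoded.array(), (char) 0);        // scrub the decoder's backing array

        System.out.println(Arrays.equals(password, roundTripped)); // true
    }
}
```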
+ */ + +package org.elasticsearch.action.admin.cluster.node.reload; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + +/** + * Builder for the reload secure settings nodes request + */ +public class NodesReloadSecureSettingsRequestBuilder extends NodesOperationRequestBuilder { + + public static final String SECURE_SETTINGS_PASSWORD_FIELD_NAME = "secure_settings_password"; + + public NodesReloadSecureSettingsRequestBuilder(ElasticsearchClient client, NodesReloadSecureSettingsAction action) { + super(client, action, new NodesReloadSecureSettingsRequest()); + } + + public NodesReloadSecureSettingsRequestBuilder setSecureStorePassword(SecureString secureStorePassword) { + request.secureStorePassword(secureStorePassword); + return this; + } + + public NodesReloadSecureSettingsRequestBuilder source(BytesReference source, XContentType xContentType) throws IOException { + Objects.requireNonNull(xContentType); + // EMPTY is ok here because we never call namedObject + try (InputStream stream = source.streamInput(); + XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, stream)) { + XContentParser.Token token; + token = parser.nextToken(); + if (token != XContentParser.Token.START_OBJECT) { + throw new ElasticsearchParseException("expected an object, but found token [{}]", token); + } + token = parser.nextToken(); + if (token != XContentParser.Token.FIELD_NAME || false == SECURE_SETTINGS_PASSWORD_FIELD_NAME.equals(parser.currentName())) { + throw new ElasticsearchParseException("expected a field named [{}], but found [{}]", SECURE_SETTINGS_PASSWORD_FIELD_NAME, + token); + } + token = parser.nextToken(); + if (token != XContentParser.Token.VALUE_STRING) { + throw new ElasticsearchParseException("expected field [{}] to be of type string, but found [{}] instead", + SECURE_SETTINGS_PASSWORD_FIELD_NAME, token); + } + final String password = parser.text(); + setSecureStorePassword(new SecureString(password.toCharArray())); + token = parser.nextToken(); + if (token != XContentParser.Token.END_OBJECT) { + throw new ElasticsearchParseException("expected end of object, but found token [{}]", token); + } + } + return this; + } + +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java new file mode 100644 index 0000000000000..394b1f10dc2d9 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsResponse.java @@ -0,0 +1,149 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
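For orientation, the builder above is what the REST handler feeds with a {"secure_settings_password": "..."} body. A hedged sketch of driving the same action from Java code, assuming a connected client instance and using only methods added or referenced by this patch; the node ids are invented, and an empty password matches a keystore that has no password set:

```java
import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.SecureString;

final class ReloadSecureSettingsExample {

    static void reload(Client client) {
        final NodesReloadSecureSettingsResponse response = client.admin().cluster()
                .prepareReloadSecureSettings()
                .setNodesIds("node-1", "node-2")                             // made-up ids; omit to target all nodes
                .setSecureStorePassword(new SecureString("".toCharArray())) // empty password for a password-less keystore
                .get();
        for (final NodesReloadSecureSettingsResponse.NodeResponse node : response.getNodes()) {
            if (node.reloadException() != null) {
                // this node failed to reread or decrypt its keystore, or a plugin reload threw
            }
        }
    }
}
```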
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.node.reload; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import java.io.IOException; +import java.util.List; + +/** + * The response for the reload secure settings action + */ +public class NodesReloadSecureSettingsResponse extends BaseNodesResponse + implements ToXContentFragment { + + public NodesReloadSecureSettingsResponse() { + } + + public NodesReloadSecureSettingsResponse(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(NodeResponse::readNodeResponse); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeStreamableList(nodes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("nodes"); + for (final NodesReloadSecureSettingsResponse.NodeResponse node : getNodes()) { + builder.startObject(node.getNode().getId()); + builder.field("name", node.getNode().getName()); + final Exception e = node.reloadException(); + if (e != null) { + builder.startObject("reload_exception"); + ElasticsearchException.generateThrowableXContent(builder, params, e); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + try { + final XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint(); + builder.startObject(); + toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + return Strings.toString(builder); + } catch (final IOException e) { + return "{ \"error\" : \"" + e.getMessage() + "\"}"; + } + } + + public static class NodeResponse extends BaseNodeResponse { + + private Exception reloadException = null; + + public NodeResponse() { + } + + public NodeResponse(DiscoveryNode node, Exception reloadException) { + super(node); + this.reloadException = reloadException; + } + + public Exception reloadException() { + return this.reloadException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + if (in.readBoolean()) { + reloadException = in.readException(); + } + } + + @Override + public void writeTo(StreamOutput 
out) throws IOException { + super.writeTo(out); + if (reloadException != null) { + out.writeBoolean(true); + out.writeException(reloadException); + } else { + out.writeBoolean(false); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final NodesReloadSecureSettingsResponse.NodeResponse that = (NodesReloadSecureSettingsResponse.NodeResponse) o; + return reloadException != null ? reloadException.equals(that.reloadException) : that.reloadException == null; + } + + @Override + public int hashCode() { + return reloadException != null ? reloadException.hashCode() : 0; + } + + public static NodeResponse readNodeResponse(StreamInput in) throws IOException { + final NodeResponse node = new NodeResponse(); + node.readFrom(in); + return node; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java new file mode 100644 index 0000000000000..cb870e58d3187 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -0,0 +1,144 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
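The NodeResponse stream methods above use the usual boolean-marker idiom for optional values on the wire. Spelled out as a standalone sketch, not part of the patch; StreamInput and StreamOutput are the Elasticsearch transport stream classes used throughout this change:

```java
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

final class OptionalExceptionSerialization {

    static void write(StreamOutput out, Exception reloadException) throws IOException {
        if (reloadException != null) {
            out.writeBoolean(true);            // marker: a payload follows
            out.writeException(reloadException);
        } else {
            out.writeBoolean(false);           // marker: nothing follows
        }
    }

    static Exception read(StreamInput in) throws IOException {
        return in.readBoolean() ? in.readException() : null;
    }
}
```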
+ */ + +package org.elasticsearch.action.admin.cluster.node.reload; + +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.logging.log4j.util.Supplier; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.BaseNodeRequest; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.ReloadablePlugin; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class TransportNodesReloadSecureSettingsAction extends TransportNodesAction { + + private final Environment environment; + private final PluginsService pluginsService; + + @Inject + public TransportNodesReloadSecureSettingsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, Environment environment, + PluginsService pluginService) { + super(settings, NodesReloadSecureSettingsAction.NAME, threadPool, clusterService, transportService, actionFilters, + indexNameExpressionResolver, NodesReloadSecureSettingsRequest::new, NodeRequest::new, ThreadPool.Names.GENERIC, + NodesReloadSecureSettingsResponse.NodeResponse.class); + this.environment = environment; + this.pluginsService = pluginService; + } + + @Override + protected NodesReloadSecureSettingsResponse newResponse(NodesReloadSecureSettingsRequest request, + List responses, + List failures) { + return new NodesReloadSecureSettingsResponse(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeRequest newNodeRequest(String nodeId, NodesReloadSecureSettingsRequest request) { + return new NodeRequest(nodeId, request); + } + + @Override + protected NodesReloadSecureSettingsResponse.NodeResponse newNodeResponse() { + return new NodesReloadSecureSettingsResponse.NodeResponse(); + } + + @Override + protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation(NodeRequest nodeReloadRequest) { + final NodesReloadSecureSettingsRequest request = nodeReloadRequest.request; + final SecureString secureSettingsPassword = request.secureSettingsPassword(); + try (KeyStoreWrapper keystore = KeyStoreWrapper.load(environment.configFile())) { + // reread keystore from config file + if (keystore == null) { + return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), + new IllegalStateException("Keystore is missing")); + } + // decrypt the keystore using the password from the request + keystore.decrypt(secureSettingsPassword.getChars()); + // add the keystore to the original node settings object + final Settings settingsWithKeystore = Settings.builder() + .put(environment.settings(), false) + 
.setSecureSettings(keystore) + .build(); + final List exceptions = new ArrayList<>(); + // broadcast the new settings object (with the open embedded keystore) to all reloadable plugins + pluginsService.filterPlugins(ReloadablePlugin.class).stream().forEach(p -> { + try { + p.reload(settingsWithKeystore); + } catch (final Exception e) { + logger.warn((Supplier) () -> new ParameterizedMessage("Reload failed for plugin [{}]", p.getClass().getSimpleName()), + e); + exceptions.add(e); + } + }); + ExceptionsHelper.rethrowAndSuppress(exceptions); + return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), null); + } catch (final Exception e) { + return new NodesReloadSecureSettingsResponse.NodeResponse(clusterService.localNode(), e); + } + } + + public static class NodeRequest extends BaseNodeRequest { + + NodesReloadSecureSettingsRequest request; + + public NodeRequest() { + } + + NodeRequest(String nodeId, NodesReloadSecureSettingsRequest request) { + super(nodeId); + this.request = request; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + request = new NodesReloadSecureSettingsRequest(); + request.readFrom(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + request.writeTo(out); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java index 5b21036b8cd4f..949b0110fff20 100644 --- a/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/ClusterAdminClient.java @@ -33,6 +33,7 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; @@ -185,6 +186,11 @@ public interface ClusterAdminClient extends ElasticsearchClient { */ ClusterUpdateSettingsRequestBuilder prepareUpdateSettings(); + /** + * Re initialize each cluster node and pass them the secret store password. + */ + NodesReloadSecureSettingsRequestBuilder prepareReloadSecureSettings(); + /** * Reroutes allocation of shards. Advance API. 
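The nodeOperation() above deliberately keeps notifying plugins after one of them fails and only afterwards rethrows everything. A simplified sketch of that failure-isolation step, not from the patch, using plain suppressed exceptions in place of ExceptionsHelper.rethrowAndSuppress; the class and method names are invented:

```java
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.ReloadablePlugin;

import java.util.ArrayList;
import java.util.List;

final class ReloadAllPlugins {

    static void reloadAll(List<ReloadablePlugin> plugins, Settings settingsWithKeystore) {
        final List<Exception> failures = new ArrayList<>();
        for (final ReloadablePlugin plugin : plugins) {
            try {
                plugin.reload(settingsWithKeystore);
            } catch (final Exception e) {
                failures.add(e);   // keep going; one failing plugin must not block the rest
            }
        }
        if (failures.isEmpty() == false) {
            final RuntimeException all = new RuntimeException(failures.size() + " plugin(s) failed to reload");
            failures.forEach(all::addSuppressed);
            throw all;             // surfaced as the node-level reload_exception
        }
    }
}
```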
*/ diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index 41c1d245d39ca..dc70da4e61f7e 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -41,6 +41,8 @@ import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsAction; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsAction; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest; import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequestBuilder; @@ -771,6 +773,11 @@ public ClusterUpdateSettingsRequestBuilder prepareUpdateSettings() { return new ClusterUpdateSettingsRequestBuilder(this, ClusterUpdateSettingsAction.INSTANCE); } + @Override + public NodesReloadSecureSettingsRequestBuilder prepareReloadSecureSettings() { + return new NodesReloadSecureSettingsRequestBuilder(this, NodesReloadSecureSettingsAction.INSTANCE); + } + @Override public ActionFuture nodesInfo(final NodesInfoRequest request) { return execute(NodesInfoAction.INSTANCE, request); diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index f47760491f8d5..3a8a06949b29c 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -308,7 +308,9 @@ public void decrypt(char[] password) throws GeneralSecurityException, IOExceptio } if (formatVersion <= 2) { decryptLegacyEntries(); - assert password.length == 0; + if (password.length != 0) { + throw new IllegalArgumentException("Keystore format does not accept non-empty passwords"); + } return; } diff --git a/server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java b/server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java new file mode 100644 index 0000000000000..ad3a3bcc299d0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/util/LazyInitializable.java @@ -0,0 +1,108 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
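The new LazyInitializable utility defined just below memoizes a value produced by a CheckedSupplier and lets reset() discard it, which is the kind of building block a service can use to rebuild a cached client when its configuration changes. A usage sketch under those assumptions, taking the type parameters (a value type and a checked exception type) from the Javadoc; everything except LazyInitializable itself is made up:

```java
import org.elasticsearch.common.util.LazyInitializable;

import java.io.IOException;

final class CachedClientHolder {

    // stand-in for an expensive-to-build client with a release hook
    interface ExpensiveClient { void shutdown(); }

    private final LazyInitializable<ExpensiveClient, IOException> lazyClient =
            new LazyInitializable<>(
                    CachedClientHolder::buildClient,   // built once, on the first getOrCompute()
                    client -> {},                      // nothing extra to do each time it is handed out
                    ExpensiveClient::shutdown);        // invoked on the old value when reset() discards it

    ExpensiveClient client() throws IOException {
        return lazyClient.getOrCompute();
    }

    void settingsChanged() {
        lazyClient.reset();   // the next client() call rebuilds with the new configuration
    }

    private static ExpensiveClient buildClient() throws IOException {
        return () -> { /* release underlying resources */ };
    }
}
```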
+ */ + +package org.elasticsearch.common.util; + +import org.elasticsearch.common.CheckedSupplier; + +import java.util.Objects; +import java.util.function.Consumer; + +/** + * Encapsulates a {@link CheckedSupplier} which is lazily invoked once on the + * first call to {@code #getOrCompute()}. The value which the + * supplier returns is memorized and will be served until + * {@code #reset()} is called. Each value returned by {@code #getOrCompute()}, + * newly minted or cached, will be passed to the onGet + * {@link Consumer}. On {@code #reset()} the value will be passed to the + * onReset {@code Consumer} and the next {@code #getOrCompute()} + * will regenerate the value. + */ +public final class LazyInitializable { + + private final CheckedSupplier supplier; + private final Consumer onGet; + private final Consumer onReset; + private volatile T value; + + /** + * Creates the simple LazyInitializable instance. + * + * @param supplier + * The {@code CheckedSupplier} to generate values which will be + * served on {@code #getOrCompute()} invocations. + */ + public LazyInitializable(CheckedSupplier supplier) { + this(supplier, v -> {}, v -> {}); + } + + /** + * Creates the complete LazyInitializable instance. + * + * @param supplier + * The {@code CheckedSupplier} to generate values which will be + * served on {@code #getOrCompute()} invocations. + * @param onGet + * A {@code Consumer} which is called on each value, newly forged or + * stale, that is returned by {@code #getOrCompute()} + * @param onReset + * A {@code Consumer} which is invoked on the value that will be + * erased when calling {@code #reset()} + */ + public LazyInitializable(CheckedSupplier supplier, Consumer onGet, Consumer onReset) { + this.supplier = supplier; + this.onGet = onGet; + this.onReset = onReset; + } + + /** + * Returns a value that was created by supplier. The value might + * have been previously created, if not it will be created now, thread safe of + * course. + */ + public T getOrCompute() throws E { + final T readOnce = value; // Read volatile just once... + final T result = readOnce == null ? maybeCompute(supplier) : readOnce; + onGet.accept(result); + return result; + } + + /** + * Clears the value, if it has been previously created by calling + * {@code #getOrCompute()}. The onReset will be called on this + * value. The next call to {@code #getOrCompute()} will recreate the value. + */ + public synchronized void reset() { + if (value != null) { + onReset.accept(value); + value = null; + } + } + + /** + * Creates a new value thread safely. + */ + private synchronized T maybeCompute(CheckedSupplier supplier) throws E { + if (value == null) { + value = Objects.requireNonNull(supplier.get()); + } + return value; + } + +} diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index 0ef703448b799..65d47682a95c0 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -74,6 +74,7 @@ *
 * <li>{@link RepositoryPlugin}
 * <li>{@link ScriptPlugin}
 * <li>{@link SearchPlugin}
+ * <li>{@link ReloadablePlugin}
 * </ul>
 * <p>
    In addition to extension points this class also declares some {@code @Deprecated} {@code public final void onModule} methods. These * methods should cause any extensions of {@linkplain Plugin} that used the pre-5.x style extension syntax to fail to build and point the diff --git a/server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java b/server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java new file mode 100644 index 0000000000000..86d7759185e69 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/ReloadablePlugin.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.common.settings.Settings; + +/** + * An extension point for {@link Plugin}s that can be reloaded. There is no + * clear definition about what reloading a plugin actually means. When a plugin + * is reloaded it might rebuild any internal members. Plugins usually implement + * this interface in order to reread the values of {@code SecureSetting}s and + * then rebuild any dependent internal members. + */ +public interface ReloadablePlugin { + /** + * Called to trigger the rebuilt of the plugin's internal members. The reload + * operation is required to have been completed when the method returns. + * Strictly speaking, the settings argument should not be accessed + * outside of this method's call stack, as any values stored in the node's + * keystore (see {@code SecureSetting}) will not otherwise be retrievable. The + * setting values do not follow dynamic updates, i.e. the values are identical + * to the ones during the initial plugin loading, barring the keystore file on + * disk changes. Any failure during the operation should be signaled by raising + * an exception, but the plugin should otherwise continue to function + * unperturbed. + * + * @param settings + * Settings used while reloading the plugin. All values are + * retrievable, including the values stored in the node's keystore. + * The setting values are the initial ones, from when the node has be + * started, i.e. they don't follow dynamic updates. + * @throws Exception + * if the operation failed. The plugin should continue to operate as + * if the offending call didn't happen. 
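To make the contract above concrete, here is a hedged sketch of a plugin that rereads a single secure setting when reload(Settings) is called. Only the ReloadablePlugin interface from this patch and the existing Plugin and SecureSetting APIs are assumed; the setting name and the helper method are invented for the example:

```java
import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ReloadablePlugin;

import java.util.Collections;
import java.util.List;

public class ExampleReloadablePlugin extends Plugin implements ReloadablePlugin {

    // hypothetical secure setting; the name is made up for the example
    static final Setting<SecureString> API_TOKEN = SecureSetting.secureString("example.api_token", null);

    @Override
    public List<Setting<?>> getSettings() {
        return Collections.singletonList(API_TOKEN);
    }

    @Override
    public void reload(Settings settings) {
        // `settings` carries the freshly decrypted keystore, so the secure value is readable here
        try (SecureString token = API_TOKEN.get(settings)) {
            rebuildInternalClient(token);   // hypothetical helper that swaps in state built from the new token
        }
    }

    private void rebuildInternalClient(SecureString token) {
        // placeholder: a real plugin would rebuild whatever internal members depend on the secure setting
    }
}
```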
diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java
new file mode 100644
index 0000000000000..0697871ea5d1c
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestReloadSecureSettingsAction.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.action.admin.cluster;
+
+import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequest;
+import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsRequestBuilder;
+import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse;
+import org.elasticsearch.client.node.NodeClient;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.BytesRestResponse;
+import org.elasticsearch.rest.RestController;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.RestResponse;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.rest.action.RestActions;
+import org.elasticsearch.rest.action.RestBuilderListener;
+
+import java.io.IOException;
+
+import static org.elasticsearch.rest.RestRequest.Method.POST;
+
+public final class RestReloadSecureSettingsAction extends BaseRestHandler {
+
+    public RestReloadSecureSettingsAction(Settings settings, RestController controller) {
+        super(settings);
+        controller.registerHandler(POST, "/_nodes/reload_secure_settings", this);
+        controller.registerHandler(POST, "/_nodes/{nodeId}/reload_secure_settings", this);
+    }
+
+    @Override
+    public String getName() {
+        return "nodes_reload_action";
+    }
+
+    @Override
+    public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
+        final String[] nodesIds = Strings.splitStringByCommaToArray(request.param("nodeId"));
+        final NodesReloadSecureSettingsRequestBuilder nodesRequestBuilder = client.admin()
+            .cluster()
+            .prepareReloadSecureSettings()
+            .setTimeout(request.param("timeout"))
+            .source(request.requiredContent(), request.getXContentType())
+            .setNodesIds(nodesIds);
+        final NodesReloadSecureSettingsRequest nodesRequest = nodesRequestBuilder.request();
+        return channel -> nodesRequestBuilder
+            .execute(new RestBuilderListener<NodesReloadSecureSettingsResponse>(channel) {
+                @Override
+                public RestResponse buildResponse(NodesReloadSecureSettingsResponse response, XContentBuilder builder)
+                    throws Exception {
builder.startObject(); + RestActions.buildNodesHeader(builder, channel.request(), response); + builder.field("cluster_name", response.getClusterName().value()); + response.toXContent(builder, channel.request()); + builder.endObject(); + // clear password for the original request + nodesRequest.secureSettingsPassword().close(); + return new BytesRestResponse(RestStatus.OK, builder); + } + }); + } + + @Override + public boolean canTripCircuitBreaker() { + return false; + } + +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java new file mode 100644 index 0000000000000..2061349e3301d --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -0,0 +1,422 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; +import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.SecureSettings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.PluginsService; +import org.elasticsearch.plugins.ReloadablePlugin; +import org.elasticsearch.test.ESIntegTestCase; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; +import java.security.AccessControlException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.containsString; + +public class ReloadSecureSettingsIT extends ESIntegTestCase { + + public void testMissingKeystoreFile() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + // keystore file should be 
missing for this test case + Files.deleteIfExists(KeyStoreWrapper.keystorePath(environment.configFile())); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + assertThat(nodeResponse.reloadException(), instanceOf(IllegalStateException.class)); + assertThat(nodeResponse.reloadException().getMessage(), containsString("Keystore is missing")); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the missing keystore case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testNullKeystorePassword() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + reloadSettingsError.set(new AssertionError("Null keystore password should fail")); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + try { + assertThat(e, instanceOf(ActionRequestValidationException.class)); + assertThat(e.getMessage(), containsString("secure settings password cannot be null")); + } catch (final AssertionError ae) { + reloadSettingsError.set(ae); + } finally { + latch.countDown(); + } + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the null password case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testInvalidKeystoreFile() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + // invalid "keystore" file should be present in the config dir + try (InputStream keystore = 
ReloadSecureSettingsIT.class.getResourceAsStream("invalid.txt.keystore")) { + if (Files.exists(environment.configFile()) == false) { + Files.createDirectory(environment.configFile()); + } + Files.copy(keystore, KeyStoreWrapper.keystorePath(environment.configFile()), StandardCopyOption.REPLACE_EXISTING); + } + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the invalid keystore format case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testWrongKeystorePassword() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + // "some" keystore should be present in this case + writeEmptyKeystore(environment, new char[0]); + final CountDownLatch latch = new CountDownLatch(1); + client().admin() + .cluster() + .prepareReloadSecureSettings() + .setSecureStorePassword(new SecureString(new char[] { 'W', 'r', 'o', 'n', 'g' })) + .execute(new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + assertThat(nodeResponse.reloadException(), instanceOf(IOException.class)); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // in the wrong password case no reload should be triggered + assertThat(mockReloadablePlugin.getReloadCount(), equalTo(initialReloadCount)); + } + + public void testMisbehavingPlugin() throws Exception { + final Environment environment = internalCluster().getInstance(Environment.class); + final PluginsService pluginsService 
= internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + // make plugins throw on reload + for (final String nodeName : internalCluster().getNodeNames()) { + internalCluster().getInstance(PluginsService.class, nodeName) + .filterPlugins(MisbehavingReloadablePlugin.class) + .stream().findFirst().get().setShouldThrow(true); + } + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + // "some" keystore should be present + final SecureSettings secureSettings = writeEmptyKeystore(environment, new char[0]); + // read seed setting value from the test case (not from the node) + final String seedValue = KeyStoreWrapper.SEED_SETTING + .get(Settings.builder().put(environment.settings()).setSecureSettings(secureSettings).build()) + .toString(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), notNullValue()); + assertThat(nodeResponse.reloadException().getMessage(), containsString("If shouldThrow I throw")); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + // even if one plugin fails to reload (throws Exception), others should be + // unperturbed + assertThat(mockReloadablePlugin.getReloadCount() - initialReloadCount, equalTo(1)); + // mock plugin should have been reloaded successfully + assertThat(mockReloadablePlugin.getSeedValue(), equalTo(seedValue)); + } + + public void testReloadWhileKeystoreChanged() throws Exception { + final PluginsService pluginsService = internalCluster().getInstance(PluginsService.class); + final MockReloadablePlugin mockReloadablePlugin = pluginsService.filterPlugins(MockReloadablePlugin.class) + .stream().findFirst().get(); + final Environment environment = internalCluster().getInstance(Environment.class); + final int initialReloadCount = mockReloadablePlugin.getReloadCount(); + for (int i = 0; i < randomIntBetween(4, 8); i++) { + // write keystore + final SecureSettings secureSettings = writeEmptyKeystore(environment, new char[0]); + // read seed setting value from the test case (not from the node) + final String seedValue = KeyStoreWrapper.SEED_SETTING + .get(Settings.builder().put(environment.settings()).setSecureSettings(secureSettings).build()) + .toString(); + // reload call + successfulReloadCall(); + assertThat(mockReloadablePlugin.getSeedValue(), equalTo(seedValue)); + assertThat(mockReloadablePlugin.getReloadCount() - initialReloadCount, equalTo(i + 1)); + } + } + + @Override + protected Collection> nodePlugins() { + final List> plugins = 
Arrays.asList(MockReloadablePlugin.class, MisbehavingReloadablePlugin.class); + // shuffle as reload is called in order + Collections.shuffle(plugins, random()); + return plugins; + } + + private void successfulReloadCall() throws InterruptedException { + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + client().admin().cluster().prepareReloadSecureSettings().setSecureStorePassword(new SecureString(new char[0])).execute( + new ActionListener() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(cluster().size())); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), nullValue()); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + } + + private SecureSettings writeEmptyKeystore(Environment environment, char[] password) throws Exception { + final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); + try { + keyStoreWrapper.save(environment.configFile(), password); + } catch (final AccessControlException e) { + if (e.getPermission() instanceof RuntimePermission && e.getPermission().getName().equals("accessUserInformation")) { + // this is expected: the save method is extra diligent and wants to make sure + // the keystore is readable, not relying on umask and whatnot. It's ok, we don't + // care about this in tests. 
+ } else { + throw e; + } + } + return keyStoreWrapper; + } + + public static class CountingReloadablePlugin extends Plugin implements ReloadablePlugin { + + private volatile int reloadCount; + + public CountingReloadablePlugin() { + } + + @Override + public void reload(Settings settings) throws Exception { + reloadCount++; + } + + public int getReloadCount() { + return reloadCount; + } + + } + + public static class MockReloadablePlugin extends CountingReloadablePlugin { + + private volatile String seedValue; + + public MockReloadablePlugin() { + } + + @Override + public void reload(Settings settings) throws Exception { + super.reload(settings); + this.seedValue = KeyStoreWrapper.SEED_SETTING.get(settings).toString(); + } + + public String getSeedValue() { + return seedValue; + } + + } + + public static class MisbehavingReloadablePlugin extends CountingReloadablePlugin { + + private boolean shouldThrow = false; + + public MisbehavingReloadablePlugin() { + } + + @Override + public synchronized void reload(Settings settings) throws Exception { + super.reload(settings); + if (shouldThrow) { + shouldThrow = false; + throw new Exception("If shouldThrow I throw"); + } + } + + public synchronized void setShouldThrow(boolean shouldThrow) { + this.shouldThrow = shouldThrow; + } + } + +} diff --git a/server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore b/server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore new file mode 100644 index 0000000000000..04613ffab7f36 --- /dev/null +++ b/server/src/test/resources/org/elasticsearch/action/admin/invalid.txt.keystore @@ -0,0 +1,3 @@ +admin admin +dragon 12345 + From 8c0ec05a1225c76a4ae9e6592ac38e63e5ed4dc8 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 18 Jun 2018 09:46:12 +0100 Subject: [PATCH 18/34] Expose lucene's RemoveDuplicatesTokenFilter (#31275) --- docs/reference/analysis/tokenfilters.asciidoc | 4 +- .../remove-duplicates-tokenfilter.asciidoc | 5 ++ .../analysis/common/CommonAnalysisPlugin.java | 1 + .../RemoveDuplicatesTokenFilterFactory.java | 42 +++++++++++++ .../RemoveDuplicatesFilterFactoryTests.java | 61 +++++++++++++++++++ 5 files changed, 112 insertions(+), 1 deletion(-) create mode 100644 docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc index 6e77b4498650d..dd5cb2e702cff 100644 --- a/docs/reference/analysis/tokenfilters.asciidoc +++ b/docs/reference/analysis/tokenfilters.asciidoc @@ -95,4 +95,6 @@ include::tokenfilters/decimal-digit-tokenfilter.asciidoc[] include::tokenfilters/fingerprint-tokenfilter.asciidoc[] -include::tokenfilters/minhash-tokenfilter.asciidoc[] \ No newline at end of file +include::tokenfilters/minhash-tokenfilter.asciidoc[] + +include::tokenfilters/remove-duplicates-tokenfilter.asciidoc[] \ No newline at end of file diff --git a/docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc new file mode 100644 index 0000000000000..594e18eaf7f7e --- /dev/null +++ b/docs/reference/analysis/tokenfilters/remove-duplicates-tokenfilter.asciidoc @@ -0,0 +1,5 @@ 
+[[analysis-remove-duplicates-tokenfilter]] +=== Remove Duplicates Token Filter + +A token filter of type `remove_duplicates` that drops identical tokens at the +same position. diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 722d75a9293f7..04df77245438c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -194,6 +194,7 @@ public Map> getTokenFilters() { filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new)); filters.put("persian_normalization", PersianNormalizationFilterFactory::new); filters.put("porter_stem", PorterStemTokenFilterFactory::new); + filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new); filters.put("reverse", ReverseTokenFilterFactory::new); filters.put("russian_stem", RussianStemTokenFilterFactory::new); filters.put("scandinavian_folding", ScandinavianFoldingFilterFactory::new); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java new file mode 100644 index 0000000000000..a136c5573121e --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RemoveDuplicatesTokenFilterFactory.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; + +/** + * Filter factory for the lucene RemoveDuplicatesTokenFilter + */ +class RemoveDuplicatesTokenFilterFactory extends AbstractTokenFilterFactory { + + RemoveDuplicatesTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { + super(indexSettings, name, settings); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + return new RemoveDuplicatesTokenFilter(tokenStream); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java new file mode 100644 index 0000000000000..8180985416f52 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RemoveDuplicatesFilterFactoryTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.CannedTokenStream; +import org.apache.lucene.analysis.Token; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.analysis.AnalysisTestsHelper; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.instanceOf; + +public class RemoveDuplicatesFilterFactoryTests extends ESTokenStreamTestCase { + + public void testRemoveDuplicatesFilter() throws IOException { + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.removedups.type", "remove_duplicates") + .build(); + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("removedups"); + assertThat(tokenFilter, instanceOf(RemoveDuplicatesTokenFilterFactory.class)); + + CannedTokenStream cts = new CannedTokenStream( + new Token("a", 1, 0, 1), + new Token("b", 1, 2, 3), + new Token("c", 0, 2, 3), + new Token("b", 0, 2, 3), + new Token("d", 1, 4, 5) + ); + + assertTokenStreamContents(tokenFilter.create(cts), new String[]{ + "a", "b", "c", "d" + }, new int[]{ + 1, 1, 0, 1 + }); + } + +} From f5135050526d5ba3febcb0b43a7dd708d49a4c0f Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Mon, 18 Jun 2018 10:49:22 +0200 Subject: [PATCH 19/34] [Test] Fix :example-plugins:rest-handler on Windows --- plugins/examples/rest-handler/build.gradle | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/plugins/examples/rest-handler/build.gradle b/plugins/examples/rest-handler/build.gradle index 2c55c3c79fce7..cfe84e6a45a93 100644 --- a/plugins/examples/rest-handler/build.gradle +++ b/plugins/examples/rest-handler/build.gradle @@ -30,10 +30,9 @@ test.enabled = false task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) { dependsOn testClasses + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" executable = new File(project.runtimeJavaHome, 'bin/java') - args '-cp', "${ -> project.sourceSets.test.runtimeClasspath.asPath }", - 'org.elasticsearch.example.resthandler.ExampleFixture', - baseDir, 'TEST' + args 'org.elasticsearch.example.resthandler.ExampleFixture', baseDir, 'TEST' } integTestCluster { From 47095357bc1ae0df9a19ea328a64745b11aa2e08 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Mon, 18 Jun 2018 11:24:43 +0200 Subject: [PATCH 20/34] Move language analyzers from server to analysis-common module. (#31300) The following analyzers were moved from server module to analysis-common module: `greek`, `hindi`, `hungarian`, `indonesian`, `irish`, `italian`, `latvian`, `lithuanian`, `norwegian`, `persian`, `portuguese`, `romanian`, `russian`, `sorani`, `spanish`, `swedish`, `turkish` and `thai`. 
Relates to #23658 --- .../analysis/common/CommonAnalysisPlugin.java | 62 ++- .../common}/GreekAnalyzerProvider.java | 6 +- .../common}/HindiAnalyzerProvider.java | 6 +- .../common}/HungarianAnalyzerProvider.java | 6 +- .../common}/IndonesianAnalyzerProvider.java | 6 +- .../common}/IrishAnalyzerProvider.java | 6 +- .../common}/ItalianAnalyzerProvider.java | 6 +- .../common}/LatvianAnalyzerProvider.java | 6 +- .../common}/LithuanianAnalyzerProvider.java | 6 +- .../common}/NorwegianAnalyzerProvider.java | 6 +- .../common}/PersianAnalyzerProvider.java | 6 +- .../common}/PortugueseAnalyzerProvider.java | 6 +- .../common}/RomanianAnalyzerProvider.java | 6 +- .../common}/RussianAnalyzerProvider.java | 6 +- .../common}/SoraniAnalyzerProvider.java | 6 +- .../common}/SpanishAnalyzerProvider.java | 6 +- .../common}/SwedishAnalyzerProvider.java | 6 +- .../common}/ThaiAnalyzerProvider.java | 6 +- .../common}/TurkishAnalyzerProvider.java | 6 +- .../test/analysis-common/20_analyzers.yml | 522 ++++++++++++++++++ .../indices/analysis/AnalysisModule.java | 36 -- .../indices/analysis/PreBuiltAnalyzers.java | 180 ------ .../indices/analysis/AnalysisModuleTests.java | 2 +- 23 files changed, 655 insertions(+), 255 deletions(-) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/GreekAnalyzerProvider.java (84%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/HindiAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/HungarianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/IndonesianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/IrishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/ItalianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/LatvianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/LithuanianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/NorwegianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/PersianAnalyzerProvider.java (83%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/PortugueseAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/RomanianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/RussianAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => 
modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SoraniAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SpanishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/SwedishAnalyzerProvider.java (85%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/ThaiAnalyzerProvider.java (83%) rename {server/src/main/java/org/elasticsearch/index/analysis => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/TurkishAnalyzerProvider.java (85%) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 04df77245438c..cdd8101a73c70 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -37,6 +37,7 @@ import org.apache.lucene.analysis.cjk.CJKAnalyzer; import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.cjk.CJKWidthFilter; +import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; import org.apache.lucene.analysis.core.DecimalDigitFilter; @@ -52,17 +53,27 @@ import org.apache.lucene.analysis.de.GermanAnalyzer; import org.apache.lucene.analysis.de.GermanNormalizationFilter; import org.apache.lucene.analysis.de.GermanStemFilter; +import org.apache.lucene.analysis.el.GreekAnalyzer; import org.apache.lucene.analysis.en.EnglishAnalyzer; import org.apache.lucene.analysis.en.KStemFilter; import org.apache.lucene.analysis.en.PorterStemFilter; +import org.apache.lucene.analysis.es.SpanishAnalyzer; import org.apache.lucene.analysis.eu.BasqueAnalyzer; +import org.apache.lucene.analysis.fa.PersianAnalyzer; import org.apache.lucene.analysis.fa.PersianNormalizationFilter; import org.apache.lucene.analysis.fi.FinnishAnalyzer; import org.apache.lucene.analysis.fr.FrenchAnalyzer; +import org.apache.lucene.analysis.ga.IrishAnalyzer; import org.apache.lucene.analysis.gl.GalicianAnalyzer; +import org.apache.lucene.analysis.hi.HindiAnalyzer; import org.apache.lucene.analysis.hi.HindiNormalizationFilter; +import org.apache.lucene.analysis.hu.HungarianAnalyzer; import org.apache.lucene.analysis.hy.ArmenianAnalyzer; +import org.apache.lucene.analysis.id.IndonesianAnalyzer; import org.apache.lucene.analysis.in.IndicNormalizationFilter; +import org.apache.lucene.analysis.it.ItalianAnalyzer; +import org.apache.lucene.analysis.lt.LithuanianAnalyzer; +import org.apache.lucene.analysis.lv.LatvianAnalyzer; import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter; import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute; import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter; @@ -79,19 +90,26 @@ import org.apache.lucene.analysis.ngram.NGramTokenFilter; import org.apache.lucene.analysis.ngram.NGramTokenizer; import org.apache.lucene.analysis.nl.DutchAnalyzer; +import org.apache.lucene.analysis.no.NorwegianAnalyzer; import 
org.apache.lucene.analysis.path.PathHierarchyTokenizer; import org.apache.lucene.analysis.pattern.PatternTokenizer; import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter; import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter; +import org.apache.lucene.analysis.pt.PortugueseAnalyzer; import org.apache.lucene.analysis.reverse.ReverseStringFilter; +import org.apache.lucene.analysis.ro.RomanianAnalyzer; +import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.apache.lucene.analysis.shingle.ShingleFilter; import org.apache.lucene.analysis.snowball.SnowballFilter; import org.apache.lucene.analysis.standard.ClassicFilter; import org.apache.lucene.analysis.standard.ClassicTokenizer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer; +import org.apache.lucene.analysis.sv.SwedishAnalyzer; +import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.th.ThaiTokenizer; import org.apache.lucene.analysis.tr.ApostropheFilter; +import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.analysis.util.ElisionFilter; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; @@ -130,6 +148,8 @@ public Map>> getAn analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new); analyzers.put("pattern", PatternAnalyzerProvider::new); analyzers.put("snowball", SnowballAnalyzerProvider::new); + + // Language analyzers: analyzers.put("arabic", ArabicAnalyzerProvider::new); analyzers.put("armenian", ArmenianAnalyzerProvider::new); analyzers.put("basque", BasqueAnalyzerProvider::new); @@ -147,6 +167,24 @@ public Map>> getAn analyzers.put("french", FrenchAnalyzerProvider::new); analyzers.put("galician", GalicianAnalyzerProvider::new); analyzers.put("german", GermanAnalyzerProvider::new); + analyzers.put("greek", GreekAnalyzerProvider::new); + analyzers.put("hindi", HindiAnalyzerProvider::new); + analyzers.put("hungarian", HungarianAnalyzerProvider::new); + analyzers.put("indonesian", IndonesianAnalyzerProvider::new); + analyzers.put("irish", IrishAnalyzerProvider::new); + analyzers.put("italian", ItalianAnalyzerProvider::new); + analyzers.put("latvian", LatvianAnalyzerProvider::new); + analyzers.put("lithuanian", LithuanianAnalyzerProvider::new); + analyzers.put("norwegian", NorwegianAnalyzerProvider::new); + analyzers.put("persian", PersianAnalyzerProvider::new); + analyzers.put("portuguese", PortugueseAnalyzerProvider::new); + analyzers.put("romanian", RomanianAnalyzerProvider::new); + analyzers.put("russian", RussianAnalyzerProvider::new); + analyzers.put("sorani", SoraniAnalyzerProvider::new); + analyzers.put("spanish", SpanishAnalyzerProvider::new); + analyzers.put("swedish", SwedishAnalyzerProvider::new); + analyzers.put("turkish", TurkishAnalyzerProvider::new); + analyzers.put("thai", ThaiAnalyzerProvider::new); return analyzers; } @@ -248,13 +286,15 @@ public Map> getTokenizers() { @Override public List getPreBuiltAnalyzerProviderFactories() { List analyzers = new ArrayList<>(); - analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE, + analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.ELASTICSEARCH, () -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET))); analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH, () -> new PatternAnalyzer(Regex.compile("\\W+" 
/*PatternAnalyzer.NON_WORD_PATTERN*/, null), true, CharArraySet.EMPTY_SET))); analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE, () -> new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET))); + + // Language analyzers: analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, ArabicAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("armenian", CachingStrategy.LUCENE, ArmenianAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("basque", CachingStrategy.LUCENE, BasqueAnalyzer::new)); @@ -263,7 +303,7 @@ public List getPreBuiltAnalyzerProviderFactorie analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, BulgarianAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, CatalanAnalyzer::new)); // chinese analyzer: only for old indices, best effort - analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.LUCENE, StandardAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.ONE, StandardAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, CJKAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, CzechAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, DanishAnalyzer::new)); @@ -273,6 +313,24 @@ public List getPreBuiltAnalyzerProviderFactorie analyzers.add(new PreBuiltAnalyzerProviderFactory("french", CachingStrategy.LUCENE, FrenchAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("galician", CachingStrategy.LUCENE, GalicianAnalyzer::new)); analyzers.add(new PreBuiltAnalyzerProviderFactory("german", CachingStrategy.LUCENE, GermanAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("greek", CachingStrategy.LUCENE, GreekAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("hindi", CachingStrategy.LUCENE, HindiAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("hungarian", CachingStrategy.LUCENE, HungarianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("indonesian", CachingStrategy.LUCENE, IndonesianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("irish", CachingStrategy.LUCENE, IrishAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("italian", CachingStrategy.LUCENE, ItalianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("latvian", CachingStrategy.LUCENE, LatvianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("lithuanian", CachingStrategy.LUCENE, LithuanianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("norwegian", CachingStrategy.LUCENE, NorwegianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("persian", CachingStrategy.LUCENE, PersianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("portuguese", CachingStrategy.LUCENE, PortugueseAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("romanian", CachingStrategy.LUCENE, RomanianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("russian", CachingStrategy.LUCENE, RussianAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("sorani", CachingStrategy.LUCENE, SoraniAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("spanish", CachingStrategy.LUCENE, SpanishAnalyzer::new)); + 
analyzers.add(new PreBuiltAnalyzerProviderFactory("swedish", CachingStrategy.LUCENE, SwedishAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("turkish", CachingStrategy.LUCENE, TurkishAnalyzer::new)); + analyzers.add(new PreBuiltAnalyzerProviderFactory("thai", CachingStrategy.LUCENE, ThaiAnalyzer::new)); return analyzers; } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/GreekAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GreekAnalyzerProvider.java similarity index 84% rename from server/src/main/java/org/elasticsearch/index/analysis/GreekAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GreekAnalyzerProvider.java index d14539269b497..2c237f7fe3cd0 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/GreekAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/GreekAnalyzerProvider.java @@ -17,18 +17,20 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.el.GreekAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class GreekAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final GreekAnalyzer analyzer; - public GreekAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + GreekAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new GreekAnalyzer( Analysis.parseStopWords(env, settings, GreekAnalyzer.getDefaultStopSet())); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/HindiAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HindiAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/HindiAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HindiAnalyzerProvider.java index 166390df1ba3f..2f4d50c4c76b4 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/HindiAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HindiAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.hi.HindiAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class HindiAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final HindiAnalyzer analyzer; - public HindiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + HindiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new HindiAnalyzer( Analysis.parseStopWords(env, settings, HindiAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/HungarianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HungarianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/HungarianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HungarianAnalyzerProvider.java index f4c481701f688..55ade50a1c95a 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/HungarianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HungarianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.hu.HungarianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class HungarianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final HungarianAnalyzer analyzer; - public HungarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + HungarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new HungarianAnalyzer( Analysis.parseStopWords(env, settings, HungarianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IndonesianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IndonesianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/IndonesianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IndonesianAnalyzerProvider.java index f5a31fb3f8911..583268165f243 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/IndonesianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IndonesianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.id.IndonesianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class IndonesianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final IndonesianAnalyzer analyzer; - public IndonesianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + IndonesianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new IndonesianAnalyzer( Analysis.parseStopWords(env, settings, IndonesianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IrishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IrishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/IrishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IrishAnalyzerProvider.java index 9178f9e70d185..515121a3a6ae4 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/IrishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/IrishAnalyzerProvider.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ga.IrishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; /** * Provider for {@link IrishAnalyzer} @@ -32,7 +34,7 @@ public class IrishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ItalianAnalyzer analyzer; - public ItalianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ItalianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new ItalianAnalyzer( Analysis.parseStopWords(env, settings, ItalianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/LatvianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LatvianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/LatvianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LatvianAnalyzerProvider.java index 39f4c02853e5c..d57f47923d1a1 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/LatvianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LatvianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.lv.LatvianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class LatvianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final LatvianAnalyzer analyzer; - public LatvianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + LatvianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new LatvianAnalyzer( Analysis.parseStopWords(env, settings, LatvianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/LithuanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LithuanianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/LithuanianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LithuanianAnalyzerProvider.java index c8d25ce3342fe..b6db45d41744d 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/LithuanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LithuanianAnalyzerProvider.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.lt.LithuanianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; /** * Provider for {@link LithuanianAnalyzer} @@ -32,7 +34,7 @@ public class LithuanianAnalyzerProvider extends AbstractIndexAnalyzerProvider

  • { private final NorwegianAnalyzer analyzer; - public NorwegianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + NorwegianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new NorwegianAnalyzer( Analysis.parseStopWords(env, settings, NorwegianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PersianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java similarity index 83% rename from server/src/main/java/org/elasticsearch/index/analysis/PersianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java index 298a6dacf48aa..2cca6bbe09b24 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PersianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java @@ -17,18 +17,20 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.fa.PersianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final PersianAnalyzer analyzer; - public PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); analyzer.setVersion(version); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PortugueseAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PortugueseAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/PortugueseAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PortugueseAnalyzerProvider.java index cb0b0e6a20954..9bf27c8da763c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PortugueseAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PortugueseAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.pt.PortugueseAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class PortugueseAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final PortugueseAnalyzer analyzer; - public PortugueseAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + PortugueseAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new PortugueseAnalyzer( Analysis.parseStopWords(env, settings, PortugueseAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/RomanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/RomanianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java index d92663d65c03d..a71344a2bdb85 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/RomanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ro.RomanianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final RomanianAnalyzer analyzer; - public RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new RomanianAnalyzer( Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/RussianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RussianAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/RussianAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RussianAnalyzerProvider.java index 9529f31a67b3d..4d015ae3578e6 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/RussianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RussianAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class RussianAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final RussianAnalyzer analyzer; - public RussianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + RussianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new RussianAnalyzer( Analysis.parseStopWords(env, settings, RussianAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SoraniAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/SoraniAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniAnalyzerProvider.java index ea5ab7d885a73..1391e924bbc21 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SoraniAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SoraniAnalyzerProvider.java @@ -17,13 +17,15 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; /** * Provider for {@link SoraniAnalyzer} @@ -32,7 +34,7 @@ public class SoraniAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final SpanishAnalyzer analyzer; - public SpanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + SpanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new SpanishAnalyzer( Analysis.parseStopWords(env, settings, SpanishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SwedishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SwedishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/SwedishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SwedishAnalyzerProvider.java index 6e868609fa93d..bf01e63e625e5 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SwedishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SwedishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.sv.SwedishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class SwedishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final SwedishAnalyzer analyzer; - public SwedishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + SwedishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new SwedishAnalyzer( Analysis.parseStopWords(env, settings, SwedishAnalyzer.getDefaultStopSet()), diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ThaiAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiAnalyzerProvider.java similarity index 83% rename from server/src/main/java/org/elasticsearch/index/analysis/ThaiAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiAnalyzerProvider.java index e692e952c5d41..7f9543904d9fc 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/ThaiAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiAnalyzerProvider.java @@ -17,18 +17,20 @@ * under the License. */ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class ThaiAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final ThaiAnalyzer analyzer; - public ThaiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + ThaiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new ThaiAnalyzer( Analysis.parseStopWords(env, settings, ThaiAnalyzer.getDefaultStopSet())); diff --git a/server/src/main/java/org/elasticsearch/index/analysis/TurkishAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/TurkishAnalyzerProvider.java similarity index 85% rename from server/src/main/java/org/elasticsearch/index/analysis/TurkishAnalyzerProvider.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/TurkishAnalyzerProvider.java index e3445c2c2fc58..ecf9d082e7017 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/TurkishAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/TurkishAnalyzerProvider.java @@ -17,19 +17,21 @@ * under the License. 
*/ -package org.elasticsearch.index.analysis; +package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.CharArraySet; import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; +import org.elasticsearch.index.analysis.Analysis; public class TurkishAnalyzerProvider extends AbstractIndexAnalyzerProvider { private final TurkishAnalyzer analyzer; - public TurkishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { + TurkishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); analyzer = new TurkishAnalyzer( Analysis.parseStopWords(env, settings, TurkishAnalyzer.getDefaultStopSet()), diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index fa0476c0f11aa..fa8f6eef8b924 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -592,3 +592,525 @@ analyzer: my_analyzer - length: { tokens: 1 } - match: { tokens.0.token: tisch } + +--- +"greek": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: greek + + - do: + indices.analyze: + body: + text: Μία + analyzer: greek + - length: { tokens: 1 } + - match: { tokens.0.token: μια } + + - do: + indices.analyze: + index: test + body: + text: Μία + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: μια } + +--- +"hindi": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: hindi + + - do: + indices.analyze: + body: + text: हिन्दी + analyzer: hindi + - length: { tokens: 1 } + - match: { tokens.0.token: हिंद } + + - do: + indices.analyze: + index: test + body: + text: हिन्दी + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: हिंद } + +--- +"hungarian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: hungarian + + - do: + indices.analyze: + body: + text: babakocsi + analyzer: hungarian + - length: { tokens: 1 } + - match: { tokens.0.token: babakocs } + + - do: + indices.analyze: + index: test + body: + text: babakocsi + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: babakocs } + +--- +"indonesian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: indonesian + + - do: + indices.analyze: + body: + text: peledakan + analyzer: indonesian + - length: { tokens: 1 } + - match: { tokens.0.token: ledak } + + - do: + indices.analyze: + index: test + body: + text: peledakan + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: ledak } + +--- +"irish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: irish + + - do: + indices.analyze: + body: + text: siopadóireacht + analyzer: irish + - length: { tokens: 1 } + - match: { tokens.0.token: siopadóir } + + - do: + indices.analyze: + index: test + body: + text: siopadóireacht + analyzer: 
my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: siopadóir } + +--- +"italian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: italian + + - do: + indices.analyze: + body: + text: abbandonata + analyzer: italian + - length: { tokens: 1 } + - match: { tokens.0.token: abbandonat } + + - do: + indices.analyze: + index: test + body: + text: abbandonata + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: abbandonat } + +--- +"latvian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: latvian + + - do: + indices.analyze: + body: + text: tirgiem + analyzer: latvian + - length: { tokens: 1 } + - match: { tokens.0.token: tirg } + + - do: + indices.analyze: + index: test + body: + text: tirgiem + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: tirg } + +--- +"lithuanian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: lithuanian + + - do: + indices.analyze: + body: + text: vaikų + analyzer: lithuanian + - length: { tokens: 1 } + - match: { tokens.0.token: vaik } + + - do: + indices.analyze: + index: test + body: + text: vaikų + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: vaik } + +--- +"norwegian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: norwegian + + - do: + indices.analyze: + body: + text: havnedistriktene + analyzer: norwegian + - length: { tokens: 1 } + - match: { tokens.0.token: havnedistrikt } + + - do: + indices.analyze: + index: test + body: + text: havnedistriktene + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: havnedistrikt } + +--- +"persian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: persian + + - do: + indices.analyze: + body: + text: می‌خورد + analyzer: persian + - length: { tokens: 1 } + - match: { tokens.0.token: خورد } + + - do: + indices.analyze: + index: test + body: + text: می‌خورد + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: خورد } + +--- +"portuguese": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: portuguese + + - do: + indices.analyze: + body: + text: quilométricas + analyzer: portuguese + - length: { tokens: 1 } + - match: { tokens.0.token: quilometric } + + - do: + indices.analyze: + index: test + body: + text: quilométricas + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: quilometric } + +--- +"romanian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: romanian + + - do: + indices.analyze: + body: + text: absenţa + analyzer: romanian + - length: { tokens: 1 } + - match: { tokens.0.token: absenţ } + + - do: + indices.analyze: + index: test + body: + text: absenţa + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: absenţ } + +--- +"russian": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: russian + + - do: + indices.analyze: + body: + text: Вместе с тем о + analyzer: russian + - length: { tokens: 1 } + - match: { tokens.0.token: вмест } + + - do: + indices.analyze: + index: test + body: + text: Вместе с тем о + analyzer: my_analyzer + - length: { tokens: 1 } + - match: 
{ tokens.0.token: вмест } + +--- +"sorani": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: sorani + + - do: + indices.analyze: + body: + text: پیاوە + analyzer: sorani + - length: { tokens: 1 } + - match: { tokens.0.token: پیاو } + + - do: + indices.analyze: + index: test + body: + text: پیاوە + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: پیاو } + +--- +"spanish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: spanish + + - do: + indices.analyze: + body: + text: chicana + analyzer: spanish + - length: { tokens: 1 } + - match: { tokens.0.token: chican } + + - do: + indices.analyze: + index: test + body: + text: chicana + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: chican } + +--- +"swedish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: swedish + + - do: + indices.analyze: + body: + text: jaktkarlarne + analyzer: swedish + - length: { tokens: 1 } + - match: { tokens.0.token: jaktkarl } + + - do: + indices.analyze: + index: test + body: + text: jaktkarlarne + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: jaktkarl } + +--- +"turkish": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: turkish + + - do: + indices.analyze: + body: + text: ağacı + analyzer: turkish + - length: { tokens: 1 } + - match: { tokens.0.token: ağaç } + + - do: + indices.analyze: + index: test + body: + text: ağacı + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: ağaç } + +--- +"thai": + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: thai + + - do: + indices.analyze: + body: + text: ๑๒๓๔ + analyzer: thai + - length: { tokens: 1 } + - match: { tokens.0.token: "1234" } + + - do: + indices.analyze: + index: test + body: + text: ๑๒๓๔ + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: "1234" } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java index 6b7860c0cf949..364732dc1833d 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/AnalysisModule.java @@ -30,39 +30,21 @@ import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.GreekAnalyzerProvider; -import org.elasticsearch.index.analysis.HindiAnalyzerProvider; -import org.elasticsearch.index.analysis.HungarianAnalyzerProvider; import org.elasticsearch.index.analysis.HunspellTokenFilterFactory; -import org.elasticsearch.index.analysis.IndonesianAnalyzerProvider; -import org.elasticsearch.index.analysis.IrishAnalyzerProvider; -import org.elasticsearch.index.analysis.ItalianAnalyzerProvider; import org.elasticsearch.index.analysis.KeywordAnalyzerProvider; -import org.elasticsearch.index.analysis.LatvianAnalyzerProvider; -import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider; -import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider; -import org.elasticsearch.index.analysis.PersianAnalyzerProvider; -import 
org.elasticsearch.index.analysis.PortugueseAnalyzerProvider; import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory; import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenizer; -import org.elasticsearch.index.analysis.RomanianAnalyzerProvider; -import org.elasticsearch.index.analysis.RussianAnalyzerProvider; import org.elasticsearch.index.analysis.ShingleTokenFilterFactory; import org.elasticsearch.index.analysis.SimpleAnalyzerProvider; -import org.elasticsearch.index.analysis.SoraniAnalyzerProvider; -import org.elasticsearch.index.analysis.SpanishAnalyzerProvider; import org.elasticsearch.index.analysis.StandardAnalyzerProvider; import org.elasticsearch.index.analysis.StandardTokenFilterFactory; import org.elasticsearch.index.analysis.StandardTokenizerFactory; import org.elasticsearch.index.analysis.StopAnalyzerProvider; import org.elasticsearch.index.analysis.StopTokenFilterFactory; -import org.elasticsearch.index.analysis.SwedishAnalyzerProvider; -import org.elasticsearch.index.analysis.ThaiAnalyzerProvider; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.index.analysis.TurkishAnalyzerProvider; import org.elasticsearch.index.analysis.WhitespaceAnalyzerProvider; import org.elasticsearch.plugins.AnalysisPlugin; @@ -227,24 +209,6 @@ private NamedRegistry>> setupAnalyzers(List analyzers.register("stop", StopAnalyzerProvider::new); analyzers.register("whitespace", WhitespaceAnalyzerProvider::new); analyzers.register("keyword", KeywordAnalyzerProvider::new); - analyzers.register("greek", GreekAnalyzerProvider::new); - analyzers.register("hindi", HindiAnalyzerProvider::new); - analyzers.register("hungarian", HungarianAnalyzerProvider::new); - analyzers.register("indonesian", IndonesianAnalyzerProvider::new); - analyzers.register("irish", IrishAnalyzerProvider::new); - analyzers.register("italian", ItalianAnalyzerProvider::new); - analyzers.register("latvian", LatvianAnalyzerProvider::new); - analyzers.register("lithuanian", LithuanianAnalyzerProvider::new); - analyzers.register("norwegian", NorwegianAnalyzerProvider::new); - analyzers.register("persian", PersianAnalyzerProvider::new); - analyzers.register("portuguese", PortugueseAnalyzerProvider::new); - analyzers.register("romanian", RomanianAnalyzerProvider::new); - analyzers.register("russian", RussianAnalyzerProvider::new); - analyzers.register("sorani", SoraniAnalyzerProvider::new); - analyzers.register("spanish", SpanishAnalyzerProvider::new); - analyzers.register("swedish", SwedishAnalyzerProvider::new); - analyzers.register("turkish", TurkishAnalyzerProvider::new); - analyzers.register("thai", ThaiAnalyzerProvider::new); analyzers.extractAndRegister(plugins, AnalysisPlugin::getAnalyzers); return analyzers; } diff --git a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java index 0e9aed3c142d9..0f31a8a46f1db 100644 --- a/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java +++ b/server/src/main/java/org/elasticsearch/indices/analysis/PreBuiltAnalyzers.java @@ -20,30 +20,12 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; -import org.apache.lucene.analysis.ckb.SoraniAnalyzer; import 
org.apache.lucene.analysis.core.KeywordAnalyzer; import org.apache.lucene.analysis.core.SimpleAnalyzer; import org.apache.lucene.analysis.core.StopAnalyzer; import org.apache.lucene.analysis.core.WhitespaceAnalyzer; -import org.apache.lucene.analysis.el.GreekAnalyzer; -import org.apache.lucene.analysis.es.SpanishAnalyzer; -import org.apache.lucene.analysis.fa.PersianAnalyzer; -import org.apache.lucene.analysis.ga.IrishAnalyzer; -import org.apache.lucene.analysis.hi.HindiAnalyzer; -import org.apache.lucene.analysis.hu.HungarianAnalyzer; -import org.apache.lucene.analysis.id.IndonesianAnalyzer; -import org.apache.lucene.analysis.it.ItalianAnalyzer; -import org.apache.lucene.analysis.lt.LithuanianAnalyzer; -import org.apache.lucene.analysis.lv.LatvianAnalyzer; -import org.apache.lucene.analysis.no.NorwegianAnalyzer; -import org.apache.lucene.analysis.pt.PortugueseAnalyzer; -import org.apache.lucene.analysis.ro.RomanianAnalyzer; -import org.apache.lucene.analysis.ru.RussianAnalyzer; import org.apache.lucene.analysis.standard.ClassicAnalyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.analysis.sv.SwedishAnalyzer; -import org.apache.lucene.analysis.th.ThaiAnalyzer; -import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.elasticsearch.Version; import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; @@ -110,168 +92,6 @@ protected Analyzer create(Version version) { a.setVersion(version.luceneVersion); return a; } - }, - - GREEK { - @Override - protected Analyzer create(Version version) { - Analyzer a = new GreekAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - HINDI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new HindiAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - HUNGARIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new HungarianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - INDONESIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new IndonesianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - IRISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new IrishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ITALIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ItalianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - LATVIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new LatvianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - LITHUANIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new LithuanianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - NORWEGIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new NorwegianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - PERSIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new PersianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - PORTUGUESE { - @Override - protected Analyzer create(Version version) { - Analyzer a = new PortugueseAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - ROMANIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new RomanianAnalyzer(); - 
a.setVersion(version.luceneVersion); - return a; - } - }, - - RUSSIAN { - @Override - protected Analyzer create(Version version) { - Analyzer a = new RussianAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - SORANI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new SoraniAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - SPANISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new SpanishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - SWEDISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new SwedishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - TURKISH { - @Override - protected Analyzer create(Version version) { - Analyzer a = new TurkishAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } - }, - - THAI { - @Override - protected Analyzer create(Version version) { - Analyzer a = new ThaiAnalyzer(); - a.setVersion(version.luceneVersion); - return a; - } }; protected abstract Analyzer create(Version version); diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index a31dcc81f722e..47f30e10ef912 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java +++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -165,7 +165,7 @@ public void testVersionedAnalyzers() throws Exception { assertEquals(Version.V_5_0_0.luceneVersion, indexAnalyzers.get("standard").analyzer().getVersion()); assertEquals(Version.V_5_0_0.luceneVersion, - indexAnalyzers.get("thai").analyzer().getVersion()); + indexAnalyzers.get("stop").analyzer().getVersion()); assertThat(indexAnalyzers.get("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class))); assertEquals(org.apache.lucene.util.Version.fromBits(3,6,0), From c4f8df3ad6b8657557a7efb9399e70927e1f0340 Mon Sep 17 00:00:00 2001 From: Sohaib Iftikhar Date: Mon, 18 Jun 2018 15:59:29 +0200 Subject: [PATCH 21/34] REST high-level client: add validate query API (#31077) Adds the validate query API to the high level rest client. 
--- .../elasticsearch/client/IndicesClient.java | 32 +++++ .../client/RequestConverters.java | 15 +++ .../elasticsearch/client/IndicesClientIT.java | 39 ++++++ .../client/RequestConvertersTests.java | 35 ++++++ .../IndicesClientDocumentationIT.java | 83 +++++++++++++ .../indices/validate_query.asciidoc | 113 ++++++++++++++++++ .../high-level/supported-apis.asciidoc | 2 + .../validate/query/QueryExplanation.java | 82 ++++++++++++- .../validate/query/ValidateQueryRequest.java | 12 +- .../validate/query/ValidateQueryResponse.java | 53 +++++--- .../indices/RestValidateQueryAction.java | 3 +- .../validate/query/QueryExplanationTests.java | 59 +++++++++ .../query/ValidateQueryResponseTests.java | 110 +++++++++++++++++ 13 files changed, 618 insertions(+), 20 deletions(-) create mode 100644 docs/java-rest/high-level/indices/validate_query.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 5f85b18091d72..30a42eb333f4a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -57,6 +57,8 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -661,6 +663,36 @@ public void putTemplateAsync(PutIndexTemplateRequest putIndexTemplateRequest, Re PutIndexTemplateResponse::fromXContent, listener, emptySet()); } + /** + * Validate a potentially expensive query without executing it. + *

    + * See Validate Query API + * on elastic.co + * @param validateQueryRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public ValidateQueryResponse validateQuery(ValidateQueryRequest validateQueryRequest, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options, + ValidateQueryResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously validate a potentially expensive query without executing it. + *

    + * See Validate Query API + * on elastic.co + * @param validateQueryRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void validateQueryAsync(ValidateQueryRequest validateQueryRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options, + ValidateQueryResponse::fromXContent, listener, emptySet()); + } + /** * Gets index templates using the Index Templates API * See Index Templates API diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 93bf6a1a19881..ab85af9f1fd7e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -58,6 +58,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; @@ -856,6 +857,20 @@ static Request putTemplate(PutIndexTemplateRequest putIndexTemplateRequest) thro return request; } + static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException { + String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices(); + String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types(); + String endpoint = endpoint(indices, types, "_validate/query"); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withIndicesOptions(validateQueryRequest.indicesOptions()); + params.putParam("explain", Boolean.toString(validateQueryRequest.explain())); + params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards())); + params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite())); + request.setEntity(createEntity(validateQueryRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getAlias(GetAliasesRequest getAliasesRequest) { String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 82ac161f5afe0..c226b5349267c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.client; import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; @@ -63,6 +64,8 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; @@ -80,6 +83,8 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -1155,6 +1160,40 @@ public void testPutTemplateBadRequests() throws Exception { assertThat(unknownSettingError.getDetailedMessage(), containsString("unknown setting [index.this-setting-does-not-exist]")); } + public void testValidateQuery() throws IOException{ + String index = "some_index"; + createIndex(index, Settings.EMPTY); + QueryBuilder builder = QueryBuilders + .boolQuery() + .must(QueryBuilders.queryStringQuery("*:*")) + .filter(QueryBuilders.termQuery("user", "kimchy")); + ValidateQueryRequest request = new ValidateQueryRequest(index).query(builder); + request.explain(randomBoolean()); + ValidateQueryResponse response = execute(request, highLevelClient().indices()::validateQuery, + highLevelClient().indices()::validateQueryAsync); + assertTrue(response.isValid()); + } + + public void testInvalidValidateQuery() throws IOException{ + String index = "shakespeare"; + + createIndex(index, Settings.EMPTY); + Request postDoc = new Request(HttpPost.METHOD_NAME, "/" + index + "/1"); + postDoc.setJsonEntity( + "{\"type\":\"act\",\"line_id\":1,\"play_name\":\"Henry IV\", \"speech_number\":\"\"," + + "\"line_number\":\"\",\"speaker\":\"\",\"text_entry\":\"ACT I\"}"); + assertOK(client().performRequest(postDoc)); + + QueryBuilder builder = QueryBuilders + .queryStringQuery("line_id:foo") + .lenient(false); + ValidateQueryRequest request = new ValidateQueryRequest(index).query(builder); + request.explain(true); + ValidateQueryResponse response = execute(request, highLevelClient().indices()::validateQuery, + highLevelClient().indices()::validateQueryAsync); + assertFalse(response.isValid()); + } + public void testGetIndexTemplate() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index aa8221f30991e..60f427b490462 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -60,6 +60,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; @@ -1895,6 +1896,40 @@ public void testPutTemplateRequest() throws Exception { assertToXContentBody(putTemplateRequest, request.getEntity()); } + public void testValidateQuery() throws Exception { + String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5); + String[] types = randomBoolean() ? generateRandomStringArray(5, 5, false, false) : null; + ValidateQueryRequest validateQueryRequest; + if (randomBoolean()) { + validateQueryRequest = new ValidateQueryRequest(indices); + } else { + validateQueryRequest = new ValidateQueryRequest(); + validateQueryRequest.indices(indices); + } + validateQueryRequest.types(types); + Map expectedParams = new HashMap<>(); + setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions, expectedParams); + validateQueryRequest.explain(randomBoolean()); + validateQueryRequest.rewrite(randomBoolean()); + validateQueryRequest.allShards(randomBoolean()); + expectedParams.put("explain", Boolean.toString(validateQueryRequest.explain())); + expectedParams.put("rewrite", Boolean.toString(validateQueryRequest.rewrite())); + expectedParams.put("all_shards", Boolean.toString(validateQueryRequest.allShards())); + Request request = RequestConverters.validateQuery(validateQueryRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + if (indices != null && indices.length > 0) { + endpoint.add(String.join(",", indices)); + if (types != null && types.length > 0) { + endpoint.add(String.join(",", types)); + } + } + endpoint.add("_validate/query"); + assertThat(request.getEndpoint(), equalTo(endpoint.toString())); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertToXContentBody(validateQueryRequest, request.getEntity()); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + } + public void testGetTemplateRequest() throws Exception { Map encodes = new HashMap<>(); encodes.put("log", "log"); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 7bd6b16cecc99..9cc28152d03e3 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -62,6 +62,9 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest; import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse; +import 
org.elasticsearch.action.admin.indices.validate.query.QueryExplanation; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.IndicesOptions; @@ -81,6 +84,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; @@ -2128,4 +2132,83 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + + public void testValidateQuery() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + String index = "some_index"; + createIndex(index, Settings.EMPTY); + + // tag::validate-query-request + ValidateQueryRequest request = new ValidateQueryRequest(index); // <1> + // end::validate-query-request + + // tag::validate-query-request-query + QueryBuilder builder = QueryBuilders + .boolQuery() // <1> + .must(QueryBuilders.queryStringQuery("*:*")) + .filter(QueryBuilders.termQuery("user", "kimchy")); + request.query(builder); // <2> + // end::validate-query-request-query + + // tag::validate-query-request-explain + request.explain(true); // <1> + // end::validate-query-request-explain + + // tag::validate-query-request-allShards + request.allShards(true); // <1> + // end::validate-query-request-allShards + + // tag::validate-query-request-rewrite + request.rewrite(true); // <1> + // end::validate-query-request-rewrite + + // tag::validate-query-execute + ValidateQueryResponse response = client.indices().validateQuery(request, RequestOptions.DEFAULT); // <1> + // end::validate-query-execute + + // tag::validate-query-response + boolean isValid = response.isValid(); // <1> + int totalShards = response.getTotalShards(); // <2> + int successfulShards = response.getSuccessfulShards(); // <3> + int failedShards = response.getFailedShards(); // <4> + if (failedShards > 0) { + for(DefaultShardOperationFailedException failure: response.getShardFailures()) { // <5> + String failedIndex = failure.index(); // <6> + int shardId = failure.shardId(); // <7> + String reason = failure.reason(); // <8> + } + } + for(QueryExplanation explanation: response.getQueryExplanation()) { // <9> + String explanationIndex = explanation.getIndex(); // <10> + int shardId = explanation.getShard(); // <11> + String explanationString = explanation.getExplanation(); // <12> + } + // end::validate-query-response + + // tag::validate-query-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(ValidateQueryResponse validateQueryResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::validate-query-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::validate-query-execute-async + client.indices().validateQueryAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::validate-query-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } } diff --git 
a/docs/java-rest/high-level/indices/validate_query.asciidoc b/docs/java-rest/high-level/indices/validate_query.asciidoc
new file mode 100644
index 0000000000000..3b3b184b02875
--- /dev/null
+++ b/docs/java-rest/high-level/indices/validate_query.asciidoc
@@ -0,0 +1,113 @@
+[[java-rest-high-indices-validate-query]]
+=== Validate Query API
+
+[[java-rest-high-indices-validate-query-request]]
+==== Validate Query Request
+
+A `ValidateQueryRequest` requires one or more `indices` on which the query is validated. If no index
+is provided, the request is executed on all indices.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request]
+--------------------------------------------------
+<1> The index on which to run the request.
+
+In addition, the request needs the query to be validated. The query can be built using the `QueryBuilders` utility class.
+The following code snippet builds a sample boolean query.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-query]
+--------------------------------------------------
+<1> Build the desired query.
+<2> Set it to the request.
+
+==== Optional arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-explain]
+--------------------------------------------------
+<1> The explain parameter can be set to true to get more detailed information about why a query failed.
+
+By default, the request is executed on a single shard only, which is randomly selected. The detailed explanation of
+the query may depend on which shard is being hit, and therefore may vary from one request to another. So, in case of
+query rewrite, the `allShards` parameter should be used to get a response from all available shards.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-allShards]
+--------------------------------------------------
+<1> Set the allShards parameter.
+
+When the query is valid, the explanation defaults to the string representation of that query. With rewrite set to true,
+the explanation is more detailed, showing the actual Lucene query that will be executed.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-rewrite]
+--------------------------------------------------
+<1> Set the rewrite parameter.
+
+[[java-rest-high-indices-validate-query-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute]
+--------------------------------------------------
+<1> Execute the request and get back the response in a ValidateQueryResponse object.
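As a rough sketch of how these pieces fit together (the `client` instance, the `posts` index, and the query string below are assumptions made for illustration, not values taken from this change), a synchronous call could look like this:

["source","java"]
--------------------------------------------------
// Illustrative sketch only: "client" is an existing RestHighLevelClient and "posts" an existing index.
ValidateQueryRequest request = new ValidateQueryRequest("posts");          // validate against one index
request.query(QueryBuilders.queryStringQuery("user:kimchy AND title:*"));  // the query to check
request.explain(true);                                                     // ask for per-shard details

ValidateQueryResponse response = client.indices().validateQuery(request, RequestOptions.DEFAULT);
if (response.isValid() == false) {
    for (QueryExplanation explanation : response.getQueryExplanation()) {
        // each explanation reports the index/shard it came from and the parsing error
        System.out.println(explanation.getIndex() + "[" + explanation.getShard() + "]: " + explanation.getError());
    }
}
--------------------------------------------------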
+
+[[java-rest-high-indices-validate-query-async]]
+==== Asynchronous Execution
+
+The asynchronous execution of a validate query request requires both the `ValidateQueryRequest`
+instance and an `ActionListener` instance to be passed to the asynchronous
+method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute-async]
+--------------------------------------------------
+<1> The `ValidateQueryRequest` to execute and the `ActionListener` to use when
+the execution completes.
+
+The asynchronous method does not block and returns immediately. Once it is
+completed, the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for `ValidateQueryResponse` looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument.
+<2> Called in case of failure. The raised exception is provided as an argument.
+
+[[java-rest-high-indices-validate-query-response]]
+==== Validate Query Response
+
+The returned `ValidateQueryResponse` allows you to retrieve information about the executed
+operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-response]
+--------------------------------------------------
+<1> Check if the query is valid or not.
+<2> Get total number of shards.
+<3> Get number of shards that were successful.
+<4> Get number of shards that failed.
+<5> Get the shard failures as `DefaultShardOperationFailedException`.
+<6> Get the index of a failed shard.
+<7> Get the shard id of a failed shard.
+<8> Get the reason for shard failure.
+<9> Get the detailed explanation for the shards (if explain was set to `true`).
+<10> Get the index to which a particular explanation belongs.
+<11> Get the shard id to which a particular explanation belongs.
+<12> Get the actual explanation string.
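For the listener itself, a minimal sketch of the pattern described above (reusing the `request` and `client` assumed in the earlier example) might be:

["source","java"]
--------------------------------------------------
// Sketch of the async pattern; the listener bodies are placeholders.
ActionListener<ValidateQueryResponse> listener = new ActionListener<ValidateQueryResponse>() {
    @Override
    public void onResponse(ValidateQueryResponse validateQueryResponse) {
        // inspect validateQueryResponse.isValid() and its explanations here
    }

    @Override
    public void onFailure(Exception e) {
        // the request could not be executed, e.g. the node was unreachable
    }
};
client.indices().validateQueryAsync(request, RequestOptions.DEFAULT, listener);
--------------------------------------------------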
\ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index b33c2421b06d3..4cd87a521d104 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -73,6 +73,7 @@ Index Management:: * <> * <> * <> +* <> Mapping Management:: * <> @@ -103,6 +104,7 @@ include::indices/get_alias.asciidoc[] include::indices/put_settings.asciidoc[] include::indices/get_settings.asciidoc[] include::indices/put_template.asciidoc[] +include::indices/validate_query.asciidoc[] include::indices/get_templates.asciidoc[] == Cluster APIs diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java index 780bf037f0e28..e330a0b8565fc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java @@ -20,16 +20,57 @@ package org.elasticsearch.action.admin.indices.validate.query; import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; +import java.util.Objects; -public class QueryExplanation implements Streamable { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class QueryExplanation implements Streamable, ToXContentFragment { + + public static final String INDEX_FIELD = "index"; + public static final String SHARD_FIELD = "shard"; + public static final String VALID_FIELD = "valid"; + public static final String ERROR_FIELD = "error"; + public static final String EXPLANATION_FIELD = "explanation"; public static final int RANDOM_SHARD = -1; + @SuppressWarnings("unchecked") + static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "query_explanation", + true, + a -> { + int shard = RANDOM_SHARD; + if (a[1] != null) { + shard = (int)a[1]; + } + return new QueryExplanation( + (String)a[0], + shard, + (boolean)a[2], + (String)a[3], + (String)a[4] + ); + } + ); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField(INDEX_FIELD)); + PARSER.declareInt(optionalConstructorArg(), new ParseField(SHARD_FIELD)); + PARSER.declareBoolean(constructorArg(), new ParseField(VALID_FIELD)); + PARSER.declareString(optionalConstructorArg(), new ParseField(EXPLANATION_FIELD)); + PARSER.declareString(optionalConstructorArg(), new ParseField(ERROR_FIELD)); + } + private String index; private int shard = RANDOM_SHARD; @@ -110,4 +151,43 @@ public static QueryExplanation readQueryExplanation(StreamInput in) throws IOEx exp.readFrom(in); return exp; } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (getIndex() != null) { + builder.field(INDEX_FIELD, getIndex()); + } + if(getShard() >= 0) { + 
builder.field(SHARD_FIELD, getShard()); + } + builder.field(VALID_FIELD, isValid()); + if (getError() != null) { + builder.field(ERROR_FIELD, getError()); + } + if (getExplanation() != null) { + builder.field(EXPLANATION_FIELD, getExplanation()); + } + return builder; + } + + public static QueryExplanation fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + QueryExplanation other = (QueryExplanation) o; + return Objects.equals(getIndex(), other.getIndex()) && + Objects.equals(getShard(), other.getShard()) && + Objects.equals(isValid(), other.isValid()) && + Objects.equals(getError(), other.getError()) && + Objects.equals(getExplanation(), other.getExplanation()); + } + + @Override + public int hashCode() { + return Objects.hash(getIndex(), getShard(), isValid(), getError(), getExplanation()); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 5953a5548c465..7694e7583c898 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; @@ -38,7 +40,7 @@ *

    * The request requires the query to be set using {@link #query(QueryBuilder)} */ -public class ValidateQueryRequest extends BroadcastRequest { +public class ValidateQueryRequest extends BroadcastRequest implements ToXContentObject { private QueryBuilder query = new MatchAllQueryBuilder(); @@ -179,4 +181,12 @@ public String toString() { return "[" + Arrays.toString(indices) + "]" + Arrays.toString(types) + ", query[" + query + "], explain:" + explain + ", rewrite:" + rewrite + ", all_shards:" + allShards; } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("query"); + query.toXContent(builder, params); + return builder.endObject(); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java index 5bb11dd56e00b..f766e1d9c6aa4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponse.java @@ -21,16 +21,22 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.BroadcastResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import static org.elasticsearch.action.admin.indices.validate.query.QueryExplanation.readQueryExplanation; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * The response of the validate action. 
@@ -39,12 +45,33 @@ */ public class ValidateQueryResponse extends BroadcastResponse { - public static final String INDEX_FIELD = "index"; - public static final String SHARD_FIELD = "shard"; public static final String VALID_FIELD = "valid"; public static final String EXPLANATIONS_FIELD = "explanations"; - public static final String ERROR_FIELD = "error"; - public static final String EXPLANATION_FIELD = "explanation"; + + @SuppressWarnings("unchecked") + static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "validate_query", + true, + arg -> { + BroadcastResponse response = (BroadcastResponse) arg[0]; + return + new ValidateQueryResponse( + (boolean)arg[1], + (List)arg[2], + response.getTotalShards(), + response.getSuccessfulShards(), + response.getFailedShards(), + Arrays.asList(response.getShardFailures()) + ); + } + ); + static { + declareBroadcastFields(PARSER); + PARSER.declareBoolean(constructorArg(), new ParseField(VALID_FIELD)); + PARSER.declareObjectArray( + optionalConstructorArg(), QueryExplanation.PARSER, new ParseField(EXPLANATIONS_FIELD) + ); + } private boolean valid; @@ -112,22 +139,14 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.startArray(EXPLANATIONS_FIELD); for (QueryExplanation explanation : getQueryExplanation()) { builder.startObject(); - if (explanation.getIndex() != null) { - builder.field(INDEX_FIELD, explanation.getIndex()); - } - if(explanation.getShard() >= 0) { - builder.field(SHARD_FIELD, explanation.getShard()); - } - builder.field(VALID_FIELD, explanation.isValid()); - if (explanation.getError() != null) { - builder.field(ERROR_FIELD, explanation.getError()); - } - if (explanation.getExplanation() != null) { - builder.field(EXPLANATION_FIELD, explanation.getExplanation()); - } + explanation.toXContent(builder, params); builder.endObject(); } builder.endArray(); } } + + public static ValidateQueryResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index 57486396f911b..d1a97d74d047f 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest.action.admin.indices; +import org.elasticsearch.action.admin.indices.validate.query.QueryExplanation; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -101,7 +102,7 @@ private static BytesRestResponse buildErrorResponse(XContentBuilder builder, Str builder.startObject(); builder.field(ValidateQueryResponse.VALID_FIELD, false); if (explain) { - builder.field(ValidateQueryResponse.ERROR_FIELD, error); + builder.field(QueryExplanation.ERROR_FIELD, error); } builder.endObject(); return new BytesRestResponse(OK, builder); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java new file mode 100644 index 0000000000000..db167e0c7669e --- /dev/null +++ 
b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanationTests.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.indices.validate.query; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; + +public class QueryExplanationTests extends AbstractStreamableXContentTestCase { + + static QueryExplanation createRandomQueryExplanation(boolean isValid) { + String index = "index_" + randomInt(1000); + int shard = randomInt(100); + Boolean valid = isValid; + String errorField = null; + if (!valid) { + errorField = randomAlphaOfLength(randomIntBetween(10, 100)); + } + String explanation = randomAlphaOfLength(randomIntBetween(10, 100)); + return new QueryExplanation(index, shard, valid, explanation, errorField); + } + + static QueryExplanation createRandomQueryExplanation() { + return createRandomQueryExplanation(randomBoolean()); + } + + @Override + protected QueryExplanation doParseInstance(XContentParser parser) throws IOException { + return QueryExplanation.fromXContent(parser); + } + + @Override + protected QueryExplanation createBlankInstance() { + return new QueryExplanation(); + } + + @Override + protected QueryExplanation createTestInstance() { + return createRandomQueryExplanation(); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java new file mode 100644 index 0000000000000..d72aae8fa2bd1 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryResponseTests.java @@ -0,0 +1,110 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.validate.query; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.broadcast.AbstractBroadcastResponseTestCase; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +public class ValidateQueryResponseTests extends AbstractBroadcastResponseTestCase { + + private static ValidateQueryResponse createRandomValidateQueryResponse( + int totalShards, int successfulShards, int failedShards, List failures) { + boolean valid = failedShards == 0; + List queryExplanations = new ArrayList<>(totalShards); + for(DefaultShardOperationFailedException failure: failures) { + queryExplanations.add( + new QueryExplanation( + failure.index(), failure.shardId(), false, failure.reason(), null + ) + ); + } + return new ValidateQueryResponse( + valid, queryExplanations, totalShards, successfulShards, failedShards, failures + ); + } + + private static ValidateQueryResponse createRandomValidateQueryResponse() { + int totalShards = randomIntBetween(1, 10); + int successfulShards = randomIntBetween(0, totalShards); + int failedShards = totalShards - successfulShards; + boolean valid = failedShards == 0; + List queryExplanations = new ArrayList<>(totalShards); + List shardFailures = new ArrayList<>(failedShards); + for (int i=0; i queryExplSet = new HashSet<>(response.getQueryExplanation()); + assertEquals(response.isValid(), parsedResponse.isValid()); + assertEquals(response.getQueryExplanation().size(), parsedResponse.getQueryExplanation().size()); + assertTrue(queryExplSet.containsAll(parsedResponse.getQueryExplanation())); + } + + @Override + protected ValidateQueryResponse createTestInstance(int totalShards, int successfulShards, int failedShards, + List failures) { + return createRandomValidateQueryResponse(totalShards, successfulShards, failedShards, failures); + } + + @Override + public void testToXContent() { + ValidateQueryResponse response = createTestInstance(10, 10, 0, new ArrayList<>()); + String output = Strings.toString(response); + assertEquals("{\"_shards\":{\"total\":10,\"successful\":10,\"failed\":0},\"valid\":true}", output); + } +} From 1502812c1a4b7dd08417a45798b8fe66311e7028 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Mon, 18 Jun 2018 10:01:28 -0400 Subject: [PATCH 22/34] Percentile/Ranks should return null instead of NaN when empty (#30460) The other metric aggregations (min/max/etc) return `null` as their XContent value and string when nothing was computed (due to empty/missing fields). Percentiles and Percentile Ranks, however, return `NaN `which is inconsistent and confusing for the user. This fixes the inconsistency by making the aggs return `null`. This applies to both the numeric value and the "as string" value. Note: like the metric aggs, this does not change the value if fetched directly from the percentiles object, which will return as `NaN`/`"NaN"`. This only changes the XContent output. While this is a bugfix, it still breaks bwc in a minor way as the response changes from prior version. 
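To make the change concrete, here is a minimal, self-contained sketch (illustrative only; the class and method names below are invented for this example and are not part of the patch) of how an empty percentile is rendered as null in the response while callers reading the Java object still see NaN:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class PercentileRenderingSketch {
        // NaN means "nothing was computed" (empty or missing field); render it as null.
        static Object render(double percentileValue) {
            return Double.isNaN(percentileValue) ? null : percentileValue;
        }

        public static void main(String[] args) {
            Map<String, Object> values = new LinkedHashMap<>();
            values.put("1.0", render(Double.NaN)); // empty aggregation -> null in the REST body
            values.put("99.0", render(42.5));      // computed values are unchanged
            System.out.println(values);            // prints {1.0=null, 99.0=42.5}
        }
    }
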
Closes #29066 --- .../release-notes/7.0.0-alpha1.asciidoc | 6 ++ .../elasticsearch/search/DocValueFormat.java | 16 +++++ .../percentiles/ParsedPercentiles.java | 11 ++-- .../hdr/AbstractInternalHDRPercentiles.java | 10 ++-- .../AbstractInternalTDigestPercentiles.java | 10 ++-- .../AbstractPercentilesTestCase.java | 58 ++++++++++++++++++- .../InternalPercentilesRanksTestCase.java | 8 +++ .../InternalPercentilesTestCase.java | 8 +++ .../hdr/InternalHDRPercentilesRanksTests.java | 1 + 9 files changed, 112 insertions(+), 16 deletions(-) diff --git a/docs/reference/release-notes/7.0.0-alpha1.asciidoc b/docs/reference/release-notes/7.0.0-alpha1.asciidoc index 1cc328f16598b..cf2e1e30be050 100644 --- a/docs/reference/release-notes/7.0.0-alpha1.asciidoc +++ b/docs/reference/release-notes/7.0.0-alpha1.asciidoc @@ -16,3 +16,9 @@ Cross-Cluster-Search:: Rest API:: * The Clear Cache API only supports `POST` as HTTP method + +Aggregations:: +* The Percentiles and PercentileRanks aggregations now return `null` in the REST response, + instead of `NaN`. This makes it consistent with the rest of the aggregations. Note: + this only applies to the REST response, the java objects continue to return `NaN` (also + consistent with other aggregations) \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 242e088747341..3a3b1c680aba1 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -394,6 +394,22 @@ public String format(long value) { @Override public String format(double value) { + /** + * Explicitly check for NaN, since it formats to "�" or "NaN" depending on JDK version. + * + * Decimal formatter uses the JRE's default symbol list (via Locale.ROOT above). In JDK8, + * this translates into using {@link sun.util.locale.provider.JRELocaleProviderAdapter}, which loads + * {@link sun.text.resources.FormatData} for symbols. There, `NaN` is defined as `\ufffd` (�) + * + * In JDK9+, {@link sun.util.cldr.CLDRLocaleProviderAdapter} is used instead, which loads + * {@link sun.text.resources.cldr.FormatData}. There, `NaN` is defined as `"NaN"` + * + * Since the character � isn't very useful, and makes the output change depending on JDK version, + * we manually check to see if the value is NaN and return the string directly. + */ + if (Double.isNaN(value)) { + return String.valueOf(Double.NaN); + } return format.format(value); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java index 3f56b21dcd8a0..2c7da76446d5a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/ParsedPercentiles.java @@ -92,9 +92,9 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) builder.startObject(CommonFields.VALUES.getPreferredName()); for (Map.Entry percentile : percentiles.entrySet()) { Double key = percentile.getKey(); - builder.field(String.valueOf(key), percentile.getValue()); - - if (valuesAsString) { + Double value = percentile.getValue(); + builder.field(String.valueOf(key), value.isNaN() ? 
null : value); + if (valuesAsString && value.isNaN() == false) { builder.field(key + "_as_string", getPercentileAsString(key)); } } @@ -106,8 +106,9 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) builder.startObject(); { builder.field(CommonFields.KEY.getPreferredName(), key); - builder.field(CommonFields.VALUE.getPreferredName(), percentile.getValue()); - if (valuesAsString) { + Double value = percentile.getValue(); + builder.field(CommonFields.VALUE.getPreferredName(), value.isNaN() ? null : value); + if (valuesAsString && value.isNaN() == false) { builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), getPercentileAsString(key)); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java index 48d35de6cb6ab..a7b359d59373c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractInternalHDRPercentiles.java @@ -123,9 +123,9 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th for(int i = 0; i < keys.length; ++i) { String key = String.valueOf(keys[i]); double value = value(keys[i]); - builder.field(key, value); - if (format != DocValueFormat.RAW) { - builder.field(key + "_as_string", format.format(value)); + builder.field(key, state.getTotalCount() == 0 ? null : value); + if (format != DocValueFormat.RAW && state.getTotalCount() > 0) { + builder.field(key + "_as_string", format.format(value).toString()); } } builder.endObject(); @@ -135,8 +135,8 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th double value = value(keys[i]); builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), keys[i]); - builder.field(CommonFields.VALUE.getPreferredName(), value); - if (format != DocValueFormat.RAW) { + builder.field(CommonFields.VALUE.getPreferredName(), state.getTotalCount() == 0 ? null : value); + if (format != DocValueFormat.RAW && state.getTotalCount() > 0) { builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString()); } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java index 3806d7feb9550..0938710406a7b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractInternalTDigestPercentiles.java @@ -106,9 +106,9 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th for(int i = 0; i < keys.length; ++i) { String key = String.valueOf(keys[i]); double value = value(keys[i]); - builder.field(key, value); - if (format != DocValueFormat.RAW) { - builder.field(key + "_as_string", format.format(value)); + builder.field(key, state.size() == 0 ? 
null : value); + if (format != DocValueFormat.RAW && state.size() > 0) { + builder.field(key + "_as_string", format.format(value).toString()); } } builder.endObject(); @@ -118,8 +118,8 @@ public XContentBuilder doXContentBody(XContentBuilder builder, Params params) th double value = value(keys[i]); builder.startObject(); builder.field(CommonFields.KEY.getPreferredName(), keys[i]); - builder.field(CommonFields.VALUE.getPreferredName(), value); - if (format != DocValueFormat.RAW) { + builder.field(CommonFields.VALUE.getPreferredName(), state.size() == 0 ? null : value); + if (format != DocValueFormat.RAW && state.size() > 0) { builder.field(CommonFields.VALUE_AS_STRING.getPreferredName(), format.format(value).toString()); } builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java index e54a2a8b9a14f..c4a3d3b2ffcef 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/AbstractPercentilesTestCase.java @@ -19,6 +19,10 @@ package org.elasticsearch.search.aggregations.metrics.percentiles; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregation.CommonFields; import org.elasticsearch.search.aggregations.InternalAggregation; @@ -27,11 +31,14 @@ import java.io.IOException; import java.util.Arrays; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.function.Predicate; +import static org.hamcrest.Matchers.equalTo; + public abstract class AbstractPercentilesTestCase> extends InternalAggregationTestCase { @@ -49,7 +56,7 @@ public void setUp() throws Exception { @Override protected T createTestInstance(String name, List pipelineAggregators, Map metaData) { - int numValues = randomInt(100); + int numValues = frequently() ? 
randomInt(100) : 0; double[] values = new double[numValues]; for (int i = 0; i < numValues; ++i) { values[i] = randomDouble(); @@ -89,4 +96,53 @@ public static double[] randomPercents(boolean sorted) { protected Predicate excludePathsFromXContentInsertion() { return path -> path.endsWith(CommonFields.VALUES.getPreferredName()); } + + protected abstract void assertPercentile(T agg, Double value); + + public void testEmptyRanksXContent() throws IOException { + double[] percents = new double[]{1,2,3}; + boolean keyed = randomBoolean(); + DocValueFormat docValueFormat = randomNumericDocValueFormat(); + + T agg = createTestInstance("test", Collections.emptyList(), Collections.emptyMap(), keyed, docValueFormat, percents, new double[0]); + + for (Percentile percentile : agg) { + Double value = percentile.getValue(); + assertPercentile(agg, value); + } + + XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); + builder.startObject(); + agg.doXContentBody(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + String expected; + if (keyed) { + expected = "{\n" + + " \"values\" : {\n" + + " \"1.0\" : null,\n" + + " \"2.0\" : null,\n" + + " \"3.0\" : null\n" + + " }\n" + + "}"; + } else { + expected = "{\n" + + " \"values\" : [\n" + + " {\n" + + " \"key\" : 1.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 2.0,\n" + + " \"value\" : null\n" + + " },\n" + + " {\n" + + " \"key\" : 3.0,\n" + + " \"value\" : null\n" + + " }\n" + + " ]\n" + + "}"; + } + + assertThat(Strings.toString(builder), equalTo(expected)); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java index f45b7cce51e37..a63fd42da7d96 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesRanksTestCase.java @@ -22,6 +22,8 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.ParsedAggregation; +import static org.hamcrest.Matchers.equalTo; + public abstract class InternalPercentilesRanksTestCase extends AbstractPercentilesTestCase { @@ -39,4 +41,10 @@ protected final void assertFromXContent(T aggregation, ParsedAggregation parsedA Class parsedClass = implementationClass(); assertTrue(parsedClass != null && parsedClass.isInstance(parsedAggregation)); } + + @Override + protected void assertPercentile(T agg, Double value) { + assertThat(agg.percent(value), equalTo(Double.NaN)); + assertThat(agg.percentAsString(value), equalTo("NaN")); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java index be105f2af80b6..1024577a6b6ed 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java @@ -24,6 +24,8 @@ import java.util.List; +import static org.hamcrest.Matchers.equalTo; + public abstract class InternalPercentilesTestCase extends AbstractPercentilesTestCase { @Override @@ -49,4 +51,10 @@ public static 
double[] randomPercents() { } return percents; } + + @Override + protected void assertPercentile(T agg, Double value) { + assertThat(agg.percentile(value), equalTo(Double.NaN)); + assertThat(agg.percentileAsString(value), equalTo("NaN")); + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java index dcbd5cdbd5a3a..ee0e3602f2039 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesRanksTests.java @@ -31,6 +31,7 @@ import java.util.List; import java.util.Map; + public class InternalHDRPercentilesRanksTests extends InternalPercentilesRanksTestCase { @Override From 02a4ef38a7960895b4e26111167676ec49302cba Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Mon, 18 Jun 2018 16:46:04 +0200 Subject: [PATCH 23/34] Use system context for cluster state update tasks (#31241) This commit makes it so that cluster state update tasks always run under the system context, only restoring the original context when the listener that was provided with the task is called. A notable exception is the clusterStatePublished(...) callback which will still run under system context, because it's defined on the executor-level, and not the task level, and only called once for the combined batch of tasks and can therefore not be uniquely identified with a task / thread context. Relates #30603 --- .../cluster/ClusterStateTaskExecutor.java | 3 + .../cluster/ClusterStateUpdateTask.java | 6 ++ .../cluster/service/MasterService.java | 35 ++++--- .../transport/RemoteClusterConnection.java | 2 - .../cluster/service/MasterServiceTests.java | 82 +++++++++++++++++ .../xpack/core/ml/MlMetadata.java | 13 ++- .../core/ml/datafeed/DatafeedUpdate.java | 9 +- .../core/ml/datafeed/DatafeedUpdateTests.java | 8 +- .../ml/action/TransportDeleteJobAction.java | 3 +- .../ml/action/TransportPutDatafeedAction.java | 14 +-- .../action/TransportUpdateDatafeedAction.java | 6 +- .../xpack/ml/job/JobManager.java | 5 +- .../xpack/ml/MlMetadataTests.java | 92 ++++++++++++++----- .../action/TransportCloseJobActionTests.java | 2 +- .../TransportStartDatafeedActionTests.java | 6 +- .../TransportStopDatafeedActionTests.java | 12 +-- .../ml/datafeed/DatafeedManagerTests.java | 2 +- .../datafeed/DatafeedNodeSelectorTests.java | 23 +++-- .../xpack/ml/integration/DeleteJobIT.java | 5 +- 19 files changed, 236 insertions(+), 92 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index 024389dd22c7f..6c536a7019bb0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -41,6 +41,9 @@ default boolean runOnlyOnMaster() { /** * Callback invoked after new cluster state is published. Note that * this method is not invoked if the cluster state was not updated. + * + * Note that this method will be executed using system context. 
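+ *
+ * As an illustrative aside (a sketch, not part of this change), the task submitter
+ * captures the caller's {@link org.elasticsearch.common.util.concurrent.ThreadContext}
+ * and restores it around listener notifications roughly like this:
+ * <pre>
+ * Supplier&lt;ThreadContext.StoredContext&gt; restore = threadContext.newRestorableContext(false);
+ * try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
+ *     threadContext.markAsSystemContext();
+ *     // compute and publish the new cluster state under the system context
+ * }
+ * try (ThreadContext.StoredContext ignore = restore.get()) {
+ *     listener.clusterStateProcessed(source, oldState, newState); // caller's context restored
+ * }
+ * </pre>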
+ * * @param clusterChangedEvent the change event for this cluster state change, containing * both old and new states */ diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java index b298e7e915dea..9dc9c7f6f52d0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateUpdateTask.java @@ -62,6 +62,12 @@ public String describeTasks(List tasks) { */ public abstract void onFailure(String source, Exception e); + @Override + public final void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + // final, empty implementation here as this method should only be defined in combination + // with a batching executor as it will always be executed within the system context. + } + /** * If the cluster state update task wasn't processed by the provided timeout, call * {@link ClusterStateTaskListener#onFailure(String, Exception)}. May return null to indicate no timeout is needed (default). diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 4432d864fd36a..2543be4811c1e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -47,6 +47,7 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.FutureUtils; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.threadpool.ThreadPool; @@ -59,6 +60,7 @@ import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; +import java.util.function.Supplier; import java.util.stream.Collectors; import static org.elasticsearch.cluster.service.ClusterService.CLUSTER_SERVICE_SLOW_TASK_LOGGING_THRESHOLD_SETTING; @@ -426,26 +428,28 @@ public TimeValue getMaxTaskWaitTime() { return threadPoolExecutor.getMaxTaskWaitTime(); } - private SafeClusterStateTaskListener safe(ClusterStateTaskListener listener) { + private SafeClusterStateTaskListener safe(ClusterStateTaskListener listener, Supplier contextSupplier) { if (listener instanceof AckedClusterStateTaskListener) { - return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, logger); + return new SafeAckedClusterStateTaskListener((AckedClusterStateTaskListener) listener, contextSupplier, logger); } else { - return new SafeClusterStateTaskListener(listener, logger); + return new SafeClusterStateTaskListener(listener, contextSupplier, logger); } } private static class SafeClusterStateTaskListener implements ClusterStateTaskListener { private final ClusterStateTaskListener listener; + protected final Supplier context; private final Logger logger; - SafeClusterStateTaskListener(ClusterStateTaskListener listener, Logger logger) { + SafeClusterStateTaskListener(ClusterStateTaskListener listener, Supplier context, Logger logger) { this.listener = listener; + this.context = context; this.logger = logger; } @Override public void onFailure(String source, Exception e) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onFailure(source, e); } catch (Exception 
inner) { inner.addSuppressed(e); @@ -456,7 +460,7 @@ public void onFailure(String source, Exception e) { @Override public void onNoLongerMaster(String source) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onNoLongerMaster(source); } catch (Exception e) { logger.error(() -> new ParameterizedMessage( @@ -466,7 +470,7 @@ public void onNoLongerMaster(String source) { @Override public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.clusterStateProcessed(source, oldState, newState); } catch (Exception e) { logger.error(() -> new ParameterizedMessage( @@ -480,8 +484,9 @@ private static class SafeAckedClusterStateTaskListener extends SafeClusterStateT private final AckedClusterStateTaskListener listener; private final Logger logger; - SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Logger logger) { - super(listener, logger); + SafeAckedClusterStateTaskListener(AckedClusterStateTaskListener listener, Supplier context, + Logger logger) { + super(listener, context, logger); this.listener = listener; this.logger = logger; } @@ -493,7 +498,7 @@ public boolean mustAck(DiscoveryNode discoveryNode) { @Override public void onAllNodesAcked(@Nullable Exception e) { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onAllNodesAcked(e); } catch (Exception inner) { inner.addSuppressed(e); @@ -503,7 +508,7 @@ public void onAllNodesAcked(@Nullable Exception e) { @Override public void onAckTimeout() { - try { + try (ThreadContext.StoredContext ignore = context.get()) { listener.onAckTimeout(); } catch (Exception e) { logger.error("exception thrown by listener while notifying on ack timeout", e); @@ -724,9 +729,13 @@ public void submitStateUpdateTasks(final String source, if (!lifecycle.started()) { return; } - try { + final ThreadContext threadContext = threadPool.getThreadContext(); + final Supplier supplier = threadContext.newRestorableContext(false); + try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { + threadContext.markAsSystemContext(); + List safeTasks = tasks.entrySet().stream() - .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), safe(e.getValue()), executor)) + .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), safe(e.getValue(), supplier), executor)) .collect(Collectors.toList()); taskBatcher.submitTasks(safeTasks, config.timeout()); } catch (EsRejectedExecutionException e) { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index e37f46c5517db..c86ea61980a87 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -556,7 +556,6 @@ public ClusterStateResponse newInstance() { @Override public void handleResponse(ClusterStateResponse response) { - assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : "context is a system context"; try { if (remoteClusterName.get() == null) { assert response.getClusterName().value() != null; @@ -597,7 +596,6 @@ public void handleResponse(ClusterStateResponse response) { @Override public void handleException(TransportException exp) { - assert transportService.getThreadPool().getThreadContext().isSystemContext() == false : 
"context is a system context"; logger.warn(() -> new ParameterizedMessage("fetching nodes from external cluster {} failed", clusterAlias), exp); try { IOUtils.closeWhileHandlingException(connection); diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index f75363c7ab5c7..20587d31f5359 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -34,12 +34,14 @@ import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.BaseFuture; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -52,6 +54,7 @@ import org.junit.BeforeClass; import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -168,6 +171,85 @@ public void onFailure(String source, Exception e) { nonMaster.close(); } + public void testThreadContext() throws InterruptedException { + final TimedMasterService master = createTimedMasterService(true); + final CountDownLatch latch = new CountDownLatch(1); + + try (ThreadContext.StoredContext ignored = threadPool.getThreadContext().stashContext()) { + final Map expectedHeaders = Collections.singletonMap("test", "test"); + threadPool.getThreadContext().putHeader(expectedHeaders); + + final TimeValue ackTimeout = randomBoolean() ? TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); + final TimeValue masterTimeout = randomBoolean() ? 
TimeValue.ZERO : TimeValue.timeValueMillis(randomInt(10000)); + + master.submitStateUpdateTask("test", new AckedClusterStateUpdateTask(null, null) { + @Override + public ClusterState execute(ClusterState currentState) { + assertTrue(threadPool.getThreadContext().isSystemContext()); + assertEquals(Collections.emptyMap(), threadPool.getThreadContext().getHeaders()); + + if (randomBoolean()) { + return ClusterState.builder(currentState).build(); + } else if (randomBoolean()) { + return currentState; + } else { + throw new IllegalArgumentException("mock failure"); + } + } + + @Override + public void onFailure(String source, Exception e) { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + @Override + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + @Override + protected Void newResponse(boolean acknowledged) { + return null; + } + + public TimeValue ackTimeout() { + return ackTimeout; + } + + @Override + public TimeValue timeout() { + return masterTimeout; + } + + @Override + public void onAllNodesAcked(@Nullable Exception e) { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + @Override + public void onAckTimeout() { + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + latch.countDown(); + } + + }); + + assertFalse(threadPool.getThreadContext().isSystemContext()); + assertEquals(expectedHeaders, threadPool.getThreadContext().getHeaders()); + } + + latch.await(); + + master.close(); + } + /* * test that a listener throwing an exception while handling a * notification does not prevent publication notification to the diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index 5e145306f8c1f..85e5c99fe3581 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -293,7 +292,7 @@ public Builder deleteJob(String jobId, PersistentTasksCustomMetaData tasks) { return this; } - public Builder putDatafeed(DatafeedConfig datafeedConfig, ThreadContext threadContext) { + public Builder putDatafeed(DatafeedConfig datafeedConfig, Map headers) { if (datafeeds.containsKey(datafeedConfig.getId())) { throw new ResourceAlreadyExistsException("A datafeed with id [" + datafeedConfig.getId() + "] already exists"); } @@ -302,13 +301,13 @@ public Builder putDatafeed(DatafeedConfig datafeedConfig, ThreadContext threadCo Job job = jobs.get(jobId); DatafeedJobValidator.validate(datafeedConfig, job); - if (threadContext != null) { + if 
(headers.isEmpty() == false) { // Adjust the request, adding security headers from the current thread context DatafeedConfig.Builder builder = new DatafeedConfig.Builder(datafeedConfig); - Map headers = threadContext.getHeaders().entrySet().stream() + Map securityHeaders = headers.entrySet().stream() .filter(e -> ClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - builder.setHeaders(headers); + builder.setHeaders(securityHeaders); datafeedConfig = builder.build(); } @@ -328,7 +327,7 @@ private void checkJobIsAvailableForDatafeed(String jobId) { } } - public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks, ThreadContext threadContext) { + public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaData persistentTasks, Map headers) { String datafeedId = update.getId(); DatafeedConfig oldDatafeedConfig = datafeeds.get(datafeedId); if (oldDatafeedConfig == null) { @@ -336,7 +335,7 @@ public Builder updateDatafeed(DatafeedUpdate update, PersistentTasksCustomMetaDa } checkDatafeedIsStopped(() -> Messages.getMessage(Messages.DATAFEED_CANNOT_UPDATE_IN_CURRENT_STATE, datafeedId, DatafeedState.STARTED), datafeedId, persistentTasks); - DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig, threadContext); + DatafeedConfig newDatafeedConfig = update.apply(oldDatafeedConfig, headers); if (newDatafeedConfig.getJobId().equals(oldDatafeedConfig.getJobId()) == false) { checkJobIsAvailableForDatafeed(newDatafeedConfig.getJobId()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 444532a7e3f15..27498bd1549ee 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -12,7 +12,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -264,7 +263,7 @@ ChunkingConfig getChunkingConfig() { * Applies the update to the given {@link DatafeedConfig} * @return a new {@link DatafeedConfig} that contains the update */ - public DatafeedConfig apply(DatafeedConfig datafeedConfig, ThreadContext threadContext) { + public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map headers) { if (id.equals(datafeedConfig.getId()) == false) { throw new IllegalArgumentException("Cannot apply update to datafeedConfig with different id"); } @@ -301,12 +300,12 @@ public DatafeedConfig apply(DatafeedConfig datafeedConfig, ThreadContext threadC builder.setChunkingConfig(chunkingConfig); } - if (threadContext != null) { + if (headers.isEmpty() == false) { // Adjust the request, adding security headers from the current thread context - Map headers = threadContext.getHeaders().entrySet().stream() + Map securityHeaders = headers.entrySet().stream() .filter(e -> ClientHelper.SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - builder.setHeaders(headers); + 
builder.setHeaders(securityHeaders); } return builder.build(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index d059e567d1588..358f9d1c97bd7 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -114,7 +114,7 @@ public void testApply_failBecauseTargetDatafeedHasDifferentId() { public void testApply_givenEmptyUpdate() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); - DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, Collections.emptyMap()); assertThat(datafeed, equalTo(updatedDatafeed)); } @@ -125,7 +125,7 @@ public void testApply_givenPartialUpdate() { DatafeedUpdate.Builder updated = new DatafeedUpdate.Builder(datafeed.getId()); updated.setScrollSize(datafeed.getScrollSize() + 1); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); DatafeedConfig.Builder expectedDatafeed = new DatafeedConfig.Builder(datafeed); expectedDatafeed.setScrollSize(datafeed.getScrollSize() + 1); @@ -149,7 +149,7 @@ public void testApply_givenFullUpdateNoAggregations() { update.setScrollSize(8000); update.setChunkingConfig(ChunkingConfig.newManual(TimeValue.timeValueHours(1))); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); assertThat(updatedDatafeed.getJobId(), equalTo("bar")); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2"))); @@ -175,7 +175,7 @@ public void testApply_givenAggregations() { update.setAggregations(new AggregatorFactories.Builder().addAggregator( AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime))); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, null); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1"))); assertThat(updatedDatafeed.getTypes(), equalTo(Collections.singletonList("t_1"))); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java index 81f4a90f575af..ede92fbbab950 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteJobAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.Client; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateUpdateTask; @@ -213,7 +212,7 @@ public void onFailure(String source, Exception e) { } @Override - public void 
clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { logger.debug("Job [" + jobId + "] is successfully marked as deleted"); listener.onResponse(true); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java index 08a9dfb09c1d9..88c72578023f9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutDatafeedAction.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.core.security.support.Exceptions; import java.io.IOException; +import java.util.Map; public class TransportPutDatafeedAction extends TransportMasterNodeAction { @@ -95,7 +96,7 @@ protected void masterOperation(PutDatafeedAction.Request request, ClusterState s client.execute(HasPrivilegesAction.INSTANCE, privRequest, privResponseListener); } else { - putDatafeed(request, listener); + putDatafeed(request, threadPool.getThreadContext().getHeaders(), listener); } } @@ -103,7 +104,7 @@ private void handlePrivsResponse(String username, PutDatafeedAction.Request requ HasPrivilegesResponse response, ActionListener listener) throws IOException { if (response.isCompleteMatch()) { - putDatafeed(request, listener); + putDatafeed(request, threadPool.getThreadContext().getHeaders(), listener); } else { XContentBuilder builder = JsonXContent.contentBuilder(); builder.startObject(); @@ -120,7 +121,8 @@ private void handlePrivsResponse(String username, PutDatafeedAction.Request requ } } - private void putDatafeed(PutDatafeedAction.Request request, ActionListener listener) { + private void putDatafeed(PutDatafeedAction.Request request, Map headers, + ActionListener listener) { clusterService.submitStateUpdateTask( "put-datafeed-" + request.getDatafeed().getId(), @@ -136,16 +138,16 @@ protected PutDatafeedAction.Response newResponse(boolean acknowledged) { @Override public ClusterState execute(ClusterState currentState) { - return putDatafeed(request, currentState); + return putDatafeed(request, headers, currentState); } }); } - private ClusterState putDatafeed(PutDatafeedAction.Request request, ClusterState clusterState) { + private ClusterState putDatafeed(PutDatafeedAction.Request request, Map headers, ClusterState clusterState) { XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); MlMetadata currentMetadata = MlMetadata.getMlMetadata(clusterState); MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) - .putDatafeed(request.getDatafeed(), threadPool.getThreadContext()).build(); + .putDatafeed(request.getDatafeed(), headers).build(); return ClusterState.builder(clusterState).metaData( MetaData.builder(clusterState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()) .build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java index 4d752fe294081..4e43cbb185330 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateDatafeedAction.java @@ -27,6 +27,8 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; 
import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import java.util.Map; + public class TransportUpdateDatafeedAction extends TransportMasterNodeAction { @Inject @@ -50,6 +52,8 @@ protected PutDatafeedAction.Response newResponse() { @Override protected void masterOperation(UpdateDatafeedAction.Request request, ClusterState state, ActionListener listener) { + final Map headers = threadPool.getThreadContext().getHeaders(); + clusterService.submitStateUpdateTask("update-datafeed-" + request.getUpdate().getId(), new AckedClusterStateUpdateTask(request, listener) { private volatile DatafeedConfig updatedDatafeed; @@ -69,7 +73,7 @@ public ClusterState execute(ClusterState currentState) { PersistentTasksCustomMetaData persistentTasks = currentState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); MlMetadata newMetadata = new MlMetadata.Builder(currentMetadata) - .updateDatafeed(update, persistentTasks, threadPool.getThreadContext()).build(); + .updateDatafeed(update, persistentTasks, headers).build(); updatedDatafeed = newMetadata.getDatafeed(update.getId()); return ClusterState.builder(currentState).metaData( MetaData.builder(currentState.getMetaData()).putCustom(MLMetadataField.TYPE, newMetadata).build()).build(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index 1fd73b96667b2..fe6deea55e3aa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; @@ -347,8 +346,8 @@ public void onFailure(String source, Exception e) { } @Override - public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { - afterClusterStateUpdate(clusterChangedEvent.state(), request); + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { + afterClusterStateUpdate(newState, request); actionListener.onResponse(new PutJobAction.Response(updatedJob.get())); } }); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java index f6fb2db3c9bb9..ecfe712858331 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetadataTests.java @@ -30,9 +30,11 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.config.JobTests; import org.elasticsearch.persistent.PersistentTasksCustomMetaData; +import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; import java.util.Collections; import java.util.Date; +import java.util.HashMap; import java.util.Map; import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; @@ -42,6 +44,7 @@ import static org.elasticsearch.xpack.ml.datafeed.DatafeedManagerTests.createDatafeedJob; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static 
org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.sameInstance; @@ -63,7 +66,7 @@ protected MlMetadata createTestInstance() { } job = new Job.Builder(job).setAnalysisConfig(analysisConfig).build(); builder.putJob(job, false); - builder.putDatafeed(datafeedConfig, null); + builder.putDatafeed(datafeedConfig, Collections.emptyMap()); } else { builder.putJob(job, false); } @@ -164,7 +167,7 @@ public void testRemoveJob_failDatafeedRefersToJob() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, () -> builder.deleteJob(job1.getId(), new PersistentTasksCustomMetaData(0L, Collections.emptyMap()))); @@ -184,7 +187,7 @@ public void testCrudDatafeed() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata result = builder.build(); assertThat(result.getJobs().get("job_id"), sameInstance(job1)); @@ -201,7 +204,7 @@ public void testPutDatafeed_failBecauseJobDoesNotExist() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", "missing-job").build(); MlMetadata.Builder builder = new MlMetadata.Builder(); - expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); } public void testPutDatafeed_failBecauseJobIsBeingDeleted() { @@ -210,7 +213,7 @@ public void testPutDatafeed_failBecauseJobIsBeingDeleted() { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ResourceNotFoundException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); } public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() { @@ -218,9 +221,9 @@ public void testPutDatafeed_failBecauseDatafeedIdIsAlreadyTaken() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); - expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ResourceAlreadyExistsException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); } public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() { @@ -229,10 +232,10 @@ public void testPutDatafeed_failBecauseJobAlreadyHasDatafeed() { DatafeedConfig datafeedConfig2 = createDatafeedConfig("datafeed2", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> builder.putDatafeed(datafeedConfig2, null)); + () -> 
builder.putDatafeed(datafeedConfig2, Collections.emptyMap())); assertThat(e.status(), equalTo(RestStatus.CONFLICT)); } @@ -246,7 +249,23 @@ public void testPutDatafeed_failBecauseJobIsNotCompatibleForDatafeed() { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1.build(now), false); - expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1, null)); + expectThrows(ElasticsearchStatusException.class, () -> builder.putDatafeed(datafeedConfig1, Collections.emptyMap())); + } + + public void testPutDatafeed_setsSecurityHeaders() { + Job datafeedJob = createDatafeedJob().build(new Date()); + DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", datafeedJob.getId()).build(); + MlMetadata.Builder builder = new MlMetadata.Builder(); + builder.putJob(datafeedJob, false); + + Map headers = new HashMap<>(); + headers.put("unrelated_header", "unrelated_header_value"); + headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user"); + builder.putDatafeed(datafeedConfig, headers); + MlMetadata metadata = builder.build(); + assertThat(metadata.getDatafeed("datafeed1").getHeaders().size(), equalTo(1)); + assertThat(metadata.getDatafeed("datafeed1").getHeaders(), + hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user")); } public void testUpdateDatafeed() { @@ -254,12 +273,13 @@ public void testUpdateDatafeed() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setScrollSize(5000); - MlMetadata updatedMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null).build(); + MlMetadata updatedMetadata = + new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap()).build(); DatafeedConfig updatedDatafeed = updatedMetadata.getDatafeed(datafeedConfig1.getId()); assertThat(updatedDatafeed.getJobId(), equalTo(datafeedConfig1.getJobId())); @@ -271,7 +291,8 @@ public void testUpdateDatafeed() { public void testUpdateDatafeed_failBecauseDatafeedDoesNotExist() { DatafeedUpdate.Builder update = new DatafeedUpdate.Builder("job_id"); update.setScrollSize(5000); - expectThrows(ResourceNotFoundException.class, () -> new MlMetadata.Builder().updateDatafeed(update.build(), null, null).build()); + expectThrows(ResourceNotFoundException.class, + () -> new MlMetadata.Builder().updateDatafeed(update.build(), null, Collections.emptyMap()).build()); } public void testUpdateDatafeed_failBecauseDatafeedIsNotStopped() { @@ -279,7 +300,7 @@ public void testUpdateDatafeed_failBecauseDatafeedIsNotStopped() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -300,14 +321,14 @@ public void testUpdateDatafeed_failBecauseNewJobIdDoesNotExist() { DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", 
job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setJobId(job1.getId() + "_2"); expectThrows(ResourceNotFoundException.class, - () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null)); + () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap())); } public void testUpdateDatafeed_failBecauseNewJobHasAnotherDatafeedAttached() { @@ -319,25 +340,46 @@ public void testUpdateDatafeed_failBecauseNewJobHasAnotherDatafeedAttached() { MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); builder.putJob(job2.build(), false); - builder.putDatafeed(datafeedConfig1, null); - builder.putDatafeed(datafeedConfig2, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); + builder.putDatafeed(datafeedConfig2, Collections.emptyMap()); MlMetadata beforeMetadata = builder.build(); DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig1.getId()); update.setJobId(job2.getId()); ElasticsearchStatusException e = expectThrows(ElasticsearchStatusException.class, - () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, null)); + () -> new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, Collections.emptyMap())); assertThat(e.status(), equalTo(RestStatus.CONFLICT)); assertThat(e.getMessage(), equalTo("A datafeed [datafeed2] already exists for job [job_id_2]")); } + public void testUpdateDatafeed_setsSecurityHeaders() { + Job datafeedJob = createDatafeedJob().build(new Date()); + DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed1", datafeedJob.getId()).build(); + MlMetadata.Builder builder = new MlMetadata.Builder(); + builder.putJob(datafeedJob, false); + builder.putDatafeed(datafeedConfig, Collections.emptyMap()); + MlMetadata beforeMetadata = builder.build(); + assertTrue(beforeMetadata.getDatafeed("datafeed1").getHeaders().isEmpty()); + + DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedConfig.getId()); + update.setQueryDelay(TimeValue.timeValueMinutes(5)); + + Map headers = new HashMap<>(); + headers.put("unrelated_header", "unrelated_header_value"); + headers.put(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user"); + MlMetadata afterMetadata = new MlMetadata.Builder(beforeMetadata).updateDatafeed(update.build(), null, headers).build(); + Map updatedHeaders = afterMetadata.getDatafeed("datafeed1").getHeaders(); + assertThat(updatedHeaders.size(), equalTo(1)); + assertThat(updatedHeaders, hasEntry(AuthenticationServiceField.RUN_AS_USER_HEADER, "permitted_run_as_user")); + } + public void testRemoveDatafeed_failBecauseDatafeedStarted() { Job job1 = createDatafeedJob().build(new Date()); DatafeedConfig datafeedConfig1 = createDatafeedConfig("datafeed1", job1.getId()).build(); MlMetadata.Builder builder = new MlMetadata.Builder(); builder.putJob(job1, false); - builder.putDatafeed(datafeedConfig1, null); + builder.putDatafeed(datafeedConfig1, Collections.emptyMap()); MlMetadata result = builder.build(); assertThat(result.getJobs().get("job_id"), sameInstance(job1)); @@ -378,9 +420,9 @@ public void testExpandJobIds() { public void testExpandDatafeedIds() { MlMetadata.Builder 
mlMetadataBuilder = newMlMetadataWithJobs("bar-1", "foo-1", "foo-2"); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build(), null); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build(), null); - mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build(), null); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("bar-1-feed", "bar-1").build(), Collections.emptyMap()); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-1-feed", "foo-1").build(), Collections.emptyMap()); + mlMetadataBuilder.putDatafeed(createDatafeedConfig("foo-2-feed", "foo-2").build(), Collections.emptyMap()); MlMetadata mlMetadata = mlMetadataBuilder.build(); @@ -409,7 +451,7 @@ protected MlMetadata mutateInstance(MlMetadata instance) { metadataBuilder.putJob(entry.getValue(), true); } for (Map.Entry entry : datafeeds.entrySet()) { - metadataBuilder.putDatafeed(entry.getValue(), null); + metadataBuilder.putDatafeed(entry.getValue(), Collections.emptyMap()); } switch (between(0, 1)) { @@ -430,7 +472,7 @@ protected MlMetadata mutateInstance(MlMetadata instance) { } randomJob = new Job.Builder(randomJob).setAnalysisConfig(analysisConfig).build(); metadataBuilder.putJob(randomJob, false); - metadataBuilder.putDatafeed(datafeedConfig, null); + metadataBuilder.putDatafeed(datafeedConfig, Collections.emptyMap()); break; default: throw new AssertionError("Illegal randomisation branch"); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java index d65fc1476e75e..0e7ad29c54da9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportCloseJobActionTests.java @@ -51,7 +51,7 @@ public void testValidate_datafeedIsStarted() { MlMetadata.Builder mlBuilder = new MlMetadata.Builder(); mlBuilder.putJob(BaseMlIntegTestCase.createScheduledJob("job_id").build(new Date()), false); mlBuilder.putDatafeed(BaseMlIntegTestCase.createDatafeed("datafeed_id", "job_id", - Collections.singletonList("*")), null); + Collections.singletonList("*")), Collections.emptyMap()); final PersistentTasksCustomMetaData.Builder startDataFeedTaskBuilder = PersistentTasksCustomMetaData.builder(); addJobTask("job_id", null, JobState.OPENED, startDataFeedTaskBuilder); addTask("datafeed_id", 0L, null, DatafeedState.STARTED, startDataFeedTaskBuilder); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java index af9446ed972cb..72c8d361dd882 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStartDatafeedActionTests.java @@ -45,7 +45,7 @@ public void testValidate_jobClosed() { PersistentTasksCustomMetaData tasks = PersistentTasksCustomMetaData.builder().build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1, null) + .putDatafeed(datafeedConfig1, Collections.emptyMap()) .build(); Exception e = 
expectThrows(ElasticsearchStatusException.class, () -> TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks)); @@ -62,7 +62,7 @@ public void testValidate_jobOpening() { PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1, null) + .putDatafeed(datafeedConfig1, Collections.emptyMap()) .build(); TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); @@ -78,7 +78,7 @@ public void testValidate_jobOpened() { PersistentTasksCustomMetaData tasks = tasksBuilder.build(); DatafeedConfig datafeedConfig1 = DatafeedManagerTests.createDatafeedConfig("foo-datafeed", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder(mlMetadata1) - .putDatafeed(datafeedConfig1, null) + .putDatafeed(datafeedConfig1, Collections.emptyMap()) .build(); TransportStartDatafeedAction.validate("foo-datafeed", mlMetadata2, tasks); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java index 55a0f4006bcdd..934642986de96 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedActionTests.java @@ -42,7 +42,7 @@ public void testValidate() { DatafeedConfig datafeedConfig = createDatafeedConfig("foo", "job_id").build(); MlMetadata mlMetadata2 = new MlMetadata.Builder().putJob(job, false) - .putDatafeed(datafeedConfig, null) + .putDatafeed(datafeedConfig, Collections.emptyMap()) .build(); TransportStopDatafeedAction.validateDatafeedTask("foo", mlMetadata2); } @@ -54,12 +54,12 @@ public void testResolveDataFeedIds_GivenDatafeedId() { addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder); Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()); DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date()); datafeedConfig = createDatafeedConfig("datafeed_2", "job_id_2").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); MlMetadata mlMetadata = mlMetadataBuilder.build(); @@ -86,17 +86,17 @@ public void testResolveDataFeedIds_GivenAll() { addTask("datafeed_1", 0L, "node-1", DatafeedState.STARTED, tasksBuilder); Job job = BaseMlIntegTestCase.createScheduledJob("job_id_1").build(new Date()); DatafeedConfig datafeedConfig = createDatafeedConfig("datafeed_1", "job_id_1").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); addTask("datafeed_2", 0L, "node-1", DatafeedState.STOPPED, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_2").build(new Date()); datafeedConfig = 
createDatafeedConfig("datafeed_2", "job_id_2").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); addTask("datafeed_3", 0L, "node-1", DatafeedState.STOPPING, tasksBuilder); job = BaseMlIntegTestCase.createScheduledJob("job_id_3").build(new Date()); datafeedConfig = createDatafeedConfig("datafeed_3", "job_id_3").build(); - mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, null); + mlMetadataBuilder.putJob(job, false).putDatafeed(datafeedConfig, Collections.emptyMap()); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); MlMetadata mlMetadata = mlMetadataBuilder.build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java index f609f0c8c5ed9..6ce03d22b64f0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedManagerTests.java @@ -84,7 +84,7 @@ public void setUpTests() { Job job = createDatafeedJob().build(new Date()); mlMetadata.putJob(job, false); DatafeedConfig datafeed = createDatafeedConfig("datafeed_id", job.getId()).build(); - mlMetadata.putDatafeed(datafeed, null); + mlMetadata.putDatafeed(datafeed, Collections.emptyMap()); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); addJobTask(job.getId(), "node_id", JobState.OPENED, tasksBuilder); PersistentTasksCustomMetaData tasks = tasksBuilder.build(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java index 96ae3b5ef38b6..f3fa804bb27b9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedNodeSelectorTests.java @@ -68,7 +68,7 @@ public void testSelectNode_GivenJobIsOpened() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -86,7 +86,7 @@ public void testSelectNode_GivenJobIsOpening() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -106,7 +106,7 @@ public void testNoJobTask() { mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - 
mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); tasks = PersistentTasksCustomMetaData.builder().build(); @@ -128,7 +128,7 @@ public void testSelectNode_GivenJobFailedOrClosed() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -156,7 +156,7 @@ public void testShardUnassigned() { mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -182,7 +182,7 @@ public void testShardNotAllActive() { mlMetadataBuilder.putJob(job, false); // Using wildcard index name to test for index resolving as well - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("fo*")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -207,7 +207,8 @@ public void testIndexDoesntExist() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), + Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -231,7 +232,8 @@ public void testRemoteIndex() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("remote:foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("remote:foo")), + Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); @@ -248,7 +250,7 @@ public void testSelectNode_jobTaskStale() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - 
mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("foo")), Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); String nodeId = randomBoolean() ? "node_id2" : null; @@ -286,7 +288,8 @@ public void testSelectNode_GivenJobOpeningAndIndexDoesNotExist() { MlMetadata.Builder mlMetadataBuilder = new MlMetadata.Builder(); Job job = createScheduledJob("job_id").build(new Date()); mlMetadataBuilder.putJob(job, false); - mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), null); + mlMetadataBuilder.putDatafeed(createDatafeed("datafeed_id", job.getId(), Collections.singletonList("not_foo")), + Collections.emptyMap()); mlMetadata = mlMetadataBuilder.build(); PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java index 357c2bc232552..14ec4813a749e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteJobIT.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.MetaData; @@ -47,7 +46,7 @@ public void onFailure(String source, Exception e) { } @Override - public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { markAsDeletedLatch.countDown(); } }); @@ -90,7 +89,7 @@ public void onFailure(String source, Exception e) { } @Override - public void clusterStatePublished(ClusterChangedEvent clusterChangedEvent) { + public void clusterStateProcessed(String source, ClusterState oldState, ClusterState newState) { removeJobLatch.countDown(); } }); From 3f5ebb862de56efed4edcdb8b338590cb54be6f5 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Mon, 18 Jun 2018 08:21:41 -0700 Subject: [PATCH 24/34] Clarify that IP range data can be specified in CIDR notation. 
(#31374) --- docs/reference/mapping/types/range.asciidoc | 24 +++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/docs/reference/mapping/types/range.asciidoc b/docs/reference/mapping/types/range.asciidoc index 3013204e4ca21..a7ab6346176cb 100644 --- a/docs/reference/mapping/types/range.asciidoc +++ b/docs/reference/mapping/types/range.asciidoc @@ -168,6 +168,30 @@ This query produces a similar result: -------------------------------------------------- // TESTRESPONSE[s/"took": 13/"took" : $body.took/] +[[ip-range]] +==== IP Range + +In addition to the range format above, IP ranges can be provided in +https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing#CIDR_notation[CIDR] notation: + +[source,js] +-------------------------------------------------- +PUT range_index/_mapping/_doc +{ + "properties": { + "ip_whitelist": { + "type": "ip_range" + } + } +} + +PUT range_index/_doc/2 +{ + "ip_whitelist" : "192.168.0.0/16" +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:range_index] [[range-params]] ==== Parameters for range fields From ea92864eb179cb68af3f2a774713527f493f550b Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Mon, 18 Jun 2018 08:48:23 -0700 Subject: [PATCH 25/34] [DOCS] Adds testing for security APIs (#31345) --- x-pack/docs/build.gradle | 4 ---- .../en/rest-api/security/authenticate.asciidoc | 14 ++++++++------ x-pack/docs/en/rest-api/security/ssl.asciidoc | 1 + x-pack/docs/en/rest-api/security/tokens.asciidoc | 1 + x-pack/docs/en/rest-api/security/users.asciidoc | 1 + 5 files changed, 11 insertions(+), 10 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 5bff371d9c26a..ed70fcd44a7f2 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -16,9 +16,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/ml/functions/rare.asciidoc', 'en/ml/functions/sum.asciidoc', 'en/ml/functions/time.asciidoc', - 'en/rest-api/security/ssl.asciidoc', - 'en/rest-api/security/users.asciidoc', - 'en/rest-api/security/tokens.asciidoc', 'en/rest-api/watcher/put-watch.asciidoc', 'en/security/authentication/user-cache.asciidoc', 'en/security/authorization/field-and-document-access-control.asciidoc', @@ -76,7 +73,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/update-snapshot.asciidoc', 'en/rest-api/ml/validate-detector.asciidoc', 'en/rest-api/ml/validate-job.asciidoc', - 'en/rest-api/security/authenticate.asciidoc', 'en/rest-api/watcher/stats.asciidoc', 'en/watcher/example-watches/watching-time-series-data.asciidoc', ] diff --git a/x-pack/docs/en/rest-api/security/authenticate.asciidoc b/x-pack/docs/en/rest-api/security/authenticate.asciidoc index ba837ddfd2c20..ab259762332f9 100644 --- a/x-pack/docs/en/rest-api/security/authenticate.asciidoc +++ b/x-pack/docs/en/rest-api/security/authenticate.asciidoc @@ -35,12 +35,14 @@ The following example output provides information about the "rdeniro" user: -------------------------------------------------- { "username": "rdeniro", - "roles": [ - "admin", - "kibana4" + "roles": [ + "admin" ], - "metadata" : { - "employee_id": "8675309" - } + "full_name": null, + "email": null, + "metadata": { }, + "enabled": true } -------------------------------------------------- +// TESTRESPONSE[s/"rdeniro"/"$body.username"/] +// TESTRESPONSE[s/"admin"/"superuser"/] \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/security/ssl.asciidoc b/x-pack/docs/en/rest-api/security/ssl.asciidoc index f7a40c6d87607..6462699570fb0 100644 --- 
a/x-pack/docs/en/rest-api/security/ssl.asciidoc +++ b/x-pack/docs/en/rest-api/security/ssl.asciidoc @@ -109,3 +109,4 @@ The API returns the following results: } ] ---- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/rest-api/security/tokens.asciidoc b/x-pack/docs/en/rest-api/security/tokens.asciidoc index 70f255ead37c0..f991a5c0cb836 100644 --- a/x-pack/docs/en/rest-api/security/tokens.asciidoc +++ b/x-pack/docs/en/rest-api/security/tokens.asciidoc @@ -98,6 +98,7 @@ by the value of the `access_token`. -------------------------------------------------- curl -H "Authorization: Bearer dGhpcyBpcyBub3QgYSByZWFsIHRva2VuIGJ1dCBpdCBpcyBvbmx5IHRlc3QgZGF0YS4gZG8gbm90IHRyeSB0byByZWFkIHRva2VuIQ==" http://localhost:9200/_cluster/health -------------------------------------------------- +// NOTCONSOLE [[security-api-refresh-token]] To extend the life of an existing token, the token api may be called again with the refresh diff --git a/x-pack/docs/en/rest-api/security/users.asciidoc b/x-pack/docs/en/rest-api/security/users.asciidoc index 926193481afbc..c84da5c7d75ff 100644 --- a/x-pack/docs/en/rest-api/security/users.asciidoc +++ b/x-pack/docs/en/rest-api/security/users.asciidoc @@ -115,6 +115,7 @@ authenticated. For example: -------------------------------------------------- curl -u jacknich:j@rV1s http://localhost:9200/_cluster/health -------------------------------------------------- +// NOTCONSOLE [[security-api-get-user]] To retrieve a native user, submit a GET request to the `/_xpack/security/user/` From 73549281e8443a0b579b3f606a41b720465d8054 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 18 Jun 2018 12:06:42 -0400 Subject: [PATCH 26/34] Docs: Use the default distribution to test docs (#31251) This switches the docs tests from the `oss-zip` distribution to the `zip` distribution so they have xpack installed and configured with the default basic license. The goal is to be able to merge the `x-pack/docs` directory into the `docs` directory, marking the x-pack docs with some kind of marker. This is the first step in that process. This also enables `-Dtests.distribution` support for the `docs` directory so you can run the tests against the `oss-zip` distribution with something like ``` ./gradlew -p docs check -Dtests.distribution=oss-zip ``` We can set up Jenkins to run both. 
Relates to #30665 --- .../gradle/doc/DocsTestPlugin.groovy | 4 +++ docs/Versions.asciidoc | 1 + docs/build.gradle | 1 - docs/plugins/discovery-azure-classic.asciidoc | 2 +- docs/reference/cat/nodeattrs.asciidoc | 25 +++++++++++++++--- docs/reference/cat/templates.asciidoc | 7 +++++ docs/reference/cat/thread_pool.asciidoc | 25 +++++++++++------- docs/reference/cluster/nodes-info.asciidoc | 4 +-- docs/reference/cluster/stats.asciidoc | 26 +++++++++---------- .../setup/install/check-running.asciidoc | 2 +- 10 files changed, 66 insertions(+), 31 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy index f674dbd33cdfd..27f122b8610ee 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/doc/DocsTestPlugin.groovy @@ -32,6 +32,8 @@ public class DocsTestPlugin extends RestTestPlugin { public void apply(Project project) { project.pluginManager.apply('elasticsearch.standalone-rest-test') super.apply(project) + // The distribution can be configured with -Dtests.distribution on the command line + project.integTestCluster.distribution = System.getProperty('tests.distribution', 'zip') // Docs are published separately so no need to assemble project.tasks.remove(project.assemble) project.build.dependsOn.remove('assemble') @@ -43,6 +45,8 @@ public class DocsTestPlugin extends RestTestPlugin { '\\{version\\}': VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''), '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''), + '\\{build_flavor\\}' : + project.integTestCluster.distribution.startsWith('oss-') ? 'oss' : 'default', ] Task listSnippets = project.tasks.create('listSnippets', SnippetsTask) listSnippets.group 'Docs' diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 608d5b9288e6e..948a3387f0350 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -5,6 +5,7 @@ :branch: master :jdk: 1.8.0_131 :jdk_major: 8 +:build_flavor: default ////////// release-state can be: released | prerelease | unreleased diff --git a/docs/build.gradle b/docs/build.gradle index 6498de0218120..f1d1324192b16 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -20,7 +20,6 @@ apply plugin: 'elasticsearch.docs-test' integTestCluster { - distribution = 'oss-zip' /* Enable regexes in painless so our tests don't complain about example * snippets that use them. 
*/ setting 'script.painless.regex.enabled', 'true' diff --git a/docs/plugins/discovery-azure-classic.asciidoc b/docs/plugins/discovery-azure-classic.asciidoc index c56991b8f507f..1c1925de878aa 100644 --- a/docs/plugins/discovery-azure-classic.asciidoc +++ b/docs/plugins/discovery-azure-classic.asciidoc @@ -372,7 +372,7 @@ This command should give you a JSON result: "cluster_uuid" : "AT69_T_DTp-1qgIJlatQqA", "version" : { "number" : "{version}", - "build_flavor" : "oss", + "build_flavor" : "{build_flavor}", "build_type" : "zip", "build_hash" : "f27399d", "build_date" : "2016-03-30T09:51:41.449Z", diff --git a/docs/reference/cat/nodeattrs.asciidoc b/docs/reference/cat/nodeattrs.asciidoc index 196f142cc35e1..6c474c2117943 100644 --- a/docs/reference/cat/nodeattrs.asciidoc +++ b/docs/reference/cat/nodeattrs.asciidoc @@ -9,15 +9,23 @@ For example: GET /_cat/nodeattrs?v -------------------------------------------------- // CONSOLE +// TEST[s/\?v/\?v&s=node,attr/] +// Sort the resulting attributes so we can assert on them more easilly Could look like: [source,txt] -------------------------------------------------- node host ip attr value -EK_AsJb 127.0.0.1 127.0.0.1 testattr test +... +node-0 127.0.0.1 127.0.0.1 testattr test +... -------------------------------------------------- -// TESTRESPONSE[s/EK_AsJb/.+/ _cat] +// TESTRESPONSE[s/\.\.\.\n$/\n(.+ xpack\\.installed true\n)?\n/] +// TESTRESPONSE[s/\.\.\.\n/(.+ ml\\..+\n)*/ _cat] +// If xpack is not installed then neither ... with match anything +// If xpack is installed then the first ... contains ml attributes +// and the second contains xpack.installed=true The first few columns (`node`, `host`, `ip`) give you basic info per node and the `attr` and `value` columns give you the custom node attributes, @@ -46,15 +54,24 @@ mode (`v`). The header name will match the supplied value (e.g., GET /_cat/nodeattrs?v&h=name,pid,attr,value -------------------------------------------------- // CONSOLE +// TEST[s/,value/,value&s=node,attr/] +// Sort the resulting attributes so we can assert on them more easilly Might look like: [source,txt] -------------------------------------------------- name pid attr value -EK_AsJb 19566 testattr test +... +node-0 19566 testattr test +... -------------------------------------------------- -// TESTRESPONSE[s/EK_AsJb/.+/ s/19566/\\d*/ _cat] +// TESTRESPONSE[s/19566/\\d*/] +// TESTRESPONSE[s/\.\.\.\n$/\n(.+ xpack\\.installed true\n)?\n/] +// TESTRESPONSE[s/\.\.\.\n/(.+ ml\\..+\n)*/ _cat] +// If xpack is not installed then neither ... with match anything +// If xpack is installed then the first ... contains ml attributes +// and the second contains xpack.installed=true [cols="<,<,<,<,<",options="header",subs="normal"] |======================================================================= diff --git a/docs/reference/cat/templates.asciidoc b/docs/reference/cat/templates.asciidoc index bc221d13552c0..076e84b72b5d3 100644 --- a/docs/reference/cat/templates.asciidoc +++ b/docs/reference/cat/templates.asciidoc @@ -8,9 +8,16 @@ The `templates` command provides information about existing templates. GET /_cat/templates?v&s=name -------------------------------------------------- // CONSOLE +// TEST[s/templates/templates\/template*/] // TEST[s/^/PUT _template\/template0\n{"index_patterns": "te*", "order": 0}\n/] // TEST[s/^/PUT _template\/template1\n{"index_patterns": "tea*", "order": 1}\n/] // TEST[s/^/PUT _template\/template2\n{"index_patterns": "teak*", "order": 2, "version": 7}\n/] +// The substitions do two things: +// 1. 
Filter the response to just templates matching the te* pattern +// so that we only get the templates we expect regardless of which +// templates exist. If xpack is installed there will be unexpected +// templates. +// 2. Create some templates to expect in the response. which looks like diff --git a/docs/reference/cat/thread_pool.asciidoc b/docs/reference/cat/thread_pool.asciidoc index 306650feb958b..9528b7829e37f 100644 --- a/docs/reference/cat/thread_pool.asciidoc +++ b/docs/reference/cat/thread_pool.asciidoc @@ -18,18 +18,19 @@ node-0 analyze 0 0 0 node-0 fetch_shard_started 0 0 0 node-0 fetch_shard_store 0 0 0 node-0 flush 0 0 0 -node-0 force_merge 0 0 0 -node-0 generic 0 0 0 -node-0 get 0 0 0 -node-0 listener 0 0 0 -node-0 management 1 0 0 -node-0 refresh 0 0 0 -node-0 search 0 0 0 -node-0 snapshot 0 0 0 -node-0 warmer 0 0 0 +... node-0 write 0 0 0 -------------------------------------------------- +// TESTRESPONSE[s/\.\.\./(node-0 .+ 0 0 0\n)+/] // TESTRESPONSE[s/\d+/\\d+/ _cat] +// The substitutions do two things: +// 1. Expect any number of extra thread pools. This allows us to only list a +// few thread pools. The list would be super long otherwise. In addition, +// if xpack is installed then the list will contain more thread pools and +// this way we don't have to assert about them. +// 2. Expect any number of active, queued, or rejected items. We really don't +// know how many there will be and we just want to assert that there are +// numbers in the response, not *which* numbers are there. The first column is the node name @@ -52,10 +53,16 @@ generic get listener management +ml_autodetect (default distro only) +ml_datafeed (default distro only) +ml_utility (default distro only) refresh +rollup_indexing (default distro only)` search +security-token-key (default distro only) snapshot warmer +watcher (default distro only) write -------------------------------------------------- diff --git a/docs/reference/cluster/nodes-info.asciidoc b/docs/reference/cluster/nodes-info.asciidoc index 6522d0f5ad68a..2cd61dd905ff6 100644 --- a/docs/reference/cluster/nodes-info.asciidoc +++ b/docs/reference/cluster/nodes-info.asciidoc @@ -142,7 +142,7 @@ The result will look similar to: "host": "node-0.elastic.co", "ip": "192.168.17", "version": "{version}", - "build_flavor": "oss", + "build_flavor": "{build_flavor}", "build_type": "zip", "build_hash": "587409e", "roles": [ @@ -237,7 +237,7 @@ The result will look similar to: "host": "node-0.elastic.co", "ip": "192.168.17", "version": "{version}", - "build_flavor": "oss", + "build_flavor": "{build_flavor}", "build_type": "zip", "build_hash": "587409e", "roles": [], diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 6efb4dced8bb8..191da2660d668 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -192,23 +192,23 @@ Will return, for example: "description": "Ingest processor that extracts information from a user agent", "classname": "org.elasticsearch.ingest.useragent.IngestUserAgentPlugin", "has_native_controller": false - } + }, + ... ], - "network_types" : { - "transport_types" : { - "netty4" : 1 - }, - "http_types" : { - "netty4" : 1 - } - } + ... 
} } -------------------------------------------------- // TESTRESPONSE[s/"plugins": \[[^\]]*\]/"plugins": $body.$_path/] +// TESTRESPONSE[s/\.\.\./"network_types": "replace_me"/] // TESTRESPONSE[s/: (\-)?[0-9]+/: $body.$_path/] // TESTRESPONSE[s/: "[^"]*"/: $body.$_path/] -//// -The TESTRESPONSE above replace all the fields values by the expected ones in the test, -because we don't really care about the field values but we want to check the fields names. -//// \ No newline at end of file +// These replacements do a few things: +// 1. Ignore the contents of the `plugins` object because we don't know all of +// the plugins that will be in it. And because we figure folks don't need to +// see an exhaustive list anyway. +// 2. The last ... contains more things that we don't think are important to +// include in the output. +// 3. All of the numbers and strings on the right hand side of *every* field in +// the response are ignored. So we're really only asserting things about the +// the shape of this response, not the values in it. diff --git a/docs/reference/setup/install/check-running.asciidoc b/docs/reference/setup/install/check-running.asciidoc index 0cfc4b329ecfa..7b95a10158d2f 100644 --- a/docs/reference/setup/install/check-running.asciidoc +++ b/docs/reference/setup/install/check-running.asciidoc @@ -19,7 +19,7 @@ which should give you a response something like this: "cluster_uuid" : "AT69_T_DTp-1qgIJlatQqA", "version" : { "number" : "{version}", - "build_flavor" : "oss", + "build_flavor" : "{build_flavor}", "build_type" : "zip", "build_hash" : "f27399d", "build_date" : "2016-03-30T09:51:41.449Z", From f3297ed23a9472e2f184b0ac4b1f367daef44cbc Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 18 Jun 2018 19:02:51 +0200 Subject: [PATCH 27/34] Packaging: Remove windows bin files from the tar distribution (#30596) This commit removes windows specific files from the tar distribution. Windows users use the zip, linux users use the tar. 
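A quick sanity check of the resulting archive (the glob below assumes a single freshly built tar in the working directory) is that no Windows launchers remain:

```
# Should print nothing once the .bat and .exe files are gone from the tar
tar -tzf elasticsearch-*.tar.gz | grep -E '\.(bat|exe)$'
```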
--- distribution/archives/build.gradle | 11 ---------- distribution/build.gradle | 21 +++++++++++++++++-- .../migration/migrate_7_0/packaging.asciidoc | 5 +++++ .../test/resources/packaging/utils/xpack.bash | 12 ----------- 4 files changed, 24 insertions(+), 25 deletions(-) diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 71606c2c027a5..16b2a8a7a6ce6 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -18,7 +18,6 @@ */ import org.apache.tools.ant.taskdefs.condition.Os -import org.apache.tools.ant.filters.FixCrLfFilter import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.EmptyDirTask import org.elasticsearch.gradle.LoggedExec @@ -59,13 +58,6 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os } into('bin') { with binFiles(distributionType, oss) - with copySpec { - from('../src/bin') { - include '*.bat' - filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) - } - MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) - } } into('') { from { @@ -88,9 +80,6 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os } with noticeFile - from('../src') { - include 'bin/*.exe' - } into('modules') { with modulesFiles } diff --git a/distribution/build.gradle b/distribution/build.gradle index ff3a06b4dc577..6ffb678cb2ba8 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -21,6 +21,7 @@ import org.elasticsearch.gradle.ConcatFilesTask import org.elasticsearch.gradle.MavenFilteringHack import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.test.RunTask +import org.apache.tools.ant.filters.FixCrLfFilter import java.nio.file.Path @@ -281,15 +282,28 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { binFiles = { distributionType, oss -> copySpec { + // non-windows files, for all distributions with copySpec { - // main bin files, processed with distribution specific substitutions - // everything except windows files from '../src/bin' exclude '*.exe' exclude '*.bat' eachFile { it.setMode(0755) } MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) } + // windows files, only for zip + if (distributionType == 'zip') { + with copySpec { + from '../src/bin' + include '*.bat' + filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf')) + MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss)) + } + with copySpec { + from '../src/bin' + include '*.exe' + } + } + // module provided bin files with copySpec { eachFile { it.setMode(0755) } if (oss) { @@ -297,6 +311,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) { } else { from project(':distribution').buildDefaultBin } + if (distributionType != 'zip') { + exclude '*.bat' + } } } } diff --git a/docs/reference/migration/migrate_7_0/packaging.asciidoc b/docs/reference/migration/migrate_7_0/packaging.asciidoc index 4070d6807332d..934522db7162f 100644 --- a/docs/reference/migration/migrate_7_0/packaging.asciidoc +++ b/docs/reference/migration/migrate_7_0/packaging.asciidoc @@ -8,3 +8,8 @@ The systemd service file `/usr/lib/systemd/system/elasticsearch.service` was previously marked as a configuration file in rpm and deb packages. Overrides to the systemd elasticsearch service should be made in `/etc/systemd/system/elasticsearch.service.d/override.conf`. 
+ +==== tar package no longer includes windows specific files + +The tar package previously included files in the `bin` directory meant only +for windows. These files have been removed. Use the `zip` package instead. diff --git a/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash b/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash index 3e44ee9f83a58..c267744194a1c 100644 --- a/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash +++ b/x-pack/qa/vagrant/src/test/resources/packaging/utils/xpack.bash @@ -15,30 +15,18 @@ verify_xpack_installation() { #assert_file "$ESHOME/bin" d $user $group 755 local binaryFiles=( 'elasticsearch-certgen' - 'elasticsearch-certgen.bat' 'elasticsearch-certutil' - 'elasticsearch-certutil.bat' 'elasticsearch-croneval' - 'elasticsearch-croneval.bat' 'elasticsearch-migrate' - 'elasticsearch-migrate.bat' 'elasticsearch-saml-metadata' - 'elasticsearch-saml-metadata.bat' 'elasticsearch-setup-passwords' - 'elasticsearch-setup-passwords.bat' 'elasticsearch-sql-cli' - 'elasticsearch-sql-cli.bat' "elasticsearch-sql-cli-$(cat version).jar" # This jar is executable so we pitch it in bin so folks will find it 'elasticsearch-syskeygen' - 'elasticsearch-syskeygen.bat' 'elasticsearch-users' - 'elasticsearch-users.bat' 'x-pack-env' - 'x-pack-env.bat' 'x-pack-security-env' - 'x-pack-security-env.bat' 'x-pack-watcher-env' - 'x-pack-watcher-env.bat' ) local binaryFilesCount=5 # start with oss distro number From 340313b0482dbddde07b64172d668d27277e35bb Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 18 Jun 2018 19:03:46 +0200 Subject: [PATCH 28/34] RestAPI: Reject forcemerge requests with a body (#30792) This commit adds validation to forcemerge rest requests which contain a body. All parameters to force merge must be part of http params. 
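For illustration, assuming a hypothetical index named `twitter`, the two forms look roughly like this:

```
# Supported: force merge options belong in the query string
curl -X POST "localhost:9200/twitter/_forcemerge?max_num_segments=1"

# Rejected by this change: options passed in a request body now fail with
# "forcemerge takes arguments in query parameters, not in the request body"
curl -X POST "localhost:9200/twitter/_forcemerge" -H 'Content-Type: application/json' -d '{"max_num_segments": 1}'
```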
closes #29584 --- .../admin/indices/RestForceMergeAction.java | 3 ++ .../forcemerge/RestForceMergeActionTests.java | 47 +++++++++++++++++++ 2 files changed, 50 insertions(+) create mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java index dcc397be14263..6ec4cec77193e 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestForceMergeAction.java @@ -47,6 +47,9 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + if (request.hasContent()) { + throw new IllegalArgumentException("forcemerge takes arguments in query parameters, not in the request body"); + } ForceMergeRequest mergeRequest = new ForceMergeRequest(Strings.splitStringByCommaToArray(request.param("index"))); mergeRequest.indicesOptions(IndicesOptions.fromRequest(request, mergeRequest.indicesOptions())); mergeRequest.maxNumSegments(request.paramAsInt("max_num_segments", mergeRequest.maxNumSegments())); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java new file mode 100644 index 0000000000000..aeb5beb09e2fc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java @@ -0,0 +1,47 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.indices.forcemerge; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.action.admin.indices.RestForceMergeAction; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.rest.FakeRestRequest; + +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; + +public class RestForceMergeActionTests extends ESTestCase { + + public void testBodyRejection() throws Exception { + final RestForceMergeAction handler = new RestForceMergeAction(Settings.EMPTY, mock(RestController.class)); + String json = JsonXContent.contentBuilder().startObject().field("max_num_segments", 1).endObject().toString(); + final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) + .withContent(new BytesArray(json), XContentType.JSON).build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> handler.prepareRequest(request, mock(NodeClient.class))); + assertThat(e.getMessage(), equalTo("forcemerge takes arguments in query parameters, not in the request body")); + } +} From d9a6d69a0ddcb434811705067b40a0630edea3de Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Mon, 18 Jun 2018 13:50:52 -0400 Subject: [PATCH 29/34] Fix defaults in GeoShapeFieldMapper output (#31302) GeoShapeFieldMapper should show actual defaults instead of placeholder values when the mapping is requested with include_defaults=true. 
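As a rough sketch of the effect, assuming a hypothetical index `test` with a `geo_shape` field `location` mapped with `"tree": "quadtree"` and no explicit precision or tree levels:

```
# Hypothetical request; the index and field names are illustrative
curl -X GET "localhost:9200/test/_mapping/field/location?include_defaults=true&pretty"
# With this change the returned defaults are the real ones, e.g. for an
# unconfigured quadtree field: "precision": "50.0m" and "tree_levels": 21
# (9 levels for a geohash tree), instead of placeholder values.
```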
Closes #23206 --- .../index/mapper/GeoShapeFieldMapper.java | 17 +++- .../mapper/GeoShapeFieldMapperTests.java | 77 +++++++++++++++++++ 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index c0158f61c3af7..7b083c2ce9e0f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -546,11 +546,24 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, if (includeDefaults || fieldType().tree().equals(Defaults.TREE) == false) { builder.field(Names.TREE, fieldType().tree()); } - if (includeDefaults || fieldType().treeLevels() != 0) { + + if (fieldType().treeLevels() != 0) { builder.field(Names.TREE_LEVELS, fieldType().treeLevels()); + } else if(includeDefaults && fieldType().precisionInMeters() == -1) { // defaults only make sense if precision is not specified + if ("geohash".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.GEOHASH_LEVELS); + } else if ("legacyquadtree".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); + } else if ("quadtree".equals(fieldType().tree())) { + builder.field(Names.TREE_LEVELS, Defaults.QUADTREE_LEVELS); + } else { + throw new IllegalArgumentException("Unknown prefix tree type [" + fieldType().tree() + "]"); + } } - if (includeDefaults || fieldType().precisionInMeters() != -1) { + if (fieldType().precisionInMeters() != -1) { builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(fieldType().precisionInMeters())); + } else if (includeDefaults && fieldType().treeLevels() == 0) { // defaults only make sense if tree levels are not specified + builder.field(Names.TREE_PRESISION, DistanceUnit.METERS.toString(50)); } if (includeDefaults || fieldType().strategyName() != Defaults.STRATEGY) { builder.field(Names.STRATEGY, fieldType().strategyName()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 201e749cd22e7..00b3b7c7f3e73 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -27,6 +27,8 @@ import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -34,6 +36,7 @@ import java.io.IOException; import java.util.Collection; +import java.util.Collections; import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE; import static org.hamcrest.Matchers.containsString; @@ -517,4 +520,78 @@ public void testEmptyName() throws Exception { assertThat(e.getMessage(), containsString("name cannot be empty string")); } + public void testSerializeDefaults() throws Exception { + DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); + { + String mapping = 
Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":21")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "geohash") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"50.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":9")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("tree_levels", "6") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertFalse(serialized, serialized.contains("\"precision\":")); + assertTrue(serialized, serialized.contains("\"tree_levels\":6")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "6") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); + assertFalse(serialized, serialized.contains("\"tree_levels\":")); + } + { + String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("tree", "quadtree") + .field("precision", "6m") + .field("tree_levels", "5") + .endObject().endObject() + .endObject().endObject()); + DocumentMapper defaultMapper = parser.parse("type1", new CompressedXContent(mapping)); + String serialized = toXContentString((GeoShapeFieldMapper) defaultMapper.mappers().getMapper("location")); + assertTrue(serialized, serialized.contains("\"precision\":\"6.0m\"")); + assertTrue(serialized, serialized.contains("\"tree_levels\":5")); + } + } + + public String toXContentString(GeoShapeFieldMapper mapper) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); + mapper.doXContentBody(builder, true, new ToXContent.MapParams(Collections.singletonMap("include_defaults", "true"))); + return Strings.toString(builder.endObject()); + } + } From 2a8381d3fa5c8d88aff662d3c1066a6e258578c2 Mon 
Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Mon, 18 Jun 2018 15:05:34 -0400 Subject: [PATCH 30/34] Avoid sending duplicate remote failed shard requests (#31313) Today if a write replication request fails, we will send a shard-failed message to the master node to fail that replica. However, if there are many ongoing write replication requests and the master node is busy, we might overwhelm the cluster and the master node with many shard-failed requests. This commit tries to minimize the shard-failed requests in the above scenario by caching the ongoing shard-failed requests. This issue was discussed at https://discuss.elastic.co/t/half-dead-node-lead-to-cluster-hang/113658/25. --- .../action/shard/ShardStateAction.java | 123 ++++++++++++++- ...rdFailedClusterStateTaskExecutorTests.java | 64 ++++---- .../action/shard/ShardStateActionTests.java | 146 +++++++++++++++++- 3 files changed, 299 insertions(+), 34 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 915e900b9ddf1..f690efa4c9a0c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -25,10 +25,10 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterChangedEvent; -import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateObserver; import org.elasticsearch.cluster.ClusterStateTaskConfig; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.MasterNodeChangePredicate; import org.elasticsearch.cluster.NotMasterException; @@ -48,6 +48,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.NodeClosedException; @@ -68,7 +69,9 @@ import java.util.HashSet; import java.util.List; import java.util.Locale; +import java.util.Objects; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.function.Predicate; public class ShardStateAction extends AbstractComponent { @@ -80,6 +83,10 @@ public class ShardStateAction extends AbstractComponent { private final ClusterService clusterService; private final ThreadPool threadPool; + // a list of shards that failed during replication + // we keep track of these shards in order to avoid sending duplicate failed shard requests for a single failing shard. 
+ private final ConcurrentMap remoteFailedShardsCache = ConcurrentCollections.newConcurrentMap(); + @Inject public ShardStateAction(Settings settings, ClusterService clusterService, TransportService transportService, AllocationService allocationService, RoutingService routingService, ThreadPool threadPool) { @@ -146,8 +153,35 @@ private static boolean isMasterChannelException(TransportException exp) { */ public void remoteShardFailed(final ShardId shardId, String allocationId, long primaryTerm, boolean markAsStale, final String message, @Nullable final Exception failure, Listener listener) { assert primaryTerm > 0L : "primary term should be strictly positive"; - FailedShardEntry shardEntry = new FailedShardEntry(shardId, allocationId, primaryTerm, message, failure, markAsStale); - sendShardAction(SHARD_FAILED_ACTION_NAME, clusterService.state(), shardEntry, listener); + final FailedShardEntry shardEntry = new FailedShardEntry(shardId, allocationId, primaryTerm, message, failure, markAsStale); + final CompositeListener compositeListener = new CompositeListener(listener); + final CompositeListener existingListener = remoteFailedShardsCache.putIfAbsent(shardEntry, compositeListener); + if (existingListener == null) { + sendShardAction(SHARD_FAILED_ACTION_NAME, clusterService.state(), shardEntry, new Listener() { + @Override + public void onSuccess() { + try { + compositeListener.onSuccess(); + } finally { + remoteFailedShardsCache.remove(shardEntry); + } + } + @Override + public void onFailure(Exception e) { + try { + compositeListener.onFailure(e); + } finally { + remoteFailedShardsCache.remove(shardEntry); + } + } + }); + } else { + existingListener.addListener(listener); + } + } + + int remoteShardFailedCacheSize() { + return remoteFailedShardsCache.size(); } /** @@ -414,6 +448,23 @@ public String toString() { components.add("markAsStale [" + markAsStale + "]"); return String.join(", ", components); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + FailedShardEntry that = (FailedShardEntry) o; + // Exclude message and exception from equals and hashCode + return Objects.equals(this.shardId, that.shardId) && + Objects.equals(this.allocationId, that.allocationId) && + primaryTerm == that.primaryTerm && + markAsStale == that.markAsStale; + } + + @Override + public int hashCode() { + return Objects.hash(shardId, allocationId, primaryTerm, markAsStale); + } } public void shardStarted(final ShardRouting shardRouting, final String message, Listener listener) { @@ -585,6 +636,72 @@ default void onFailure(final Exception e) { } + /** + * A composite listener that allows registering multiple listeners dynamically. 
+ */ + static final class CompositeListener implements Listener { + private boolean isNotified = false; + private Exception failure = null; + private final List listeners = new ArrayList<>(); + + CompositeListener(Listener listener) { + listeners.add(listener); + } + + void addListener(Listener listener) { + final boolean ready; + synchronized (this) { + ready = this.isNotified; + if (ready == false) { + listeners.add(listener); + } + } + if (ready) { + if (failure != null) { + listener.onFailure(failure); + } else { + listener.onSuccess(); + } + } + } + + private void onCompleted(Exception failure) { + synchronized (this) { + this.failure = failure; + this.isNotified = true; + } + RuntimeException firstException = null; + for (Listener listener : listeners) { + try { + if (failure != null) { + listener.onFailure(failure); + } else { + listener.onSuccess(); + } + } catch (RuntimeException innerEx) { + if (firstException == null) { + firstException = innerEx; + } else { + firstException.addSuppressed(innerEx); + } + } + } + if (firstException != null) { + throw firstException; + } + } + + @Override + public void onSuccess() { + onCompleted(null); + } + + @Override + public void onFailure(Exception failure) { + onCompleted(failure); + } + } + public static class NoLongerPrimaryShardException extends ElasticsearchException { public NoLongerPrimaryShardException(ShardId shardId, String msg) { diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 9eeef54dfd796..01d0c518c1be7 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -22,11 +22,11 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.lucene.index.CorruptIndexException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.action.shard.ShardStateAction.FailedShardEntry; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.action.shard.ShardStateAction.FailedShardEntry; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; @@ -43,6 +43,7 @@ import org.elasticsearch.cluster.routing.allocation.StaleShard; import org.elasticsearch.cluster.routing.allocation.decider.ClusterRebalanceAllocationDecider; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.Index; @@ -53,9 +54,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -131,10 +130,15 @@ ClusterState applyFailedShards(ClusterState currentState, List fail tasks.addAll(failingTasks); tasks.addAll(nonExistentTasks); ClusterStateTaskExecutor.ClusterTasksResult result = failingExecutor.execute(currentState, tasks); - Map taskResultMap = - 
failingTasks.stream().collect(Collectors.toMap(Function.identity(), task -> ClusterStateTaskExecutor.TaskResult.failure(new RuntimeException("simulated applyFailedShards failure")))); - taskResultMap.putAll(nonExistentTasks.stream().collect(Collectors.toMap(Function.identity(), task -> ClusterStateTaskExecutor.TaskResult.success()))); - assertTaskResults(taskResultMap, result, currentState, false); + List> taskResultList = new ArrayList<>(); + for (FailedShardEntry failingTask : failingTasks) { + taskResultList.add(Tuple.tuple(failingTask, + ClusterStateTaskExecutor.TaskResult.failure(new RuntimeException("simulated applyFailedShards failure")))); + } + for (FailedShardEntry nonExistentTask : nonExistentTasks) { + taskResultList.add(Tuple.tuple(nonExistentTask, ClusterStateTaskExecutor.TaskResult.success())); + } + assertTaskResults(taskResultList, result, currentState, false); } public void testIllegalShardFailureRequests() throws Exception { @@ -147,14 +151,14 @@ public void testIllegalShardFailureRequests() throws Exception { tasks.add(new FailedShardEntry(failingTask.shardId, failingTask.allocationId, randomIntBetween(1, (int) primaryTerm - 1), failingTask.message, failingTask.failure, randomBoolean())); } - Map taskResultMap = - tasks.stream().collect(Collectors.toMap( - Function.identity(), - task -> ClusterStateTaskExecutor.TaskResult.failure(new ShardStateAction.NoLongerPrimaryShardException(task.shardId, - "primary term [" + task.primaryTerm + "] did not match current primary term [" + - currentState.metaData().index(task.shardId.getIndex()).primaryTerm(task.shardId.id()) + "]")))); + List> taskResultList = tasks.stream() + .map(task -> Tuple.tuple(task, ClusterStateTaskExecutor.TaskResult.failure( + new ShardStateAction.NoLongerPrimaryShardException(task.shardId, "primary term [" + + task.primaryTerm + "] did not match current primary term [" + + currentState.metaData().index(task.shardId.getIndex()).primaryTerm(task.shardId.id()) + "]")))) + .collect(Collectors.toList()); ClusterStateTaskExecutor.ClusterTasksResult result = executor.execute(currentState, tasks); - assertTaskResults(taskResultMap, result, currentState, false); + assertTaskResults(taskResultList, result, currentState, false); } public void testMarkAsStaleWhenFailingShard() throws Exception { @@ -251,44 +255,44 @@ private static void assertTasksSuccessful( ClusterState clusterState, boolean clusterStateChanged ) { - Map taskResultMap = - tasks.stream().collect(Collectors.toMap(Function.identity(), task -> ClusterStateTaskExecutor.TaskResult.success())); - assertTaskResults(taskResultMap, result, clusterState, clusterStateChanged); + List> taskResultList = tasks.stream() + .map(t -> Tuple.tuple(t, ClusterStateTaskExecutor.TaskResult.success())).collect(Collectors.toList()); + assertTaskResults(taskResultList, result, clusterState, clusterStateChanged); } private static void assertTaskResults( - Map taskResultMap, + List> taskResultList, ClusterStateTaskExecutor.ClusterTasksResult result, ClusterState clusterState, boolean clusterStateChanged ) { // there should be as many task results as tasks - assertEquals(taskResultMap.size(), result.executionResults.size()); + assertEquals(taskResultList.size(), result.executionResults.size()); - for (Map.Entry entry : taskResultMap.entrySet()) { + for (Tuple entry : taskResultList) { // every task should have a corresponding task result - assertTrue(result.executionResults.containsKey(entry.getKey())); + assertTrue(result.executionResults.containsKey(entry.v1())); // the task 
results are as expected - assertEquals(entry.getKey().toString(), entry.getValue().isSuccess(), result.executionResults.get(entry.getKey()).isSuccess()); + assertEquals(entry.v1().toString(), entry.v2().isSuccess(), result.executionResults.get(entry.v1()).isSuccess()); } List shards = clusterState.getRoutingTable().allShards(); - for (Map.Entry entry : taskResultMap.entrySet()) { - if (entry.getValue().isSuccess()) { + for (Tuple entry : taskResultList) { + if (entry.v2().isSuccess()) { // the shard was successfully failed and so should not be in the routing table for (ShardRouting shard : shards) { if (shard.assignedToNode()) { - assertFalse("entry key " + entry.getKey() + ", shard routing " + shard, - entry.getKey().getShardId().equals(shard.shardId()) && - entry.getKey().getAllocationId().equals(shard.allocationId().getId())); + assertFalse("entry key " + entry.v1() + ", shard routing " + shard, + entry.v1().getShardId().equals(shard.shardId()) && + entry.v1().getAllocationId().equals(shard.allocationId().getId())); } } } else { // check we saw the expected failure - ClusterStateTaskExecutor.TaskResult actualResult = result.executionResults.get(entry.getKey()); - assertThat(actualResult.getFailure(), instanceOf(entry.getValue().getFailure().getClass())); - assertThat(actualResult.getFailure().getMessage(), equalTo(entry.getValue().getFailure().getMessage())); + ClusterStateTaskExecutor.TaskResult actualResult = result.executionResults.get(entry.v1()); + assertThat(actualResult.getFailure(), instanceOf(entry.v2().getFailure().getClass())); + assertThat(actualResult.getFailure().getMessage(), equalTo(entry.v2().getFailure().getMessage())); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java index bbd326ff2fedb..1d78cdeb98374 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardStateActionTests.java @@ -59,9 +59,10 @@ import org.junit.BeforeClass; import java.io.IOException; -import java.util.UUID; import java.util.Collections; +import java.util.UUID; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.Phaser; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; @@ -73,6 +74,8 @@ import static org.elasticsearch.test.ClusterServiceUtils.setState; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.CoreMatchers.sameInstance; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -138,6 +141,7 @@ public void tearDown() throws Exception { clusterService.close(); transportService.close(); super.tearDown(); + assertThat(shardStateAction.remoteShardFailedCacheSize(), equalTo(0)); } @AfterClass @@ -381,6 +385,89 @@ public void onFailure(Exception e) { assertThat(failure.get().getMessage(), equalTo(catastrophicError.getMessage())); } + public void testCacheRemoteShardFailed() throws Exception { + final String index = "test"; + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + ShardRouting failedShard = getRandomShardRouting(index); + boolean markAsStale = 
randomBoolean(); + int numListeners = between(1, 100); + CountDownLatch latch = new CountDownLatch(numListeners); + long primaryTerm = randomLongBetween(1, Long.MAX_VALUE); + for (int i = 0; i < numListeners; i++) { + shardStateAction.remoteShardFailed(failedShard.shardId(), failedShard.allocationId().getId(), + primaryTerm, markAsStale, "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + latch.countDown(); + } + @Override + public void onFailure(Exception e) { + latch.countDown(); + } + }); + } + CapturingTransport.CapturedRequest[] capturedRequests = transport.getCapturedRequestsAndClear(); + assertThat(capturedRequests, arrayWithSize(1)); + transport.handleResponse(capturedRequests[0].requestId, TransportResponse.Empty.INSTANCE); + latch.await(); + assertThat(transport.capturedRequests(), arrayWithSize(0)); + } + + public void testRemoteShardFailedConcurrently() throws Exception { + final String index = "test"; + final AtomicBoolean shutdown = new AtomicBoolean(false); + setState(clusterService, ClusterStateCreationUtils.stateWithActivePrimary(index, true, randomInt(5))); + ShardRouting[] failedShards = new ShardRouting[between(1, 5)]; + for (int i = 0; i < failedShards.length; i++) { + failedShards[i] = getRandomShardRouting(index); + } + Thread[] clientThreads = new Thread[between(1, 6)]; + int iterationsPerThread = scaledRandomIntBetween(50, 500); + Phaser barrier = new Phaser(clientThreads.length + 2); // one for master thread, one for the main thread + Thread masterThread = new Thread(() -> { + barrier.arriveAndAwaitAdvance(); + while (shutdown.get() == false) { + for (CapturingTransport.CapturedRequest request : transport.getCapturedRequestsAndClear()) { + if (randomBoolean()) { + transport.handleResponse(request.requestId, TransportResponse.Empty.INSTANCE); + } else { + transport.handleRemoteError(request.requestId, randomFrom(getSimulatedFailure())); + } + } + } + }); + masterThread.start(); + + AtomicInteger notifiedResponses = new AtomicInteger(); + for (int t = 0; t < clientThreads.length; t++) { + clientThreads[t] = new Thread(() -> { + barrier.arriveAndAwaitAdvance(); + for (int i = 0; i < iterationsPerThread; i++) { + ShardRouting failedShard = randomFrom(failedShards); + shardStateAction.remoteShardFailed(failedShard.shardId(), failedShard.allocationId().getId(), + randomLongBetween(1, Long.MAX_VALUE), randomBoolean(), "test", getSimulatedFailure(), new ShardStateAction.Listener() { + @Override + public void onSuccess() { + notifiedResponses.incrementAndGet(); + } + @Override + public void onFailure(Exception e) { + notifiedResponses.incrementAndGet(); + } + }); + } + }); + clientThreads[t].start(); + } + barrier.arriveAndAwaitAdvance(); + for (Thread t : clientThreads) { + t.join(); + } + assertBusy(() -> assertThat(notifiedResponses.get(), equalTo(clientThreads.length * iterationsPerThread))); + shutdown.set(true); + masterThread.join(); + } + private ShardRouting getRandomShardRouting(String index) { IndexRoutingTable indexRoutingTable = clusterService.state().routingTable().index(index); ShardsIterator shardsIterator = indexRoutingTable.randomAllActiveShardsIt(); @@ -452,4 +539,61 @@ BytesReference serialize(Writeable writeable, Version version) throws IOExceptio return out.bytes(); } } + + public void testCompositeListener() throws Exception { + AtomicInteger successCount = new AtomicInteger(); + AtomicInteger failureCount = new AtomicInteger(); + Exception failure = randomBoolean() ? 
getSimulatedFailure() : null; + ShardStateAction.CompositeListener compositeListener = new ShardStateAction.CompositeListener(new ShardStateAction.Listener() { + @Override + public void onSuccess() { + successCount.incrementAndGet(); + } + @Override + public void onFailure(Exception e) { + assertThat(e, sameInstance(failure)); + failureCount.incrementAndGet(); + } + }); + int iterationsPerThread = scaledRandomIntBetween(100, 1000); + Thread[] threads = new Thread[between(1, 4)]; + Phaser barrier = new Phaser(threads.length + 1); + for (int i = 0; i < threads.length; i++) { + threads[i] = new Thread(() -> { + barrier.arriveAndAwaitAdvance(); + for (int n = 0; n < iterationsPerThread; n++) { + compositeListener.addListener(new ShardStateAction.Listener() { + @Override + public void onSuccess() { + successCount.incrementAndGet(); + } + @Override + public void onFailure(Exception e) { + assertThat(e, sameInstance(failure)); + failureCount.incrementAndGet(); + } + }); + } + }); + threads[i].start(); + } + barrier.arriveAndAwaitAdvance(); + if (failure != null) { + compositeListener.onFailure(failure); + } else { + compositeListener.onSuccess(); + } + for (Thread t : threads) { + t.join(); + } + assertBusy(() -> { + if (failure != null) { + assertThat(successCount.get(), equalTo(0)); + assertThat(failureCount.get(), equalTo(threads.length*iterationsPerThread + 1)); + } else { + assertThat(successCount.get(), equalTo(threads.length*iterationsPerThread + 1)); + assertThat(failureCount.get(), equalTo(0)); + } + }); + } } From 4ad334f8e0af43f6aa38bd8dbe7bc7227cd9eb11 Mon Sep 17 00:00:00 2001 From: Lee Hinman Date: Mon, 18 Jun 2018 15:04:26 -0600 Subject: [PATCH 31/34] Fix reference to XContentBuilder.string() (#31337) In 6.3 this was moved to `Strings.toString(XContentBuilder)` as part of the XContent extraction. This commit fixes the docs to reference the new method. Resolves #31326 --- docs/java-api/docs/index_.asciidoc | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/java-api/docs/index_.asciidoc b/docs/java-api/docs/index_.asciidoc index b455a7ab01ff3..2ce19cfffa098 100644 --- a/docs/java-api/docs/index_.asciidoc +++ b/docs/java-api/docs/index_.asciidoc @@ -99,11 +99,13 @@ Note that you can also add arrays with `startArray(String)` and other XContentBuilder objects. If you need to see the generated JSON content, you can use the -`string()` method. +`Strings.toString()` method. [source,java] -------------------------------------------------- -String json = builder.string(); +import org.elasticsearch.common.Strings; + +String json = Strings.toString(builder); -------------------------------------------------- From e67aa96c81f309ebedc1d529e255b48069b262d1 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 18 Jun 2018 23:53:04 +0200 Subject: [PATCH 32/34] Core: Combine Action and GenericAction (#31405) Since #30966, Action no longer has anything but a call to the GenericAction super constructor. This commit renames GenericAction into Action, thus eliminating the Action class. Additionally, this commit removes the Request generic parameter of the class, since it was unused. 
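
For readers skimming the diffstat below: after this change a concrete action is simply a named singleton extending Action, parameterized (per the commit message) only by its response type. The following is a minimal sketch of the resulting pattern; MyAction and MyResponse are hypothetical names used for illustration and are not part of this patch, but the shape mirrors the actions touched in the diff (e.g. ClusterHealthAction).

import org.elasticsearch.action.Action;
import org.elasticsearch.action.ActionResponse;

// Hypothetical example of a concrete action once GenericAction's functionality
// lives directly in Action and the Request type parameter has been removed.
public class MyAction extends Action<MyAction.MyResponse> {

    public static final MyAction INSTANCE = new MyAction();
    public static final String NAME = "cluster:admin/example/my_action";

    private MyAction() {
        super(NAME); // the action name must be unique across actions
    }

    @Override
    public MyResponse newResponse() {
        return new MyResponse();
    }

    // Minimal response type for the sketch; real actions define theirs in a separate file.
    public static class MyResponse extends ActionResponse {
    }
}

Such a singleton is what the ActionModule changes below register in the Action -> TransportAction map consumed by NodeClient, and what request builders are handed in place of the old GenericAction reference.
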
--- .../noop/action/bulk/NoopBulkAction.java | 3 +- .../noop/action/search/NoopSearchAction.java | 3 +- .../ingest/common/GrokProcessorGetAction.java | 2 +- .../mustache/MultiSearchTemplateAction.java | 2 +- .../script/mustache/SearchTemplateAction.java | 2 +- .../painless/PainlessExecuteAction.java | 2 +- .../index/rankeval/RankEvalAction.java | 2 +- .../rankeval/RankEvalRequestBuilder.java | 2 +- .../AbstractBaseReindexRestHandler.java | 4 +- .../AbstractBulkByQueryRestHandler.java | 4 +- .../BulkByScrollParallelizationHelper.java | 6 +- .../index/reindex/RethrottleAction.java | 2 +- .../reindex/RethrottleRequestBuilder.java | 2 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../java/org/elasticsearch/action/Action.java | 44 ++++++++++-- .../elasticsearch/action/ActionModule.java | 10 +-- .../action/ActionRequestBuilder.java | 4 +- .../elasticsearch/action/GenericAction.java | 67 ------------------- .../action/TransportActionNodeProxy.java | 4 +- .../ClusterAllocationExplainAction.java | 2 +- .../cluster/health/ClusterHealthAction.java | 2 +- .../hotthreads/NodesHotThreadsAction.java | 2 +- .../cluster/node/info/NodesInfoAction.java | 2 +- .../NodesReloadSecureSettingsAction.java | 2 +- .../cluster/node/stats/NodesStatsAction.java | 2 +- .../node/tasks/cancel/CancelTasksAction.java | 2 +- .../cluster/node/tasks/get/GetTaskAction.java | 2 +- .../node/tasks/list/ListTasksAction.java | 2 +- .../cluster/node/usage/NodesUsageAction.java | 2 +- .../node/usage/NodesUsageRequestBuilder.java | 2 +- .../cluster/remote/RemoteInfoAction.java | 2 +- .../delete/DeleteRepositoryAction.java | 2 +- .../get/GetRepositoriesAction.java | 2 +- .../repositories/put/PutRepositoryAction.java | 2 +- .../verify/VerifyRepositoryAction.java | 2 +- .../cluster/reroute/ClusterRerouteAction.java | 2 +- .../settings/ClusterUpdateSettingsAction.java | 2 +- .../shards/ClusterSearchShardsAction.java | 2 +- .../create/CreateSnapshotAction.java | 2 +- .../delete/DeleteSnapshotAction.java | 2 +- .../snapshots/get/GetSnapshotsAction.java | 2 +- .../restore/RestoreSnapshotAction.java | 2 +- .../status/SnapshotsStatusAction.java | 2 +- .../cluster/state/ClusterStateAction.java | 2 +- .../cluster/stats/ClusterStatsAction.java | 2 +- .../DeleteStoredScriptAction.java | 2 +- .../storedscripts/GetStoredScriptAction.java | 2 +- .../storedscripts/PutStoredScriptAction.java | 2 +- .../tasks/PendingClusterTasksAction.java | 2 +- .../indices/alias/IndicesAliasesAction.java | 2 +- .../alias/exists/AliasesExistAction.java | 3 +- .../alias/get/BaseAliasesRequestBuilder.java | 2 +- .../indices/alias/get/GetAliasesAction.java | 2 +- .../admin/indices/analyze/AnalyzeAction.java | 2 +- .../cache/clear/ClearIndicesCacheAction.java | 2 +- .../admin/indices/close/CloseIndexAction.java | 2 +- .../indices/create/CreateIndexAction.java | 2 +- .../indices/delete/DeleteIndexAction.java | 2 +- .../exists/indices/IndicesExistsAction.java | 2 +- .../exists/types/TypesExistsAction.java | 2 +- .../admin/indices/flush/FlushAction.java | 2 +- .../indices/flush/SyncedFlushAction.java | 2 +- .../indices/forcemerge/ForceMergeAction.java | 2 +- .../admin/indices/get/GetIndexAction.java | 2 +- .../mapping/get/GetFieldMappingsAction.java | 2 +- .../mapping/get/GetMappingsAction.java | 2 +- .../indices/mapping/put/PutMappingAction.java | 2 +- .../admin/indices/open/OpenIndexAction.java | 2 +- .../indices/recovery/RecoveryAction.java | 2 +- .../admin/indices/refresh/RefreshAction.java | 2 +- .../indices/rollover/RolloverAction.java | 2 +- 
.../segments/IndicesSegmentsAction.java | 2 +- .../settings/get/GetSettingsAction.java | 2 +- .../settings/put/UpdateSettingsAction.java | 2 +- .../IndicesShardStoreRequestBuilder.java | 2 +- .../shards/IndicesShardStoresAction.java | 2 +- .../admin/indices/shrink/ResizeAction.java | 2 +- .../indices/shrink/ResizeRequestBuilder.java | 2 +- .../admin/indices/shrink/ShrinkAction.java | 2 +- .../indices/stats/IndicesStatsAction.java | 2 +- .../delete/DeleteIndexTemplateAction.java | 2 +- .../template/get/GetIndexTemplatesAction.java | 2 +- .../template/put/PutIndexTemplateAction.java | 2 +- .../upgrade/get/UpgradeStatusAction.java | 2 +- .../indices/upgrade/post/UpgradeAction.java | 2 +- .../upgrade/post/UpgradeSettingsAction.java | 2 +- .../validate/query/ValidateQueryAction.java | 2 +- .../elasticsearch/action/bulk/BulkAction.java | 2 +- .../action/delete/DeleteAction.java | 2 +- .../action/explain/ExplainAction.java | 2 +- .../fieldcaps/FieldCapabilitiesAction.java | 2 +- .../elasticsearch/action/get/GetAction.java | 2 +- .../action/get/MultiGetAction.java | 2 +- .../action/index/IndexAction.java | 2 +- .../action/ingest/DeletePipelineAction.java | 2 +- .../action/ingest/GetPipelineAction.java | 2 +- .../action/ingest/IngestActionForwarder.java | 2 +- .../action/ingest/PutPipelineAction.java | 2 +- .../action/ingest/SimulatePipelineAction.java | 2 +- .../elasticsearch/action/main/MainAction.java | 2 +- .../action/search/ClearScrollAction.java | 2 +- .../action/search/MultiSearchAction.java | 2 +- .../action/search/SearchAction.java | 2 +- .../action/search/SearchScrollAction.java | 2 +- .../BroadcastOperationRequestBuilder.java | 2 +- .../master/AcknowledgedRequestBuilder.java | 2 +- .../MasterNodeOperationRequestBuilder.java | 2 +- ...MasterNodeReadOperationRequestBuilder.java | 2 +- .../info/ClusterInfoRequestBuilder.java | 2 +- .../nodes/NodesOperationRequestBuilder.java | 2 +- .../ReplicationRequestBuilder.java | 2 +- .../InstanceShardOperationRequestBuilder.java | 2 +- .../SingleShardOperationRequestBuilder.java | 2 +- .../support/tasks/TasksRequestBuilder.java | 2 +- .../termvectors/MultiTermVectorsAction.java | 2 +- .../action/termvectors/TermVectorsAction.java | 2 +- .../action/update/UpdateAction.java | 2 +- .../client/ElasticsearchClient.java | 4 +- .../elasticsearch/client/FilterClient.java | 2 +- .../client/ParentTaskAssigningClient.java | 2 +- .../elasticsearch/client/node/NodeClient.java | 17 +++-- .../client/support/AbstractClient.java | 16 ++--- .../client/transport/TransportClient.java | 7 +- .../transport/TransportProxyClient.java | 11 ++- .../AbstractBulkByScrollRequestBuilder.java | 2 +- ...stractBulkIndexByScrollRequestBuilder.java | 2 +- .../index/reindex/DeleteByQueryAction.java | 2 +- .../reindex/DeleteByQueryRequestBuilder.java | 4 +- .../index/reindex/ReindexAction.java | 2 +- .../index/reindex/ReindexRequestBuilder.java | 4 +- .../index/reindex/UpdateByQueryAction.java | 2 +- .../reindex/UpdateByQueryRequestBuilder.java | 4 +- .../java/org/elasticsearch/node/Node.java | 4 +- .../CompletionPersistentTaskAction.java | 2 +- .../persistent/PersistentTasksService.java | 2 +- .../RemovePersistentTaskAction.java | 2 +- .../persistent/StartPersistentTaskAction.java | 2 +- .../UpdatePersistentTaskStatusAction.java | 2 +- .../elasticsearch/plugins/ActionPlugin.java | 14 ++-- .../transport/RemoteClusterAwareClient.java | 2 +- .../action/ActionModuleTests.java | 2 +- ...nericActionTests.java => ActionTests.java} | 4 +- .../cluster/node/tasks/TestTaskPlugin.java | 9 ++- 
.../client/AbstractClientHeadersTestCase.java | 6 +- .../ParentTaskAssigningClientTests.java | 4 +- .../client/node/NodeClientHeadersTests.java | 10 +-- .../TransportClientHeadersTests.java | 4 +- .../indices/settings/UpdateSettingsIT.java | 2 +- .../persistent/TestPersistentTasksPlugin.java | 2 +- .../elasticsearch/test/client/NoOpClient.java | 2 +- .../license/DeleteLicenseAction.java | 2 +- .../license/GetBasicStatusAction.java | 2 +- .../license/GetLicenseAction.java | 2 +- .../license/GetTrialStatusAction.java | 2 +- .../license/PostStartBasicAction.java | 2 +- .../license/PostStartTrialAction.java | 2 +- .../license/PutLicenseAction.java | 2 +- .../xpack/core/ClientHelper.java | 10 +-- .../xpack/core/XPackClientPlugin.java | 4 +- .../elasticsearch/xpack/core/XPackPlugin.java | 6 +- .../xpack/core/action/XPackInfoAction.java | 2 +- .../xpack/core/action/XPackUsageAction.java | 2 +- .../deprecation/DeprecationInfoAction.java | 2 +- .../core/graph/action/GraphExploreAction.java | 2 +- .../xpack/core/ml/action/CloseJobAction.java | 2 +- .../core/ml/action/DeleteCalendarAction.java | 2 +- .../ml/action/DeleteCalendarEventAction.java | 2 +- .../core/ml/action/DeleteDatafeedAction.java | 2 +- .../ml/action/DeleteExpiredDataAction.java | 2 +- .../core/ml/action/DeleteFilterAction.java | 2 +- .../xpack/core/ml/action/DeleteJobAction.java | 2 +- .../ml/action/DeleteModelSnapshotAction.java | 2 +- .../ml/action/FinalizeJobExecutionAction.java | 2 +- .../xpack/core/ml/action/FlushJobAction.java | 2 +- .../core/ml/action/ForecastJobAction.java | 2 +- .../core/ml/action/GetBucketsAction.java | 2 +- .../ml/action/GetCalendarEventsAction.java | 2 +- .../core/ml/action/GetCalendarsAction.java | 2 +- .../core/ml/action/GetCategoriesAction.java | 2 +- .../core/ml/action/GetDatafeedsAction.java | 2 +- .../ml/action/GetDatafeedsStatsAction.java | 2 +- .../core/ml/action/GetFiltersAction.java | 2 +- .../core/ml/action/GetInfluencersAction.java | 2 +- .../xpack/core/ml/action/GetJobsAction.java | 2 +- .../core/ml/action/GetJobsStatsAction.java | 2 +- .../ml/action/GetModelSnapshotsAction.java | 2 +- .../ml/action/GetOverallBucketsAction.java | 2 +- .../core/ml/action/GetRecordsAction.java | 2 +- .../core/ml/action/IsolateDatafeedAction.java | 2 +- .../core/ml/action/KillProcessAction.java | 2 +- .../xpack/core/ml/action/MlInfoAction.java | 2 +- .../xpack/core/ml/action/OpenJobAction.java | 2 +- .../core/ml/action/PersistJobAction.java | 2 +- .../ml/action/PostCalendarEventsAction.java | 2 +- .../xpack/core/ml/action/PostDataAction.java | 2 +- .../core/ml/action/PreviewDatafeedAction.java | 2 +- .../core/ml/action/PutCalendarAction.java | 2 +- .../core/ml/action/PutDatafeedAction.java | 2 +- .../xpack/core/ml/action/PutFilterAction.java | 2 +- .../xpack/core/ml/action/PutJobAction.java | 2 +- .../ml/action/RevertModelSnapshotAction.java | 3 +- .../core/ml/action/StartDatafeedAction.java | 2 +- .../core/ml/action/StopDatafeedAction.java | 2 +- .../ml/action/UpdateCalendarJobAction.java | 2 +- .../core/ml/action/UpdateDatafeedAction.java | 2 +- .../xpack/core/ml/action/UpdateJobAction.java | 2 +- .../ml/action/UpdateModelSnapshotAction.java | 2 +- .../core/ml/action/UpdateProcessAction.java | 2 +- .../ml/action/ValidateDetectorAction.java | 2 +- .../ml/action/ValidateJobConfigAction.java | 2 +- .../action/MonitoringBulkAction.java | 2 +- .../rollup/action/DeleteRollupJobAction.java | 2 +- .../rollup/action/GetRollupCapsAction.java | 2 +- .../rollup/action/GetRollupJobsAction.java | 2 +- 
.../rollup/action/PutRollupJobAction.java | 2 +- .../rollup/action/RollupSearchAction.java | 2 +- .../rollup/action/StartRollupJobAction.java | 2 +- .../rollup/action/StopRollupJobAction.java | 2 +- .../action/realm/ClearRealmCacheAction.java | 2 +- .../action/role/ClearRolesCacheAction.java | 2 +- .../action/role/DeleteRoleAction.java | 2 +- .../security/action/role/GetRolesAction.java | 2 +- .../security/action/role/PutRoleAction.java | 2 +- .../rolemapping/DeleteRoleMappingAction.java | 2 +- .../rolemapping/GetRoleMappingsAction.java | 4 +- .../rolemapping/PutRoleMappingAction.java | 2 +- .../action/saml/SamlAuthenticateAction.java | 2 +- .../saml/SamlInvalidateSessionAction.java | 2 +- .../action/saml/SamlLogoutAction.java | 2 +- .../saml/SamlPrepareAuthenticationAction.java | 2 +- .../action/token/CreateTokenAction.java | 2 +- .../token/CreateTokenRequestBuilder.java | 2 +- .../action/token/InvalidateTokenAction.java | 2 +- .../action/token/RefreshTokenAction.java | 2 +- .../action/user/AuthenticateAction.java | 2 +- .../action/user/ChangePasswordAction.java | 2 +- .../action/user/DeleteUserAction.java | 2 +- .../security/action/user/GetUsersAction.java | 2 +- .../action/user/HasPrivilegesAction.java | 2 +- .../security/action/user/PutUserAction.java | 2 +- .../action/user/SetEnabledAction.java | 2 +- .../ssl/action/GetCertificateInfoAction.java | 2 +- .../upgrade/actions/IndexUpgradeAction.java | 2 +- .../actions/IndexUpgradeInfoAction.java | 2 +- .../transport/actions/ack/AckWatchAction.java | 2 +- .../actions/activate/ActivateWatchAction.java | 2 +- .../actions/delete/DeleteWatchAction.java | 2 +- .../actions/execute/ExecuteWatchAction.java | 2 +- .../transport/actions/get/GetWatchAction.java | 2 +- .../transport/actions/put/PutWatchAction.java | 2 +- .../actions/service/WatcherServiceAction.java | 2 +- .../actions/stats/WatcherStatsAction.java | 2 +- .../action/oauth2/RestGetTokenAction.java | 2 +- ...sportSamlInvalidateSessionActionTests.java | 2 +- .../index/IndexAuditTrailMutedTests.java | 2 +- .../authc/esnative/NativeUsersStoreTests.java | 2 +- .../support/SecurityIndexManagerTests.java | 4 +- .../sql/plugin/SqlClearCursorAction.java | 2 +- .../xpack/sql/plugin/SqlQueryAction.java | 2 +- .../xpack/sql/plugin/SqlTranslateAction.java | 2 +- 260 files changed, 359 insertions(+), 400 deletions(-) delete mode 100644 server/src/main/java/org/elasticsearch/action/GenericAction.java rename server/src/test/java/org/elasticsearch/action/{GenericActionTests.java => ActionTests.java} (93%) diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java index 73678b2f5ea65..2bfd3b0cc8ed4 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/NoopBulkAction.java @@ -19,10 +19,9 @@ package org.elasticsearch.plugin.noop.action.bulk; import org.elasticsearch.action.Action; -import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; -public class NoopBulkAction extends Action { +public class NoopBulkAction extends Action { public static final String NAME = "mock:data/write/bulk"; public static final NoopBulkAction INSTANCE = new NoopBulkAction(); diff --git 
a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java index ca2c3d9adfc41..e7e515594a55d 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchAction.java @@ -19,10 +19,9 @@ package org.elasticsearch.plugin.noop.action.search; import org.elasticsearch.action.Action; -import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -public class NoopSearchAction extends Action { +public class NoopSearchAction extends Action { public static final NoopSearchAction INSTANCE = new NoopSearchAction(); public static final String NAME = "mock:data/read/search"; diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 5204a07b1c969..4b74bb800458d 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -52,7 +52,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestStatus.OK; -public class GrokProcessorGetAction extends Action { +public class GrokProcessorGetAction extends Action { public static final GrokProcessorGetAction INSTANCE = new GrokProcessorGetAction(); public static final String NAME = "cluster:admin/ingest/processor/grok/get"; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java index 21b9a11e1f214..372b328bbfc1a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiSearchTemplateAction extends Action { +public class MultiSearchTemplateAction extends Action { public static final MultiSearchTemplateAction INSTANCE = new MultiSearchTemplateAction(); public static final String NAME = "indices:data/read/msearch/template"; diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java index 1246e8e8e9bf2..a08329f48dcbb 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SearchTemplateAction extends Action { +public class SearchTemplateAction extends Action { public static final SearchTemplateAction INSTANCE = new SearchTemplateAction(); public static final String NAME = "indices:data/read/search/template"; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index f91d349f80657..5430303feb262 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -62,7 +62,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestStatus.OK; -public class PainlessExecuteAction extends Action { +public class PainlessExecuteAction extends Action { static final PainlessExecuteAction INSTANCE = new PainlessExecuteAction(); private static final String NAME = "cluster:admin/scripts/painless/execute"; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java index 441cbb5fac108..54e89fe0e98b8 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalAction.java @@ -24,7 +24,7 @@ /** * Action for explaining evaluating search ranking results. */ -public class RankEvalAction extends Action { +public class RankEvalAction extends Action { public static final RankEvalAction INSTANCE = new RankEvalAction(); public static final String NAME = "indices:data/read/rank_eval"; diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java index 146c987eff0ac..4108a817f046e 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalRequestBuilder.java @@ -25,7 +25,7 @@ public class RankEvalRequestBuilder extends ActionRequestBuilder { - public RankEvalRequestBuilder(ElasticsearchClient client, Action action, + public RankEvalRequestBuilder(ElasticsearchClient client, Action action, RankEvalRequest request) { super(client, action, request); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java index de0c39b8f65ba..bf333352dd55c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBaseReindexRestHandler.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; @@ -38,7 +38,7 @@ public abstract class AbstractBaseReindexRestHandler< Request extends AbstractBulkByScrollRequest, - A extends GenericAction + A extends Action > extends BaseRestHandler { private final A action; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java index 230828ed3fca9..fab94494fe13d 100644 --- 
a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByQueryRestHandler.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.reindex; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; @@ -38,7 +38,7 @@ */ public abstract class AbstractBulkByQueryRestHandler< Request extends AbstractBulkByScrollRequest, - A extends GenericAction> extends AbstractBaseReindexRestHandler { + A extends Action> extends AbstractBaseReindexRestHandler { protected AbstractBulkByQueryRestHandler(Settings settings, A action) { super(settings, action); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java index 3cc0901c81e21..799d5874e15ba 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkByScrollParallelizationHelper.java @@ -61,7 +61,7 @@ private BulkByScrollParallelizationHelper() {} static > void startSlicedAction( Request request, BulkByScrollTask task, - Action action, + Action action, ActionListener listener, Client client, DiscoveryNode node, @@ -85,7 +85,7 @@ static > void startSlicedAc private static > void sliceConditionally( Request request, BulkByScrollTask task, - Action action, + Action action, ActionListener listener, Client client, DiscoveryNode node, @@ -118,7 +118,7 @@ private static int countSlicesBasedOnShards(ClusterSearchShardsResponse response private static > void sendSubRequests( Client client, - Action action, + Action action, String localNodeId, BulkByScrollTask task, Request request, diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java index ff0803c77425f..27c1b851439ae 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -public class RethrottleAction extends Action { +public class RethrottleAction extends Action { public static final RethrottleAction INSTANCE = new RethrottleAction(); public static final String NAME = "cluster:admin/reindex/rethrottle"; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java index b70389b5c9f3e..25407e6dc93d5 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RethrottleRequestBuilder.java @@ -29,7 +29,7 @@ */ public class RethrottleRequestBuilder extends TasksRequestBuilder { public RethrottleRequestBuilder(ElasticsearchClient client, - Action action) { + Action action) { super(client, action, new RethrottleRequest()); } diff --git 
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 727710e8b6bdd..9d22b90ee7f5b 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -745,7 +745,7 @@ private class MyMockClient extends FilterClient { @SuppressWarnings("unchecked") protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) { listener.onFailure( new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders())); diff --git a/server/src/main/java/org/elasticsearch/action/Action.java b/server/src/main/java/org/elasticsearch/action/Action.java index 8d419f379d3b0..2fc49d69ed1cc 100644 --- a/server/src/main/java/org/elasticsearch/action/Action.java +++ b/server/src/main/java/org/elasticsearch/action/Action.java @@ -19,13 +19,49 @@ package org.elasticsearch.action; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.transport.TransportRequestOptions; + /** - * Base action. Supports building the Request through a RequestBuilder. + * A generic action. Should strive to make it a singleton. */ -public abstract class Action - extends GenericAction { +public abstract class Action { + + private final String name; + /** + * @param name The name of the action, must be unique across actions. + */ protected Action(String name) { - super(name); + this.name = name; + } + + /** + * The name of the action. Must be unique across actions. + */ + public String name() { + return this.name; + } + + /** + * Creates a new response instance. + */ + public abstract Response newResponse(); + + /** + * Optional request options for the action. + */ + public TransportRequestOptions transportOptions(Settings settings) { + return TransportRequestOptions.EMPTY; + } + + @Override + public boolean equals(Object o) { + return o instanceof Action && name.equals(((Action) o).name()); + } + + @Override + public int hashCode() { + return name.hashCode(); } } diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 7ddb39b6d6225..324e75d64d80f 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -406,8 +406,8 @@ public void register(ActionHandler handler) { } public void register( - GenericAction action, Class> transportAction, - Class... supportTransportActions) { + Action action, Class> transportAction, + Class... 
supportTransportActions) { register(new ActionHandler<>(action, transportAction, supportTransportActions)); } } @@ -675,10 +675,10 @@ protected void configure() { bind(AutoCreateIndex.class).toInstance(autoCreateIndex); bind(TransportLivenessAction.class).asEagerSingleton(); - // register GenericAction -> transportAction Map used by NodeClient + // register Action -> transportAction Map used by NodeClient @SuppressWarnings("rawtypes") - MapBinder transportActionsBinder - = MapBinder.newMapBinder(binder(), GenericAction.class, TransportAction.class); + MapBinder transportActionsBinder + = MapBinder.newMapBinder(binder(), Action.class, TransportAction.class); for (ActionHandler action : actions.values()) { // bind the action as eager singleton, so the map binder one will reuse it bind(action.getTransportAction()).asEagerSingleton(); diff --git a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index 208c609333c8c..a3aa8ac2a5222 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -26,11 +26,11 @@ public abstract class ActionRequestBuilder { - protected final Action action; + protected final Action action; protected final Request request; protected final ElasticsearchClient client; - protected ActionRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected ActionRequestBuilder(ElasticsearchClient client, Action action, Request request) { Objects.requireNonNull(action, "action must not be null"); this.action = action; this.request = request; diff --git a/server/src/main/java/org/elasticsearch/action/GenericAction.java b/server/src/main/java/org/elasticsearch/action/GenericAction.java deleted file mode 100644 index 6220a1b2062bf..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/GenericAction.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action; - -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.transport.TransportRequestOptions; - -/** - * A generic action. Should strive to make it a singleton. - */ -public abstract class GenericAction { - - private final String name; - - /** - * @param name The name of the action, must be unique across actions. - */ - protected GenericAction(String name) { - this.name = name; - } - - /** - * The name of the action. Must be unique across actions. - */ - public String name() { - return this.name; - } - - /** - * Creates a new response instance. - */ - public abstract Response newResponse(); - - /** - * Optional request options for the action. 
- */ - public TransportRequestOptions transportOptions(Settings settings) { - return TransportRequestOptions.EMPTY; - } - - @Override - public boolean equals(Object o) { - return o instanceof GenericAction && name.equals(((GenericAction) o).name()); - } - - @Override - public int hashCode() { - return name.hashCode(); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java index 2e7cbec93d9ae..c369deb0b10b3 100644 --- a/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java +++ b/server/src/main/java/org/elasticsearch/action/TransportActionNodeProxy.java @@ -31,10 +31,10 @@ public class TransportActionNodeProxy extends AbstractComponent { private final TransportService transportService; - private final GenericAction action; + private final Action action; private final TransportRequestOptions transportOptions; - public TransportActionNodeProxy(Settings settings, GenericAction action, TransportService transportService) { + public TransportActionNodeProxy(Settings settings, Action action, TransportService transportService) { super(settings); this.action = action; this.transportService = transportService; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java index 04fb3e915d20f..19d5378b305ed 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainAction.java @@ -24,7 +24,7 @@ /** * Action for explaining shard allocation for a shard in the cluster */ -public class ClusterAllocationExplainAction extends Action { +public class ClusterAllocationExplainAction extends Action { public static final ClusterAllocationExplainAction INSTANCE = new ClusterAllocationExplainAction(); public static final String NAME = "cluster:monitor/allocation/explain"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java index 31781ca13fe86..0cd148ee231e0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/ClusterHealthAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterHealthAction extends Action { +public class ClusterHealthAction extends Action { public static final ClusterHealthAction INSTANCE = new ClusterHealthAction(); public static final String NAME = "cluster:monitor/health"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java index a3b4161732700..4ea7ee5bc3bbe 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesHotThreadsAction extends Action { +public class NodesHotThreadsAction extends Action { public static final 
NodesHotThreadsAction INSTANCE = new NodesHotThreadsAction(); public static final String NAME = "cluster:monitor/nodes/hot_threads"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java index e46bc54d80004..edc5ed7e83f0f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesInfoAction extends Action { +public class NodesInfoAction extends Action { public static final NodesInfoAction INSTANCE = new NodesInfoAction(); public static final String NAME = "cluster:monitor/nodes/info"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java index ccaeca8702f0b..19e8fc1929c5d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/NodesReloadSecureSettingsAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; public class NodesReloadSecureSettingsAction - extends Action { + extends Action { public static final NodesReloadSecureSettingsAction INSTANCE = new NodesReloadSecureSettingsAction(); public static final String NAME = "cluster:admin/nodes/reload_secure_settings"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java index d8018716135cb..bc8c81ef1e0f5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesStatsAction extends Action { +public class NodesStatsAction extends Action { public static final NodesStatsAction INSTANCE = new NodesStatsAction(); public static final String NAME = "cluster:monitor/nodes/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java index 0c21c9a919849..0ea6162e59c7f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/CancelTasksAction.java @@ -24,7 +24,7 @@ /** * Action for cancelling running tasks */ -public class CancelTasksAction extends Action { +public class CancelTasksAction extends Action { public static final CancelTasksAction INSTANCE = new CancelTasksAction(); public static final String NAME = "cluster:admin/tasks/cancel"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java index 9dfe1e83e5f9a..d1e27e49088c7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/GetTaskAction.java @@ -24,7 +24,7 @@ /** * Action for retrieving a list of currently running tasks */ -public class GetTaskAction extends Action { +public class GetTaskAction extends Action { public static final GetTaskAction INSTANCE = new GetTaskAction(); public static final String NAME = "cluster:monitor/task/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java index e054f074aa21e..b02d019859fcf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/ListTasksAction.java @@ -24,7 +24,7 @@ /** * Action for retrieving a list of currently running tasks */ -public class ListTasksAction extends Action { +public class ListTasksAction extends Action { public static final ListTasksAction INSTANCE = new ListTasksAction(); public static final String NAME = "cluster:monitor/tasks/lists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java index 7722339ebe0ee..6bc6dce54945a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class NodesUsageAction extends Action { +public class NodesUsageAction extends Action { public static final NodesUsageAction INSTANCE = new NodesUsageAction(); public static final String NAME = "cluster:monitor/nodes/usage"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java index 05ab953a0d919..d8af249be0251 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/usage/NodesUsageRequestBuilder.java @@ -26,7 +26,7 @@ public class NodesUsageRequestBuilder extends NodesOperationRequestBuilder { - public NodesUsageRequestBuilder(ElasticsearchClient client, Action action) { + public NodesUsageRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new NodesUsageRequest()); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java index 6e3c877156d80..3b998049daaaa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/RemoteInfoAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public final class RemoteInfoAction extends Action { +public final class RemoteInfoAction extends Action { public static final String NAME = "cluster:monitor/remote/info"; public static final RemoteInfoAction INSTANCE = new RemoteInfoAction(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java index 3d7f9187ea256..c5c72bb97f1e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/delete/DeleteRepositoryAction.java @@ -24,7 +24,7 @@ /** * Unregister repository action */ -public class DeleteRepositoryAction extends Action { +public class DeleteRepositoryAction extends Action { public static final DeleteRepositoryAction INSTANCE = new DeleteRepositoryAction(); public static final String NAME = "cluster:admin/repository/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java index 0ec28a4781deb..d89e466461d9b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/get/GetRepositoriesAction.java @@ -24,7 +24,7 @@ /** * Get repositories action */ -public class GetRepositoriesAction extends Action { +public class GetRepositoriesAction extends Action { public static final GetRepositoriesAction INSTANCE = new GetRepositoriesAction(); public static final String NAME = "cluster:admin/repository/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java index 3c3d24fb2d3f5..801e0a513b077 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/put/PutRepositoryAction.java @@ -24,7 +24,7 @@ /** * Register repository action */ -public class PutRepositoryAction extends Action { +public class PutRepositoryAction extends Action { public static final PutRepositoryAction INSTANCE = new PutRepositoryAction(); public static final String NAME = "cluster:admin/repository/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java index 4d5865fdf9cee..743b0a57f3aa3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/repositories/verify/VerifyRepositoryAction.java @@ -24,7 +24,7 @@ /** * Unregister repository action */ -public class VerifyRepositoryAction extends Action { +public class VerifyRepositoryAction extends Action { public static final VerifyRepositoryAction INSTANCE = new VerifyRepositoryAction(); public static final String NAME = "cluster:admin/repository/verify"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java index 4366465ac69e9..06b083e3414b2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteAction.java @@ -21,7 +21,7 @@ import 
org.elasticsearch.action.Action; -public class ClusterRerouteAction extends Action { +public class ClusterRerouteAction extends Action { public static final ClusterRerouteAction INSTANCE = new ClusterRerouteAction(); public static final String NAME = "cluster:admin/reroute"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java index 347deae04c049..af6a87f5a57e6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterUpdateSettingsAction extends Action { +public class ClusterUpdateSettingsAction extends Action { public static final ClusterUpdateSettingsAction INSTANCE = new ClusterUpdateSettingsAction(); public static final String NAME = "cluster:admin/settings/update"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java index 3184298ef8b2e..ec936c623a24a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterSearchShardsAction extends Action { +public class ClusterSearchShardsAction extends Action { public static final ClusterSearchShardsAction INSTANCE = new ClusterSearchShardsAction(); public static final String NAME = "indices:admin/shards/search_shards"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java index c51ff1ff009cd..d37132a1d81e7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotAction.java @@ -24,7 +24,7 @@ /** * Create snapshot action */ -public class CreateSnapshotAction extends Action { +public class CreateSnapshotAction extends Action { public static final CreateSnapshotAction INSTANCE = new CreateSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/create"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java index 0a806e2d82d5e..ac04ea6690562 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/delete/DeleteSnapshotAction.java @@ -24,7 +24,7 @@ /** * Delete snapshot action */ -public class DeleteSnapshotAction extends Action { +public class DeleteSnapshotAction extends Action { public static final DeleteSnapshotAction INSTANCE = new DeleteSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/delete"; diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java index 7eb23a836aad2..b5015ff5c23b0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsAction.java @@ -24,7 +24,7 @@ /** * Get snapshots action */ -public class GetSnapshotsAction extends Action { +public class GetSnapshotsAction extends Action { public static final GetSnapshotsAction INSTANCE = new GetSnapshotsAction(); public static final String NAME = "cluster:admin/snapshot/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java index 4043784d470a7..e633ce43e66cd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotAction.java @@ -24,7 +24,7 @@ /** * Restore snapshot action */ -public class RestoreSnapshotAction extends Action { +public class RestoreSnapshotAction extends Action { public static final RestoreSnapshotAction INSTANCE = new RestoreSnapshotAction(); public static final String NAME = "cluster:admin/snapshot/restore"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java index 20d1e80451d5b..ea28d26b40ffa 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusAction.java @@ -24,7 +24,7 @@ /** * Snapshots status action */ -public class SnapshotsStatusAction extends Action { +public class SnapshotsStatusAction extends Action { public static final SnapshotsStatusAction INSTANCE = new SnapshotsStatusAction(); public static final String NAME = "cluster:admin/snapshot/status"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java index 663622b62a319..f48df06d53c6f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/ClusterStateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClusterStateAction extends Action { +public class ClusterStateAction extends Action { public static final ClusterStateAction INSTANCE = new ClusterStateAction(); public static final String NAME = "cluster:monitor/state"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java index a62c0a9743c57..049ce62d9df40 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class 
ClusterStatsAction extends Action { +public class ClusterStatsAction extends Action { public static final ClusterStatsAction INSTANCE = new ClusterStatsAction(); public static final String NAME = "cluster:monitor/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java index ab99a6aa8a75b..070d8d055ea89 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteStoredScriptAction extends Action { +public class DeleteStoredScriptAction extends Action { public static final DeleteStoredScriptAction INSTANCE = new DeleteStoredScriptAction(); public static final String NAME = "cluster:admin/script/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java index b5d5e864d0701..e8015a4487496 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetStoredScriptAction extends Action { +public class GetStoredScriptAction extends Action { public static final GetStoredScriptAction INSTANCE = new GetStoredScriptAction(); public static final String NAME = "cluster:admin/script/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java index 1924276e1c4da..41c345fd00733 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; -public class PutStoredScriptAction extends Action { +public class PutStoredScriptAction extends Action { public static final PutStoredScriptAction INSTANCE = new PutStoredScriptAction(); public static final String NAME = "cluster:admin/script/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java index 98b17e9968405..296c65146a03c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PendingClusterTasksAction extends Action { +public class PendingClusterTasksAction extends Action { public static final PendingClusterTasksAction INSTANCE = new PendingClusterTasksAction(); public static final String NAME = "cluster:monitor/task"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java index f584a7520fe87..378e017855a13 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/IndicesAliasesAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndicesAliasesAction extends Action { +public class IndicesAliasesAction extends Action { public static final IndicesAliasesAction INSTANCE = new IndicesAliasesAction(); public static final String NAME = "indices:admin/aliases"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java index de4d044e4cb0b..dfaebab076c66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/exists/AliasesExistAction.java @@ -20,9 +20,8 @@ package org.elasticsearch.action.admin.indices.alias.exists; import org.elasticsearch.action.Action; -import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -public class AliasesExistAction extends Action { +public class AliasesExistAction extends Action { public static final AliasesExistAction INSTANCE = new AliasesExistAction(); public static final String NAME = "indices:admin/aliases/exists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java index f50ae07d1f596..ebecc0dbf18ea 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/BaseAliasesRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class BaseAliasesRequestBuilder> extends MasterNodeReadOperationRequestBuilder { - public BaseAliasesRequestBuilder(ElasticsearchClient client, Action action, String... aliases) { + public BaseAliasesRequestBuilder(ElasticsearchClient client, Action action, String... 
aliases) { super(client, action, new GetAliasesRequest(aliases)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java index d8f2453c2cbd0..db423c2aaaa07 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/alias/get/GetAliasesAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetAliasesAction extends Action { +public class GetAliasesAction extends Action { public static final GetAliasesAction INSTANCE = new GetAliasesAction(); public static final String NAME = "indices:admin/aliases/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 1288eccede285..e2bbd655992de 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class AnalyzeAction extends Action { +public class AnalyzeAction extends Action { public static final AnalyzeAction INSTANCE = new AnalyzeAction(); public static final String NAME = "indices:admin/analyze"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java index f0b0b19ad5ffa..e5bdd53bdc7a7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/ClearIndicesCacheAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClearIndicesCacheAction extends Action { +public class ClearIndicesCacheAction extends Action { public static final ClearIndicesCacheAction INSTANCE = new ClearIndicesCacheAction(); public static final String NAME = "indices:admin/cache/clear"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java index f206324f5ea54..5c3d60dd44013 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/close/CloseIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class CloseIndexAction extends Action { +public class CloseIndexAction extends Action { public static final CloseIndexAction INSTANCE = new CloseIndexAction(); public static final String NAME = "indices:admin/close"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java index cceadaabefddc..3993ea7256f61 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class CreateIndexAction extends Action { +public class CreateIndexAction extends 
Action { public static final CreateIndexAction INSTANCE = new CreateIndexAction(); public static final String NAME = "indices:admin/create"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java index 40cabf5e78175..680501af3b761 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/delete/DeleteIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteIndexAction extends Action { +public class DeleteIndexAction extends Action { public static final DeleteIndexAction INSTANCE = new DeleteIndexAction(); public static final String NAME = "indices:admin/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java index af655b9d1b081..b878994549f5a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/indices/IndicesExistsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndicesExistsAction extends Action { +public class IndicesExistsAction extends Action { public static final IndicesExistsAction INSTANCE = new IndicesExistsAction(); public static final String NAME = "indices:admin/exists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java index 3f3e1d98b5058..0b508110d7f50 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/exists/types/TypesExistsAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.action.Action; -public class TypesExistsAction extends Action { +public class TypesExistsAction extends Action { public static final TypesExistsAction INSTANCE = new TypesExistsAction(); public static final String NAME = "indices:admin/types/exists"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java index 41e7d214760b3..60d5b43a6c189 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/FlushAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class FlushAction extends Action { +public class FlushAction extends Action { public static final FlushAction INSTANCE = new FlushAction(); public static final String NAME = "indices:admin/flush"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java index 7501f2dc2c238..5005cd2ec0878 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/SyncedFlushAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.action.Action; -public class SyncedFlushAction 
extends Action { +public class SyncedFlushAction extends Action { public static final SyncedFlushAction INSTANCE = new SyncedFlushAction(); public static final String NAME = "indices:admin/synced_flush"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java index 524d41333012b..51095435343cb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ForceMergeAction extends Action { +public class ForceMergeAction extends Action { public static final ForceMergeAction INSTANCE = new ForceMergeAction(); public static final String NAME = "indices:admin/forcemerge"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java index 843d331db95a0..86396f246a414 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/get/GetIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetIndexAction extends Action { +public class GetIndexAction extends Action { public static final GetIndexAction INSTANCE = new GetIndexAction(); public static final String NAME = "indices:admin/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java index 58fd60f997429..5aa19652b676d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetFieldMappingsAction extends Action { +public class GetFieldMappingsAction extends Action { public static final GetFieldMappingsAction INSTANCE = new GetFieldMappingsAction(); public static final String NAME = "indices:admin/mappings/fields/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java index 08042baa803ef..8bae685fff5dd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/GetMappingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetMappingsAction extends Action { +public class GetMappingsAction extends Action { public static final GetMappingsAction INSTANCE = new GetMappingsAction(); public static final String NAME = "indices:admin/mappings/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java index cdbf0bb8b53c2..63ab198f816bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PutMappingAction extends Action { +public class PutMappingAction extends Action { public static final PutMappingAction INSTANCE = new PutMappingAction(); public static final String NAME = "indices:admin/mapping/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java index 669dbfc78a5b0..ea6f1eb6afd95 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/OpenIndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class OpenIndexAction extends Action { +public class OpenIndexAction extends Action { public static final OpenIndexAction INSTANCE = new OpenIndexAction(); public static final String NAME = "indices:admin/open"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java index 453d52c02a087..bfe261b58843a 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/RecoveryAction.java @@ -24,7 +24,7 @@ /** * Recovery information action */ -public class RecoveryAction extends Action { +public class RecoveryAction extends Action { public static final RecoveryAction INSTANCE = new RecoveryAction(); public static final String NAME = "indices:monitor/recovery"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java index 059e26d29fec5..b0dac076b2f49 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/refresh/RefreshAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class RefreshAction extends Action { +public class RefreshAction extends Action { public static final RefreshAction INSTANCE = new RefreshAction(); public static final String NAME = "indices:admin/refresh"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java index 18edb82af08d7..0bcd4eefe0762 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/RolloverAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class RolloverAction extends Action { +public class RolloverAction extends Action { public static final RolloverAction INSTANCE = new RolloverAction(); public static final String NAME = "indices:admin/rollover"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java index 420a2dea04cc5..669c31d6b087d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/IndicesSegmentsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndicesSegmentsAction extends Action { +public class IndicesSegmentsAction extends Action { public static final IndicesSegmentsAction INSTANCE = new IndicesSegmentsAction(); public static final String NAME = "indices:monitor/segments"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java index b06af8726878a..e4149aaf8f15f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/get/GetSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetSettingsAction extends Action { +public class GetSettingsAction extends Action { public static final GetSettingsAction INSTANCE = new GetSettingsAction(); public static final String NAME = "indices:monitor/settings/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java index 9a8c667e18202..7f82c8f29e750 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/settings/put/UpdateSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpdateSettingsAction extends Action { +public class UpdateSettingsAction extends Action { public static final UpdateSettingsAction INSTANCE = new UpdateSettingsAction(); public static final String NAME = "indices:admin/settings/update"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java index bb21ea9f85eac..cf38feae56f13 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestBuilder.java @@ -30,7 +30,7 @@ */ public class IndicesShardStoreRequestBuilder extends MasterNodeReadOperationRequestBuilder { - public IndicesShardStoreRequestBuilder(ElasticsearchClient client, Action action, String... indices) { + public IndicesShardStoreRequestBuilder(ElasticsearchClient client, Action action, String... 
indices) { super(client, action, new IndicesShardStoresRequest(indices)); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java index 8bb08ee0f09a8..d3ce0077d5e94 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoresAction.java @@ -28,7 +28,7 @@ * Shard store information reports which nodes hold shard copies, how recent they are * and any exceptions on opening the shard index or from previous engine failures */ -public class IndicesShardStoresAction extends Action { +public class IndicesShardStoresAction extends Action { public static final IndicesShardStoresAction INSTANCE = new IndicesShardStoresAction(); public static final String NAME = "indices:monitor/shard_stores"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java index 5791999b7fc85..72dcb17f212be 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeAction.java @@ -22,7 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.Action; -public class ResizeAction extends Action { +public class ResizeAction extends Action { public static final ResizeAction INSTANCE = new ResizeAction(); public static final String NAME = "indices:admin/resize"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java index 36f645c16170c..e4b9a34b00415 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestBuilder.java @@ -27,7 +27,7 @@ public class ResizeRequestBuilder extends AcknowledgedRequestBuilder { - public ResizeRequestBuilder(ElasticsearchClient client, Action action) { + public ResizeRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new ResizeRequest()); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java index cddc8390b546e..a2c689ba360ee 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ShrinkAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ShrinkAction extends Action { +public class ShrinkAction extends Action { public static final ShrinkAction INSTANCE = new ShrinkAction(); public static final String NAME = "indices:admin/shrink"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java index 60363722193cb..6765279eb6b87 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsAction.java @@ -21,7 +21,7 @@ import 
org.elasticsearch.action.Action; -public class IndicesStatsAction extends Action { +public class IndicesStatsAction extends Action { public static final IndicesStatsAction INSTANCE = new IndicesStatsAction(); public static final String NAME = "indices:monitor/stats"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java index 5cef3045846a2..47f37b9e6cfee 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/delete/DeleteIndexTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteIndexTemplateAction extends Action { +public class DeleteIndexTemplateAction extends Action { public static final DeleteIndexTemplateAction INSTANCE = new DeleteIndexTemplateAction(); public static final String NAME = "indices:admin/template/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java index 1735e7fee3873..ac00b80079ca1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetIndexTemplatesAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.action.Action; -public class GetIndexTemplatesAction extends Action { +public class GetIndexTemplatesAction extends Action { public static final GetIndexTemplatesAction INSTANCE = new GetIndexTemplatesAction(); public static final String NAME = "indices:admin/template/get"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java index b73384452e6ee..399170eaeced9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PutIndexTemplateAction extends Action { +public class PutIndexTemplateAction extends Action { public static final PutIndexTemplateAction INSTANCE = new PutIndexTemplateAction(); public static final String NAME = "indices:admin/template/put"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java index 9e078a53bb714..57506b615d692 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/UpgradeStatusAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpgradeStatusAction extends Action { +public class UpgradeStatusAction extends Action { public static final UpgradeStatusAction INSTANCE = new UpgradeStatusAction(); public static final String NAME = "indices:monitor/upgrade"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java index 20f780cd0bce7..7ec83930e44bf 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeAction.java @@ -24,7 +24,7 @@ /** * Upgrade index/indices action. */ -public class UpgradeAction extends Action { +public class UpgradeAction extends Action { public static final UpgradeAction INSTANCE = new UpgradeAction(); public static final String NAME = "indices:admin/upgrade"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java index aabb9b3c660d4..5f1ee88e34e8b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/UpgradeSettingsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpgradeSettingsAction extends Action { +public class UpgradeSettingsAction extends Action { public static final UpgradeSettingsAction INSTANCE = new UpgradeSettingsAction(); public static final String NAME = "internal:indices/admin/upgrade"; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java index 47c982d5f6d85..93151dd8a2ba9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ValidateQueryAction extends Action { +public class ValidateQueryAction extends Action { public static final ValidateQueryAction INSTANCE = new ValidateQueryAction(); public static final String NAME = "indices:admin/validate/query"; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java index ea7d0160fd996..84b854a98ee86 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkAction.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.TransportRequestOptions; -public class BulkAction extends Action { +public class BulkAction extends Action { public static final BulkAction INSTANCE = new BulkAction(); public static final String NAME = "indices:data/write/bulk"; diff --git a/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java b/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java index 84135b617b59b..d78b6f60bffc3 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java +++ b/server/src/main/java/org/elasticsearch/action/delete/DeleteAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteAction extends Action { +public class DeleteAction extends Action { public static final DeleteAction INSTANCE = new DeleteAction(); public static final String NAME = "indices:data/write/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java 
b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java index b298e62f514a1..13c9d94e7dbc7 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainAction.java @@ -24,7 +24,7 @@ /** * Entry point for the explain feature. */ -public class ExplainAction extends Action { +public class ExplainAction extends Action { public static final ExplainAction INSTANCE = new ExplainAction(); public static final String NAME = "indices:data/read/explain"; diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java index 24896d3c620d1..39c6ecce308e0 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class FieldCapabilitiesAction extends Action { +public class FieldCapabilitiesAction extends Action { public static final FieldCapabilitiesAction INSTANCE = new FieldCapabilitiesAction(); public static final String NAME = "indices:data/read/field_caps"; diff --git a/server/src/main/java/org/elasticsearch/action/get/GetAction.java b/server/src/main/java/org/elasticsearch/action/get/GetAction.java index c9df5ffb98e00..a622fd5a8178b 100644 --- a/server/src/main/java/org/elasticsearch/action/get/GetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/GetAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetAction extends Action { +public class GetAction extends Action { public static final GetAction INSTANCE = new GetAction(); public static final String NAME = "indices:data/read/get"; diff --git a/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java index 1080b71fb9f65..9b69e33239b82 100644 --- a/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/MultiGetAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiGetAction extends Action { +public class MultiGetAction extends Action { public static final MultiGetAction INSTANCE = new MultiGetAction(); public static final String NAME = "indices:data/read/mget"; diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java index 9642f28b2a417..4f3e6068a2a23 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class IndexAction extends Action { +public class IndexAction extends Action { public static final IndexAction INSTANCE = new IndexAction(); public static final String NAME = "indices:data/write/index"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java index 03271f439c709..e0df57a6dadf6 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/DeletePipelineAction.java @@ -21,7 +21,7 @@ import 
org.elasticsearch.action.Action; -public class DeletePipelineAction extends Action { +public class DeletePipelineAction extends Action { public static final DeletePipelineAction INSTANCE = new DeletePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/delete"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java index 82e1a546b6e52..b2305227ac67f 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/GetPipelineAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class GetPipelineAction extends Action { +public class GetPipelineAction extends Action { public static final GetPipelineAction INSTANCE = new GetPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/get"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java b/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java index 991e233220726..6f5147c38bdbb 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/IngestActionForwarder.java @@ -47,7 +47,7 @@ public IngestActionForwarder(TransportService transportService) { ingestNodes = new DiscoveryNode[0]; } - public void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { + public void forwardIngestRequest(Action action, ActionRequest request, ActionListener listener) { transportService.sendRequest(randomIngestNode(), action.name(), request, new ActionListenerResponseHandler(listener, action::newResponse)); } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java index 068cbea08399b..c4784598ae75f 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class PutPipelineAction extends Action { +public class PutPipelineAction extends Action { public static final PutPipelineAction INSTANCE = new PutPipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/put"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java index 4da35ba25b7eb..afeb4e01fb0ff 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SimulatePipelineAction extends Action { +public class SimulatePipelineAction extends Action { public static final SimulatePipelineAction INSTANCE = new SimulatePipelineAction(); public static final String NAME = "cluster:admin/ingest/pipeline/simulate"; diff --git a/server/src/main/java/org/elasticsearch/action/main/MainAction.java b/server/src/main/java/org/elasticsearch/action/main/MainAction.java index 10fd1f9887c7d..831ddd0983fad 100644 --- a/server/src/main/java/org/elasticsearch/action/main/MainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/MainAction.java @@ 
-21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MainAction extends Action { +public class MainAction extends Action { public static final String NAME = "cluster:monitor/main"; public static final MainAction INSTANCE = new MainAction(); diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java index 4edda430c5cda..660ed1ee17860 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ClearScrollAction extends Action { +public class ClearScrollAction extends Action { public static final ClearScrollAction INSTANCE = new ClearScrollAction(); public static final String NAME = "indices:data/read/scroll/clear"; diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java index c2c8c4ce23256..298c7593f6c97 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiSearchAction extends Action { +public class MultiSearchAction extends Action { public static final MultiSearchAction INSTANCE = new MultiSearchAction(); public static final String NAME = "indices:data/read/msearch"; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchAction.java index 80bc1abcca8c6..e028f6c5cd524 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SearchAction extends Action { +public class SearchAction extends Action { public static final SearchAction INSTANCE = new SearchAction(); public static final String NAME = "indices:data/read/search"; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java index 209e3c7f583e2..ff72a7e5e51f3 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class SearchScrollAction extends Action { +public class SearchScrollAction extends Action { public static final SearchScrollAction INSTANCE = new SearchScrollAction(); public static final String NAME = "indices:data/read/scroll"; diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java index 056381cc7dc11..f2ba67e3a9f4c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/BroadcastOperationRequestBuilder.java @@ -27,7 +27,7 @@ public abstract class BroadcastOperationRequestBuilder, Response extends BroadcastResponse, RequestBuilder extends BroadcastOperationRequestBuilder> extends 
ActionRequestBuilder { - protected BroadcastOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected BroadcastOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java index 45d92801170f5..1269024aeca8c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/AcknowledgedRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class AcknowledgedRequestBuilder, Response extends AcknowledgedResponse, RequestBuilder extends AcknowledgedRequestBuilder> extends MasterNodeOperationRequestBuilder { - protected AcknowledgedRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected AcknowledgedRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java index 1302d2003da19..33a7b344743f4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeOperationRequestBuilder.java @@ -31,7 +31,7 @@ public abstract class MasterNodeOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeOperationRequestBuilder> extends ActionRequestBuilder { - protected MasterNodeOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected MasterNodeOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java index 96735f101e6a7..85a66d33db91e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/MasterNodeReadOperationRequestBuilder.java @@ -29,7 +29,7 @@ public abstract class MasterNodeReadOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends MasterNodeReadOperationRequestBuilder> extends MasterNodeOperationRequestBuilder { - protected MasterNodeReadOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected MasterNodeReadOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java index 26dedf15da80d..d8989ba10bc01 100644 --- a/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/info/ClusterInfoRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class ClusterInfoRequestBuilder, 
Response extends ActionResponse, Builder extends ClusterInfoRequestBuilder> extends MasterNodeReadOperationRequestBuilder { - protected ClusterInfoRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected ClusterInfoRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java index 37c06a419589a..f81e6c9cf41c7 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/NodesOperationRequestBuilder.java @@ -27,7 +27,7 @@ public abstract class NodesOperationRequestBuilder, Response extends BaseNodesResponse, RequestBuilder extends NodesOperationRequestBuilder> extends ActionRequestBuilder { - protected NodesOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected NodesOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java index 32fbaf70ab35c..9dc7a899d033c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/ReplicationRequestBuilder.java @@ -29,7 +29,7 @@ public abstract class ReplicationRequestBuilder, Response extends ActionResponse, RequestBuilder extends ReplicationRequestBuilder> extends ActionRequestBuilder { - protected ReplicationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected ReplicationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java index 0c5d26b0aed64..0880ed9fa6205 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java @@ -28,7 +28,7 @@ public abstract class InstanceShardOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends InstanceShardOperationRequestBuilder> extends ActionRequestBuilder { - protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java index 1de3479fb5d52..b5a2d67bc9d51 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/support/single/shard/SingleShardOperationRequestBuilder.java @@ -27,7 +27,7 @@ public abstract class SingleShardOperationRequestBuilder, Response extends ActionResponse, RequestBuilder extends SingleShardOperationRequestBuilder> extends ActionRequestBuilder { - protected SingleShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected SingleShardOperationRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java index cda4497437a7b..52e3c2cf44e33 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TasksRequestBuilder.java @@ -33,7 +33,7 @@ public class TasksRequestBuilder< RequestBuilder extends TasksRequestBuilder > extends ActionRequestBuilder { - protected TasksRequestBuilder(ElasticsearchClient client, Action action, Request request) { + protected TasksRequestBuilder(ElasticsearchClient client, Action action, Request request) { super(client, action, request); } diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java index f7a9eda6cc265..a894b3480f10f 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/MultiTermVectorsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class MultiTermVectorsAction extends Action { +public class MultiTermVectorsAction extends Action { public static final MultiTermVectorsAction INSTANCE = new MultiTermVectorsAction(); public static final String NAME = "indices:data/read/mtv"; diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java index ded987c52a085..e701efe93ba7a 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class TermVectorsAction extends Action { +public class TermVectorsAction extends Action { public static final TermVectorsAction INSTANCE = new TermVectorsAction(); public static final String NAME = "indices:data/read/tv"; diff --git a/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java index 734169283dd39..1c8c80b61cef1 100644 --- a/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/UpdateAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpdateAction extends Action { +public class UpdateAction extends Action { public static final UpdateAction INSTANCE = new UpdateAction(); public static final String NAME = "indices:data/write/update"; diff --git a/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java b/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java index a0021e6e4c1a8..284c2b8d51420 100644 --- 
a/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java +++ b/server/src/main/java/org/elasticsearch/client/ElasticsearchClient.java @@ -39,7 +39,7 @@ public interface ElasticsearchClient { * @return A future allowing to get back the response. */ ActionFuture execute( - Action action, Request request); + Action action, Request request); /** * Executes a generic action, denoted by an {@link Action}. @@ -51,7 +51,7 @@ ActionFuture The response type. */ void execute( - Action action, Request request, ActionListener listener); + Action action, Request request, ActionListener listener); /** * Returns the threadpool used to execute requests on this client diff --git a/server/src/main/java/org/elasticsearch/client/FilterClient.java b/server/src/main/java/org/elasticsearch/client/FilterClient.java index ac94cdf017680..bfccabac58043 100644 --- a/server/src/main/java/org/elasticsearch/client/FilterClient.java +++ b/server/src/main/java/org/elasticsearch/client/FilterClient.java @@ -63,7 +63,7 @@ public void close() { @Override protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { in().execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java index 180c0f21b8d9a..a0934ba633dd5 100644 --- a/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java +++ b/server/src/main/java/org/elasticsearch/client/ParentTaskAssigningClient.java @@ -61,7 +61,7 @@ public Client unwrap() { protected < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + > void doExecute(Action action, Request request, ActionListener listener) { request.setParentTask(parentTask); super.doExecute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java index 3f214c90b6701..9e50fa56fab60 100644 --- a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.support.AbstractClient; @@ -43,10 +42,10 @@ */ public class NodeClient extends AbstractClient { - private Map actions; + private Map actions; /** * The id of the local {@link DiscoveryNode}. Useful for generating task ids from tasks returned by - * {@link #executeLocally(GenericAction, ActionRequest, TaskListener)}. + * {@link #executeLocally(Action, ActionRequest, TaskListener)}. 
*/ private Supplier localNodeId; private RemoteClusterService remoteClusterService; @@ -55,7 +54,7 @@ public NodeClient(Settings settings, ThreadPool threadPool) { super(settings, threadPool); } - public void initialize(Map actions, Supplier localNodeId, + public void initialize(Map actions, Supplier localNodeId, RemoteClusterService remoteClusterService) { this.actions = actions; this.localNodeId = localNodeId; @@ -71,7 +70,7 @@ public void close() { public < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder - > void doExecute(Action action, Request request, ActionListener listener) { + > void doExecute(Action action, Request request, ActionListener listener) { // Discard the task because the Client interface doesn't use it. executeLocally(action, request, listener); } @@ -83,7 +82,7 @@ > void doExecute(Action action, Request request, ActionListen */ public < Request extends ActionRequest, Response extends ActionResponse - > Task executeLocally(GenericAction action, Request request, ActionListener listener) { + > Task executeLocally(Action action, Request request, ActionListener listener) { return transportAction(action).execute(request, listener); } @@ -93,13 +92,13 @@ > Task executeLocally(GenericAction action, Request request, */ public < Request extends ActionRequest, Response extends ActionResponse - > Task executeLocally(GenericAction action, Request request, TaskListener listener) { + > Task executeLocally(Action action, Request request, TaskListener listener) { return transportAction(action).execute(request, listener); } /** * The id of the local {@link DiscoveryNode}. Useful for generating task ids from tasks returned by - * {@link #executeLocally(GenericAction, ActionRequest, TaskListener)}. + * {@link #executeLocally(Action, ActionRequest, TaskListener)}. 
*/ public String getLocalNodeId() { return localNodeId.get(); @@ -111,7 +110,7 @@ public String getLocalNodeId() { @SuppressWarnings("unchecked") private < Request extends ActionRequest, Response extends ActionResponse - > TransportAction transportAction(GenericAction action) { + > TransportAction transportAction(Action action) { if (actions == null) { throw new IllegalStateException("NodeClient has not been initialized"); } diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index dc70da4e61f7e..12db219f8ec78 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -385,7 +385,7 @@ public final AdminClient admin() { @Override public final ActionFuture execute( - Action action, Request request) { + Action action, Request request) { PlainActionFuture actionFuture = PlainActionFuture.newFuture(); execute(action, request, actionFuture); return actionFuture; @@ -396,12 +396,12 @@ public final Ac */ @Override public final void execute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { listener = threadedWrapper.wrap(listener); doExecute(action, request, listener); } - protected abstract > void doExecute(Action action, Request request, ActionListener listener); + protected abstract > void doExecute(Action action, Request request, ActionListener listener); @Override public ActionFuture index(final IndexRequest request) { @@ -698,13 +698,13 @@ static class ClusterAdmin implements ClusterAdminClient { @Override public ActionFuture execute( - Action action, Request request) { + Action action, Request request) { return client.execute(action, request); } @Override public void execute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { client.execute(action, request, listener); } @@ -1238,13 +1238,13 @@ static class IndicesAdmin implements IndicesAdminClient { @Override public ActionFuture execute( - Action action, Request request) { + Action action, Request request) { return client.execute(action, request); } @Override public void execute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { client.execute(action, request, listener); } @@ -1764,7 +1764,7 @@ public void getSettings(GetSettingsRequest request, ActionListener headers) { return new FilterClient(this) { @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected > void doExecute(Action action, Request request, ActionListener listener) { ThreadContext threadContext = threadPool().getThreadContext(); try (ThreadContext.StoredContext ctx = threadContext.stashAndMergeHeaders(headers)) { super.doExecute(action, request, listener); diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java index ecdecc4457bdd..53f6dea21c7d1 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportClient.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; 
-import org.elasticsearch.action.GenericAction; import org.elasticsearch.client.support.AbstractClient; import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.node.DiscoveryNode; @@ -209,10 +208,10 @@ private static ClientTemplate buildTemplate(Settings providedSettings, Settings // construct the list of client actions final List actionPlugins = pluginsService.filterPlugins(ActionPlugin.class); - final List clientActions = + final List clientActions = actionPlugins.stream().flatMap(p -> p.getClientActions().stream()).collect(Collectors.toList()); // add all the base actions - final List> baseActions = + final List> baseActions = actionModule.getActions().values().stream().map(ActionPlugin.ActionHandler::getAction).collect(Collectors.toList()); clientActions.addAll(baseActions); final TransportProxyClient proxy = new TransportProxyClient(settings, transportService, nodesService, clientActions); @@ -378,7 +377,7 @@ public void close() { } @Override - protected > void doExecute(Action action, Request request, ActionListener listener) { + protected > void doExecute(Action action, Request request, ActionListener listener) { proxy.execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java b/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java index 249fd54ef69d9..d79e2a9119e6a 100644 --- a/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java +++ b/server/src/main/java/org/elasticsearch/client/transport/TransportProxyClient.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.TransportActionNodeProxy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.transport.TransportService; @@ -41,19 +40,17 @@ final class TransportProxyClient { private final Map proxies; TransportProxyClient(Settings settings, TransportService transportService, - TransportClientNodesService nodesService, List actions) { + TransportClientNodesService nodesService, List actions) { this.nodesService = nodesService; Map proxies = new HashMap<>(); - for (GenericAction action : actions) { - if (action instanceof Action) { - proxies.put((Action) action, new TransportActionNodeProxy(settings, action, transportService)); - } + for (Action action : actions) { + proxies.put(action, new TransportActionNodeProxy(settings, action, transportService)); } this.proxies = unmodifiableMap(proxies); } public > void execute(final Action action, + ActionRequestBuilder> void execute(final Action action, final Request request, ActionListener listener) { final TransportActionNodeProxy proxy = proxies.get(action); assert proxy != null : "no proxy found for action: " + action; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java index 5ffccef23f51c..227814e24302e 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkByScrollRequestBuilder.java @@ -35,7 +35,7 @@ public abstract class AbstractBulkByScrollRequestBuilder< private final SearchRequestBuilder source; protected 
AbstractBulkByScrollRequestBuilder(ElasticsearchClient client, - Action action, SearchRequestBuilder source, Request request) { + Action action, SearchRequestBuilder source, Request request) { super(client, action, request); this.source = source; } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java index d8cf2f49149f7..519c6e062d565 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/AbstractBulkIndexByScrollRequestBuilder.java @@ -30,7 +30,7 @@ public abstract class AbstractBulkIndexByScrollRequestBuilder< extends AbstractBulkByScrollRequestBuilder { protected AbstractBulkIndexByScrollRequestBuilder(ElasticsearchClient client, - Action action, SearchRequestBuilder search, Request request) { + Action action, SearchRequestBuilder search, Request request) { super(client, action, search, request); } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java index 639c615727fe0..c7cfe28e2c0be 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class DeleteByQueryAction extends Action { +public class DeleteByQueryAction extends Action { public static final DeleteByQueryAction INSTANCE = new DeleteByQueryAction(); public static final String NAME = "indices:data/write/delete/byquery"; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java index ef2057806e1ce..202aa6e93f19d 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/DeleteByQueryRequestBuilder.java @@ -27,12 +27,12 @@ public class DeleteByQueryRequestBuilder extends AbstractBulkByScrollRequestBuilder { - public DeleteByQueryRequestBuilder(ElasticsearchClient client, Action action) { + public DeleteByQueryRequestBuilder(ElasticsearchClient client, Action action) { this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE)); } private DeleteByQueryRequestBuilder(ElasticsearchClient client, - Action action, + Action action, SearchRequestBuilder search) { super(client, action, search, new DeleteByQueryRequest(search.request())); } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java index c4e4a68d114d0..86d0c96602a3c 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class ReindexAction extends Action { +public class ReindexAction extends Action { public static final ReindexAction INSTANCE = new ReindexAction(); public static final String NAME = "indices:data/write/reindex"; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java 
b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java index 3d20bca989b83..c775ae197db93 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ReindexRequestBuilder.java @@ -31,13 +31,13 @@ public class ReindexRequestBuilder extends private final IndexRequestBuilder destination; public ReindexRequestBuilder(ElasticsearchClient client, - Action action) { + Action action) { this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE), new IndexRequestBuilder(client, IndexAction.INSTANCE)); } private ReindexRequestBuilder(ElasticsearchClient client, - Action action, + Action action, SearchRequestBuilder search, IndexRequestBuilder destination) { super(client, action, search, new ReindexRequest(search.request(), destination.request())); this.destination = destination; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java index b2fb0a205eb08..250a267ea255d 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryAction.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.Action; -public class UpdateByQueryAction extends Action { +public class UpdateByQueryAction extends Action { public static final UpdateByQueryAction INSTANCE = new UpdateByQueryAction(); public static final String NAME = "indices:data/write/update/byquery"; diff --git a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java index dc56bb9cf9de3..6b9600dea5eae 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/UpdateByQueryRequestBuilder.java @@ -27,12 +27,12 @@ public class UpdateByQueryRequestBuilder extends AbstractBulkIndexByScrollRequestBuilder { - public UpdateByQueryRequestBuilder(ElasticsearchClient client, Action action) { + public UpdateByQueryRequestBuilder(ElasticsearchClient client, Action action) { this(client, action, new SearchRequestBuilder(client, SearchAction.INSTANCE)); } private UpdateByQueryRequestBuilder(ElasticsearchClient client, - Action action, + Action action, SearchRequestBuilder search) { super(client, action, search, new UpdateByQueryRequest(search.request())); } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index aa9b3943e8863..4440153dd361e 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -27,7 +27,7 @@ import org.elasticsearch.ElasticsearchTimeoutException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionModule; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.search.SearchExecutionStatsCollector; import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchTransportService; @@ -540,7 +540,7 @@ protected Node(final Environment environment, Collection .map(injector::getInstance).collect(Collectors.toList())); resourcesToClose.addAll(pluginLifecycleComponents); this.pluginLifecycleComponents = 
Collections.unmodifiableList(pluginLifecycleComponents); - client.initialize(injector.getInstance(new Key>() {}), + client.initialize(injector.getInstance(new Key>() {}), () -> clusterService.localNode().getId(), transportService.getRemoteClusterService()); logger.debug("initializing HTTP handlers ..."); diff --git a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java index b7281e1026a63..1e0f83958bbac 100644 --- a/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/CompletionPersistentTaskAction.java @@ -48,7 +48,7 @@ * Action that is used by executor node to indicate that the persistent action finished or failed on the node and needs to be * removed from the cluster state in case of successful completion or restarted on some other node in case of failure. */ -public class CompletionPersistentTaskAction extends Action { +public class CompletionPersistentTaskAction extends Action { public static final CompletionPersistentTaskAction INSTANCE = new CompletionPersistentTaskAction(); public static final String NAME = "cluster:admin/persistent/completion"; diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java index d0c791e3df046..665a803a2d9cf 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksService.java @@ -137,7 +137,7 @@ public void sendRemoveRequest(final String taskId, final ActionListener - void execute(final Req request, final Action action, final ActionListener> listener) { + void execute(final Req request, final Action action, final ActionListener> listener) { try { final ThreadContext threadContext = client.threadPool().getThreadContext(); final Supplier supplier = threadContext.newRestorableContext(false); diff --git a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java index f278b5bcc5e22..38890e6a12c12 100644 --- a/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/RemovePersistentTaskAction.java @@ -42,7 +42,7 @@ import java.io.IOException; import java.util.Objects; -public class RemovePersistentTaskAction extends Action { +public class RemovePersistentTaskAction extends Action { public static final RemovePersistentTaskAction INSTANCE = new RemovePersistentTaskAction(); public static final String NAME = "cluster:admin/persistent/remove"; diff --git a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java index 6b338c2469717..eb71b7ad13641 100644 --- a/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/StartPersistentTaskAction.java @@ -49,7 +49,7 @@ /** * This action can be used to add the record for the persistent action to the cluster state. 
*/ -public class StartPersistentTaskAction extends Action { +public class StartPersistentTaskAction extends Action { public static final StartPersistentTaskAction INSTANCE = new StartPersistentTaskAction(); public static final String NAME = "cluster:admin/persistent/start"; diff --git a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java index a639e4bde5360..040db78aa450b 100644 --- a/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java +++ b/server/src/main/java/org/elasticsearch/persistent/UpdatePersistentTaskStatusAction.java @@ -44,7 +44,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class UpdatePersistentTaskStatusAction extends Action { +public class UpdatePersistentTaskStatusAction extends Action { public static final UpdatePersistentTaskStatusAction INSTANCE = new UpdatePersistentTaskStatusAction(); public static final String NAME = "cluster:admin/persistent/update_status"; diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index 7454d74349ea6..eb8b7130d7054 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; @@ -65,10 +65,10 @@ public interface ActionPlugin { } /** - * Client actions added by this plugin. This defaults to all of the {@linkplain GenericAction} in + * Client actions added by this plugin. This defaults to all of the {@linkplain Action} in * {@linkplain ActionPlugin#getActions()}. */ - default List getClientActions() { + default List getClientActions() { return getActions().stream().map(a -> a.action).collect(Collectors.toList()); } @@ -111,7 +111,7 @@ default UnaryOperator getRestHandlerWrapper(ThreadContext threadCon } final class ActionHandler { - private final GenericAction action; + private final Action action; private final Class> transportAction; private final Class[] supportTransportActions; @@ -119,14 +119,14 @@ final class ActionHandler action, Class> transportAction, - Class... supportTransportActions) { + public ActionHandler(Action action, Class> transportAction, + Class... 
supportTransportActions) { this.action = action; this.transportAction = transportAction; this.supportTransportActions = supportTransportActions; } - public GenericAction getAction() { + public Action getAction() { return action; } diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java index 70a07db12475f..aca8cdccaddb8 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterAwareClient.java @@ -45,7 +45,7 @@ final class RemoteClusterAwareClient extends AbstractClient { @Override protected > - void doExecute(Action action, Request request, ActionListener listener) { + void doExecute(Action action, Request request, ActionListener listener) { remoteClusterService.ensureConnected(clusterAlias, ActionListener.wrap(res -> { Transport.Connection connection = remoteClusterService.getConnection(clusterAlias); service.sendRequest(connection, action.name(), request, TransportRequestOptions.EMPTY, diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index a766a3e3c5635..f79cb02d83aa5 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -88,7 +88,7 @@ protected FakeTransportAction(Settings settings, String actionName, ThreadPool t protected void doExecute(FakeRequest request, ActionListener listener) { } } - class FakeAction extends GenericAction { + class FakeAction extends Action { protected FakeAction() { super("fake"); } diff --git a/server/src/test/java/org/elasticsearch/action/GenericActionTests.java b/server/src/test/java/org/elasticsearch/action/ActionTests.java similarity index 93% rename from server/src/test/java/org/elasticsearch/action/GenericActionTests.java rename to server/src/test/java/org/elasticsearch/action/ActionTests.java index 1bbff4b2a99b5..c159d36ca9158 100644 --- a/server/src/test/java/org/elasticsearch/action/GenericActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionTests.java @@ -21,10 +21,10 @@ import org.elasticsearch.test.ESTestCase; -public class GenericActionTests extends ESTestCase { +public class ActionTests extends ESTestCase { public void testEquals() { - class FakeAction extends GenericAction { + class FakeAction extends Action { protected FakeAction(String name) { super(name); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index a98c9088b8dc0..bac8f4a8730da 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -45,7 +45,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContent.Params; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.plugins.ActionPlugin; @@ -323,7 +322,7 @@ protected NodeResponse nodeOperation(NodeRequest request) { } - public static class TestTaskAction 
extends Action { + public static class TestTaskAction extends Action { public static final TestTaskAction INSTANCE = new TestTaskAction(); public static final String NAME = "cluster:admin/tasks/test"; @@ -340,7 +339,7 @@ public NodesResponse newResponse() { public static class NodesRequestBuilder extends NodesOperationRequestBuilder { - protected NodesRequestBuilder(ElasticsearchClient client, Action action) { + protected NodesRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new NodesRequest("test")); } @@ -454,7 +453,7 @@ protected void taskOperation(UnblockTestTasksRequest request, Task task, ActionL } - public static class UnblockTestTasksAction extends Action { + public static class UnblockTestTasksAction extends Action { public static final UnblockTestTasksAction INSTANCE = new UnblockTestTasksAction(); public static final String NAME = "cluster:admin/tasks/testunblock"; @@ -472,7 +471,7 @@ public UnblockTestTasksResponse newResponse() { public static class UnblockTestTasksRequestBuilder extends ActionRequestBuilder { protected UnblockTestTasksRequestBuilder(ElasticsearchClient client, - Action action) { + Action action) { super(client, action, new UnblockTestTasksRequest()); } } diff --git a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index db9f9d83c816a..5dea451dbacfd 100644 --- a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -21,7 +21,7 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; @@ -56,7 +56,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { .put(ThreadContext.PREFIX + ".key2", "val 2") .build(); - private static final GenericAction[] ACTIONS = new GenericAction[] { + private static final Action[] ACTIONS = new Action[] { // client actions GetAction.INSTANCE, SearchAction.INSTANCE, DeleteAction.INSTANCE, DeleteStoredScriptAction.INSTANCE, IndexAction.INSTANCE, @@ -92,7 +92,7 @@ public void tearDown() throws Exception { terminate(threadPool); } - protected abstract Client buildClient(Settings headersSettings, GenericAction[] testedActions); + protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); public void testActions() { diff --git a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java index 00b293bcffb34..bff713a225482 100644 --- a/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java +++ b/server/src/test/java/org/elasticsearch/client/ParentTaskAssigningClientTests.java @@ -41,8 +41,8 @@ public void testSetsParentId() { protected < Request extends ActionRequest, Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder - > void doExecute( Action action, Request request, - ActionListener listener) { + > void doExecute(Action action, Request request, + ActionListener listener) { assertEquals(parentTaskId[0], 
request.getParentTask()); super.doExecute(action, request, listener); } diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index 5e739cc325040..dc1f146b452de 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.TransportAction; @@ -39,7 +39,7 @@ public class NodeClientHeadersTests extends AbstractClientHeadersTestCase { private static final ActionFilters EMPTY_FILTERS = new ActionFilters(Collections.emptySet()); @Override - protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) { + protected Client buildClient(Settings headersSettings, Action[] testedActions) { Settings settings = HEADER_SETTINGS; Actions actions = new Actions(settings, threadPool, testedActions); NodeClient client = new NodeClient(settings, threadPool); @@ -47,10 +47,10 @@ protected Client buildClient(Settings headersSettings, GenericAction[] testedAct return client; } - private static class Actions extends HashMap { + private static class Actions extends HashMap { - private Actions(Settings settings, ThreadPool threadPool, GenericAction[] actions) { - for (GenericAction action : actions) { + private Actions(Settings settings, ThreadPool threadPool, Action[] actions) { + for (Action action : actions) { put(action, new InternalTransportAction(settings, action.name(), threadPool)); } } diff --git a/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java index b9ba6aa0050e4..82806938a0b32 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientHeadersTests.java @@ -20,7 +20,7 @@ package org.elasticsearch.client.transport; import org.elasticsearch.Version; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.admin.cluster.node.liveness.LivenessResponse; import org.elasticsearch.action.admin.cluster.node.liveness.TransportLivenessAction; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; @@ -76,7 +76,7 @@ public void tearDown() throws Exception { } @Override - protected Client buildClient(Settings headersSettings, GenericAction[] testedActions) { + protected Client buildClient(Settings headersSettings, Action[] testedActions) { transportService = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null); transportService.start(); transportService.acceptIncomingRequests(); diff --git a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 8093e7d38a14d..069f965ac6b3b 100644 --- a/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ 
-162,7 +162,7 @@ public List> getSettings() { } public static class UpdateInternalIndexAction - extends Action { + extends Action { private static final UpdateInternalIndexAction INSTANCE = new UpdateInternalIndexAction(); private static final String NAME = "indices:admin/settings/update-internal-index"; diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index 063a861b5c315..ddcda1058039c 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -383,7 +383,7 @@ protected AllocatedPersistentTask createTask(long id, String type, String action } } - public static class TestTaskAction extends Action { + public static class TestTaskAction extends Action { public static final TestTaskAction INSTANCE = new TestTaskAction(); public static final String NAME = "cluster:admin/persistent/task_test"; diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java index 93cb3475a1249..8ef08a259821a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/NoOpClient.java @@ -54,7 +54,7 @@ public NoOpClient(String testName) { protected > - void doExecute(Action action, Request request, ActionListener listener) { + void doExecute(Action action, Request request, ActionListener listener) { listener.onResponse(null); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java index 309246ca9d760..a2c8d609be084 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/DeleteLicenseAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class DeleteLicenseAction extends Action { +public class DeleteLicenseAction extends Action { public static final DeleteLicenseAction INSTANCE = new DeleteLicenseAction(); public static final String NAME = "cluster:admin/xpack/license/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java index 75d5c2da10680..be97ff59172fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetBasicStatusAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class GetBasicStatusAction extends Action { +public class GetBasicStatusAction extends Action { public static final GetBasicStatusAction INSTANCE = new GetBasicStatusAction(); public static final String NAME = "cluster:admin/xpack/license/basic_status"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java index be8d46e31d0b3..a6f19ea95b1e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetLicenseAction.java @@ -7,7 +7,7 @@ import 
org.elasticsearch.action.Action; -public class GetLicenseAction extends Action { +public class GetLicenseAction extends Action { public static final GetLicenseAction INSTANCE = new GetLicenseAction(); public static final String NAME = "cluster:monitor/xpack/license/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java index 619300b1d7791..69c14e1b6dc83 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/GetTrialStatusAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class GetTrialStatusAction extends Action { +public class GetTrialStatusAction extends Action { public static final GetTrialStatusAction INSTANCE = new GetTrialStatusAction(); public static final String NAME = "cluster:admin/xpack/license/trial_status"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java index 747632d9d1d8a..864bfa2b7780f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartBasicAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class PostStartBasicAction extends Action { +public class PostStartBasicAction extends Action { public static final PostStartBasicAction INSTANCE = new PostStartBasicAction(); public static final String NAME = "cluster:admin/xpack/license/start_basic"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java index c7817f73b91cb..609fa42caabc2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PostStartTrialAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class PostStartTrialAction extends Action { +public class PostStartTrialAction extends Action { public static final PostStartTrialAction INSTANCE = new PostStartTrialAction(); public static final String NAME = "cluster:admin/xpack/license/start_trial"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java index b96f13190eddb..d93957a9d8bae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/PutLicenseAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class PutLicenseAction extends Action { +public class PutLicenseAction extends Action { public static final PutLicenseAction INSTANCE = new PutLicenseAction(); public static final String NAME = "cluster:admin/xpack/license/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 8b6d72aac3ce8..ff3091bde93b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -81,8 +81,8 @@ public 
static v */ public static > void executeAsyncWithOrigin( - Client client, String origin, Action action, Request request, - ActionListener listener) { + Client client, String origin, Action action, Request request, + ActionListener listener) { final ThreadContext threadContext = client.threadPool().getThreadContext(); final Supplier supplier = threadContext.newRestorableContext(false); try (ThreadContext.StoredContext ignore = stashWithOrigin(threadContext, origin)) { @@ -140,8 +140,8 @@ public static T executeWithHeaders(Map> void executeWithHeadersAsync( - Map headers, String origin, Client client, Action action, Request request, - ActionListener listener) { + Map headers, String origin, Client client, Action action, Request request, + ActionListener listener) { Map filteredHeaders = headers.entrySet().stream().filter(e -> SECURITY_HEADER_FILTERS.contains(e.getKey())) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); @@ -178,7 +178,7 @@ private ClientWithOrigin(Client in, String origin) { @Override protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { final Supplier supplier = in().threadPool().getThreadContext().newRestorableContext(false); try (ThreadContext.StoredContext ignore = in().threadPool().getThreadContext().stashContext()) { in().threadPool().getThreadContext().putTransient(ACTION_ORIGIN_TRANSIENT_NAME, origin); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 049089e62cf26..2894138248b8c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -5,7 +5,7 @@ */ package org.elasticsearch.xpack.core; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; @@ -204,7 +204,7 @@ static Settings additionalSettings(final Settings settings, final boolean enable } @Override - public List getClientActions() { + public List getClientActions() { return Arrays.asList( // deprecation DeprecationInfoAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index db36aabf7ac6a..920081572cfc7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -11,7 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.GenericAction; +import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; @@ -261,8 +261,8 @@ public Collection createComponents(Client client, ClusterService cluster } @Override - public List getClientActions() { - List actions = new ArrayList<>(); + public List getClientActions() { + List actions = new ArrayList<>(); actions.addAll(licensing.getClientActions()); actions.addAll(super.getClientActions()); return actions; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java index 585153000a24b..8690973d7678e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoAction.java @@ -8,7 +8,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.license.XPackInfoResponse; -public class XPackInfoAction extends Action { +public class XPackInfoAction extends Action { public static final String NAME = "cluster:monitor/xpack/info"; public static final XPackInfoAction INSTANCE = new XPackInfoAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java index 906aaf3f4dada..40311a4e88457 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class XPackUsageAction extends Action { +public class XPackUsageAction extends Action { public static final String NAME = "cluster:monitor/xpack/usage"; public static final XPackUsageAction INSTANCE = new XPackUsageAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java index d3b7bd1452901..09c6a0d57524e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecationInfoAction.java @@ -37,7 +37,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class DeprecationInfoAction extends Action { +public class DeprecationInfoAction extends Action { public static final DeprecationInfoAction INSTANCE = new DeprecationInfoAction(); public static final String NAME = "cluster:admin/xpack/deprecation/info"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java index ebc8e1be32051..5503eb692558b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/graph/action/GraphExploreAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class GraphExploreAction extends Action { +public class GraphExploreAction extends Action { public static final GraphExploreAction INSTANCE = new GraphExploreAction(); public static final String NAME = "indices:data/read/xpack/graph/explore"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java index 3710f5b96f60c..019bad54a5d24 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/CloseJobAction.java @@ -27,7 +27,7 @@ import 
java.io.IOException; import java.util.Objects; -public class CloseJobAction extends Action { +public class CloseJobAction extends Action { public static final CloseJobAction INSTANCE = new CloseJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/close"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java index 4cdbd03cf4752..a6e5f8da88cd7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarAction.java @@ -19,7 +19,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteCalendarAction extends Action { +public class DeleteCalendarAction extends Action { public static final DeleteCalendarAction INSTANCE = new DeleteCalendarAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java index 9eaf643be7688..24fc55f59ccb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteCalendarEventAction.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteCalendarEventAction extends Action { +public class DeleteCalendarEventAction extends Action { public static final DeleteCalendarEventAction INSTANCE = new DeleteCalendarEventAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/events/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index 44580763601ed..fba0fe4cf1f6c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteDatafeedAction extends Action { +public class DeleteDatafeedAction extends Action { public static final DeleteDatafeedAction INSTANCE = new DeleteDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java index 56361cd7ed2a5..271d8ad5fa33c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteExpiredDataAction.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteExpiredDataAction extends Action { +public class DeleteExpiredDataAction extends Action { public static final DeleteExpiredDataAction INSTANCE = new DeleteExpiredDataAction(); public static final String NAME = "cluster:admin/xpack/ml/delete_expired_data"; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java index 8faa5e5876178..b271c6fbdf02e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteFilterAction.java @@ -20,7 +20,7 @@ import java.util.Objects; -public class DeleteFilterAction extends Action { +public class DeleteFilterAction extends Action { public static final DeleteFilterAction INSTANCE = new DeleteFilterAction(); public static final String NAME = "cluster:admin/xpack/ml/filters/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index bbc59594b586e..71e5f8fad71d7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -24,7 +24,7 @@ import java.util.Map; import java.util.Objects; -public class DeleteJobAction extends Action { +public class DeleteJobAction extends Action { public static final DeleteJobAction INSTANCE = new DeleteJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java index 70c3a90d4fad2..a80fbc7863825 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteModelSnapshotAction.java @@ -19,7 +19,7 @@ import java.io.IOException; -public class DeleteModelSnapshotAction extends Action { +public class DeleteModelSnapshotAction extends Action { public static final DeleteModelSnapshotAction INSTANCE = new DeleteModelSnapshotAction(); public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java index 4532c422d25ea..558d25f62de32 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FinalizeJobExecutionAction.java @@ -16,7 +16,7 @@ import java.io.IOException; -public class FinalizeJobExecutionAction extends Action { +public class FinalizeJobExecutionAction extends Action { public static final FinalizeJobExecutionAction INSTANCE = new FinalizeJobExecutionAction(); public static final String NAME = "cluster:internal/xpack/ml/job/finalize_job_execution"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java index 206128a8784ca..ef086b5126228 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java @@ -26,7 +26,7 @@ import java.util.Date; import java.util.Objects; -public class FlushJobAction extends Action { +public class FlushJobAction extends Action { public static final FlushJobAction INSTANCE = new FlushJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/flush"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java index 75ba8f195370c..327941a2c055c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ForecastJobAction.java @@ -24,7 +24,7 @@ import java.io.IOException; import java.util.Objects; -public class ForecastJobAction extends Action { +public class ForecastJobAction extends Action { public static final ForecastJobAction INSTANCE = new ForecastJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/forecast"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java index 6abbd2dfbdfd9..29b3d4bb8d557 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java @@ -30,7 +30,7 @@ import java.io.IOException; import java.util.Objects; -public class GetBucketsAction extends Action { +public class GetBucketsAction extends Action { public static final GetBucketsAction INSTANCE = new GetBucketsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/buckets/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java index 19cf114772d8a..6c707fcbdb726 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarEventsAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class GetCalendarEventsAction extends Action { +public class GetCalendarEventsAction extends Action { public static final GetCalendarEventsAction INSTANCE = new GetCalendarEventsAction(); public static final String NAME = "cluster:monitor/xpack/ml/calendars/events/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java index 60ab7adbbcedf..1fb945ef2422b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCalendarsAction.java @@ -29,7 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetCalendarsAction extends Action { +public class GetCalendarsAction extends Action { public static final GetCalendarsAction INSTANCE = new GetCalendarsAction(); public static final String NAME = "cluster:monitor/xpack/ml/calendars/get"; diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java index 3bb459013995d..4b4dcb5f79cc5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetCategoriesAction.java @@ -29,7 +29,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetCategoriesAction extends Action { +public class GetCategoriesAction extends Action { public static final GetCategoriesAction INSTANCE = new GetCategoriesAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/categories/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java index 5bd6e96e79cf9..d75e03b87f20d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsAction.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.util.Objects; -public class GetDatafeedsAction extends Action { +public class GetDatafeedsAction extends Action { public static final GetDatafeedsAction INSTANCE = new GetDatafeedsAction(); public static final String NAME = "cluster:monitor/xpack/ml/datafeeds/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java index 9dc2042d9cfd3..823c158d2a6e8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetDatafeedsStatsAction.java @@ -30,7 +30,7 @@ import java.util.Map; import java.util.Objects; -public class GetDatafeedsStatsAction extends Action { +public class GetDatafeedsStatsAction extends Action { public static final GetDatafeedsStatsAction INSTANCE = new GetDatafeedsStatsAction(); public static final String NAME = "cluster:monitor/xpack/ml/datafeeds/stats/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java index 541366248ee7c..f13f303396e08 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetFiltersAction.java @@ -27,7 +27,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class GetFiltersAction extends Action { +public class GetFiltersAction extends Action { public static final GetFiltersAction INSTANCE = new GetFiltersAction(); public static final String NAME = "cluster:admin/xpack/ml/filters/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java index 803bfda63ebc2..35f0675c6237a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetInfluencersAction.java @@ -29,7 +29,7 @@ import java.util.Objects; public class GetInfluencersAction -extends Action { +extends Action { public static final GetInfluencersAction INSTANCE = new GetInfluencersAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/influencers/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java index 8382345773450..063efc7145271 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsAction.java @@ -25,7 +25,7 @@ import java.io.IOException; import java.util.Objects; -public class GetJobsAction extends Action { +public class GetJobsAction extends Action { public static final GetJobsAction INSTANCE = new GetJobsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java index c2c3c9b8d6c32..1ec9f0c473232 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetJobsStatsAction.java @@ -39,7 +39,7 @@ import java.util.Map; import java.util.Objects; -public class GetJobsStatsAction extends Action { +public class GetJobsStatsAction extends Action { public static final GetJobsStatsAction INSTANCE = new GetJobsStatsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/stats/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java index 3bd990577603b..c349fa6527be9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetModelSnapshotsAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class GetModelSnapshotsAction extends Action { +public class GetModelSnapshotsAction extends Action { public static final GetModelSnapshotsAction INSTANCE = new GetModelSnapshotsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/model_snapshots/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java index 81dad665577c8..e6ace63f44a7f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetOverallBucketsAction.java @@ -48,7 +48,7 @@ * the interval. *

    */ -public class GetOverallBucketsAction extends Action { +public class GetOverallBucketsAction extends Action { public static final GetOverallBucketsAction INSTANCE = new GetOverallBucketsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/overall_buckets/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java index 7d1fb839704af..cd76c54f45277 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetRecordsAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class GetRecordsAction extends Action { +public class GetRecordsAction extends Action { public static final GetRecordsAction INSTANCE = new GetRecordsAction(); public static final String NAME = "cluster:monitor/xpack/ml/job/results/records/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java index 7bafe5056af67..451679f364600 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/IsolateDatafeedAction.java @@ -35,7 +35,7 @@ * task ensures the current datafeed task can complete inconsequentially while * the datafeed persistent task may be stopped or reassigned on another node. */ -public class IsolateDatafeedAction extends Action { +public class IsolateDatafeedAction extends Action { public static final IsolateDatafeedAction INSTANCE = new IsolateDatafeedAction(); public static final String NAME = "cluster:internal/xpack/ml/datafeed/isolate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java index 5edb988351b0e..96440ebe50306 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/KillProcessAction.java @@ -16,7 +16,7 @@ import java.io.IOException; import java.util.Objects; -public class KillProcessAction extends Action { +public class KillProcessAction extends Action { public static final KillProcessAction INSTANCE = new KillProcessAction(); public static final String NAME = "cluster:internal/xpack/ml/job/kill/process"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java index 38544e576171b..b0d635202c9fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlInfoAction.java @@ -21,7 +21,7 @@ import java.util.Map; import java.util.Objects; -public class MlInfoAction extends Action { +public class MlInfoAction extends Action { public static final MlInfoAction INSTANCE = new MlInfoAction(); public static final String NAME = "cluster:monitor/xpack/ml/info/get"; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index 0ce24aafefa79..0c7380349bde1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -31,7 +31,7 @@ import java.io.IOException; import java.util.Objects; -public class OpenJobAction extends Action { +public class OpenJobAction extends Action { public static final OpenJobAction INSTANCE = new OpenJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/open"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java index f1d86dd784abe..12ebed924dbcc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PersistJobAction.java @@ -16,7 +16,7 @@ import java.io.IOException; import java.util.Objects; -public class PersistJobAction extends Action { +public class PersistJobAction extends Action { public static final PersistJobAction INSTANCE = new PersistJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/persist"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java index aa4fc9ee2ea44..beff26eb34d82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostCalendarEventsAction.java @@ -30,7 +30,7 @@ import java.util.Objects; import java.util.stream.Collectors; -public class PostCalendarEventsAction extends Action { +public class PostCalendarEventsAction extends Action { public static final PostCalendarEventsAction INSTANCE = new PostCalendarEventsAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/events/post"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java index 69dd7e69ca149..ccc745d9742ad 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PostDataAction.java @@ -24,7 +24,7 @@ import java.io.IOException; import java.util.Objects; -public class PostDataAction extends Action { +public class PostDataAction extends Action { public static final PostDataAction INSTANCE = new PostDataAction(); public static final String NAME = "cluster:admin/xpack/ml/job/data/post"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java index 46eb6578e86ec..af8a99b9828bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PreviewDatafeedAction.java @@ -25,7 +25,7 @@ import 
java.io.InputStream; import java.util.Objects; -public class PreviewDatafeedAction extends Action { +public class PreviewDatafeedAction extends Action { public static final PreviewDatafeedAction INSTANCE = new PreviewDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/preview"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java index ace5869dc508d..345c4f1a96db4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutCalendarAction.java @@ -28,7 +28,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class PutCalendarAction extends Action { +public class PutCalendarAction extends Action { public static final PutCalendarAction INSTANCE = new PutCalendarAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java index 5142abac08622..a0c757a0be6a0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutDatafeedAction.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.Objects; -public class PutDatafeedAction extends Action { +public class PutDatafeedAction extends Action { public static final PutDatafeedAction INSTANCE = new PutDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java index 8269a105b6463..0ed5e8f22aadb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutFilterAction.java @@ -25,7 +25,7 @@ import java.util.Objects; -public class PutFilterAction extends Action { +public class PutFilterAction extends Action { public static final PutFilterAction INSTANCE = new PutFilterAction(); public static final String NAME = "cluster:admin/xpack/ml/filters/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java index a556a58d50380..7e85198d2143e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutJobAction.java @@ -25,7 +25,7 @@ import java.util.List; import java.util.Objects; -public class PutJobAction extends Action { +public class PutJobAction extends Action { public static final PutJobAction INSTANCE = new PutJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java index 
d5b14abd6f9c8..316598b6ab505 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/RevertModelSnapshotAction.java @@ -29,8 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class RevertModelSnapshotAction -extends Action { +public class RevertModelSnapshotAction extends Action { public static final RevertModelSnapshotAction INSTANCE = new RevertModelSnapshotAction(); public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/revert"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java index 9df1f2fbd2e9b..5c45d33e744d6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartDatafeedAction.java @@ -34,7 +34,7 @@ import java.util.Objects; import java.util.function.LongSupplier; -public class StartDatafeedAction extends Action { +public class StartDatafeedAction extends Action { public static final ParseField START_TIME = new ParseField("start"); public static final ParseField END_TIME = new ParseField("end"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java index 4df27e1b984a2..0117225141085 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StopDatafeedAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class StopDatafeedAction extends Action { +public class StopDatafeedAction extends Action { public static final StopDatafeedAction INSTANCE = new StopDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeed/stop"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java index dfe6499d2c4e8..e70a2e3189b01 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateCalendarJobAction.java @@ -18,7 +18,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateCalendarJobAction extends Action { +public class UpdateCalendarJobAction extends Action { public static final UpdateCalendarJobAction INSTANCE = new UpdateCalendarJobAction(); public static final String NAME = "cluster:admin/xpack/ml/calendars/jobs/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java index 8ed170b8603b2..6ba34efa839b5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateDatafeedAction.java @@ -20,7 +20,7 @@ import java.io.IOException; import java.util.Objects; -public class 
UpdateDatafeedAction extends Action { +public class UpdateDatafeedAction extends Action { public static final UpdateDatafeedAction INSTANCE = new UpdateDatafeedAction(); public static final String NAME = "cluster:admin/xpack/ml/datafeeds/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java index 020d4c22ee31f..d4fe804c451af 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateJobAction.java @@ -24,7 +24,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateJobAction extends Action { +public class UpdateJobAction extends Action { public static final UpdateJobAction INSTANCE = new UpdateJobAction(); public static final String NAME = "cluster:admin/xpack/ml/job/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java index 9703fed25ef9f..1414719693f2c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateModelSnapshotAction.java @@ -29,7 +29,7 @@ import java.io.IOException; import java.util.Objects; -public class UpdateModelSnapshotAction extends Action { +public class UpdateModelSnapshotAction extends Action { public static final UpdateModelSnapshotAction INSTANCE = new UpdateModelSnapshotAction(); public static final String NAME = "cluster:admin/xpack/ml/job/model_snapshots/update"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java index b99a0b7400782..00b1d67bfff69 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateProcessAction.java @@ -24,7 +24,7 @@ import java.util.List; import java.util.Objects; -public class UpdateProcessAction extends Action { +public class UpdateProcessAction extends Action { public static final UpdateProcessAction INSTANCE = new UpdateProcessAction(); public static final String NAME = "cluster:internal/xpack/ml/job/update/process"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java index aba3d492e488c..0e807664d86fa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateDetectorAction.java @@ -21,7 +21,7 @@ import java.io.IOException; import java.util.Objects; -public class ValidateDetectorAction extends Action { +public class ValidateDetectorAction extends Action { public static final ValidateDetectorAction INSTANCE = new ValidateDetectorAction(); public static final String NAME = "cluster:admin/xpack/ml/job/validate/detector"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java index c4ef75417c241..00a8813c1f18b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/ValidateJobConfigAction.java @@ -22,7 +22,7 @@ import java.util.List; import java.util.Objects; -public class ValidateJobConfigAction extends Action { +public class ValidateJobConfigAction extends Action { public static final ValidateJobConfigAction INSTANCE = new ValidateJobConfigAction(); public static final String NAME = "cluster:admin/xpack/ml/job/validate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java index 05ea4b8ed2c6d..49fb085191e4e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/monitoring/action/MonitoringBulkAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class MonitoringBulkAction extends Action { +public class MonitoringBulkAction extends Action { public static final MonitoringBulkAction INSTANCE = new MonitoringBulkAction(); public static final String NAME = "cluster:admin/xpack/monitoring/bulk"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java index 71ad404d44aa1..e59c6738d86b1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/DeleteRollupJobAction.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.util.Objects; -public class DeleteRollupJobAction extends Action { +public class DeleteRollupJobAction extends Action { public static final DeleteRollupJobAction INSTANCE = new DeleteRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java index 55f0d6139ae50..ea98c2f4628e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java @@ -28,7 +28,7 @@ import java.util.Map; import java.util.Objects; -public class GetRollupCapsAction extends Action { +public class GetRollupCapsAction extends Action { public static final GetRollupCapsAction INSTANCE = new GetRollupCapsAction(); public static final String NAME = "cluster:monitor/xpack/rollup/get/caps"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java index 5372773867034..d5a5e7a07fadf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupJobsAction.java @@ -35,7 +35,7 @@ import java.util.List; import java.util.Objects; -public class GetRollupJobsAction extends Action { +public class GetRollupJobsAction extends Action { public static final GetRollupJobsAction INSTANCE = new GetRollupJobsAction(); public static final String NAME = "cluster:monitor/xpack/rollup/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java index 6283b295cdb16..9c3767d418856 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/PutRollupJobAction.java @@ -25,7 +25,7 @@ import java.util.Map; import java.util.Objects; -public class PutRollupJobAction extends Action { +public class PutRollupJobAction extends Action { public static final PutRollupJobAction INSTANCE = new PutRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java index c79eacb80ae02..3980282321cc8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/RollupSearchAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.ElasticsearchClient; -public class RollupSearchAction extends Action { +public class RollupSearchAction extends Action { public static final RollupSearchAction INSTANCE = new RollupSearchAction(); public static final String NAME = "indices:admin/xpack/rollup/search"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java index a3e4b4054e2fa..e3dcb1a882f9f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StartRollupJobAction.java @@ -25,7 +25,7 @@ import java.util.Collections; import java.util.Objects; -public class StartRollupJobAction extends Action { +public class StartRollupJobAction extends Action { public static final StartRollupJobAction INSTANCE = new StartRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/start"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java index 7b9b06f8ac891..eb48d640f21eb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/StopRollupJobAction.java @@ -24,7 +24,7 @@ import java.util.Collections; import java.util.Objects; -public class StopRollupJobAction extends Action { +public class StopRollupJobAction extends Action { public static final StopRollupJobAction INSTANCE = new 
StopRollupJobAction(); public static final String NAME = "cluster:admin/xpack/rollup/stop"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java index a1c865a030651..7c3cd58a7f467 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/realm/ClearRealmCacheAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class ClearRealmCacheAction extends Action { +public class ClearRealmCacheAction extends Action { public static final ClearRealmCacheAction INSTANCE = new ClearRealmCacheAction(); public static final String NAME = "cluster:admin/xpack/security/realm/cache/clear"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java index 7eebd671ab1df..096b5380181fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/ClearRolesCacheAction.java @@ -10,7 +10,7 @@ /** * The action for clearing the cache used by native roles that are stored in an index. */ -public class ClearRolesCacheAction extends Action { +public class ClearRolesCacheAction extends Action { public static final ClearRolesCacheAction INSTANCE = new ClearRolesCacheAction(); public static final String NAME = "cluster:admin/xpack/security/roles/cache/clear"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java index 3261ea94f4515..6130f107fb726 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/DeleteRoleAction.java @@ -10,7 +10,7 @@ /** * Action for deleting a role from the security index */ -public class DeleteRoleAction extends Action { +public class DeleteRoleAction extends Action { public static final DeleteRoleAction INSTANCE = new DeleteRoleAction(); public static final String NAME = "cluster:admin/xpack/security/role/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java index 3489c2493e762..53126440afb9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/GetRolesAction.java @@ -10,7 +10,7 @@ /** * Action to retrieve a role from the security index */ -public class GetRolesAction extends Action { +public class GetRolesAction extends Action { public static final GetRolesAction INSTANCE = new GetRolesAction(); public static final String NAME = "cluster:admin/xpack/security/role/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java index a9aa2c8f29aec..8396625e262ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/role/PutRoleAction.java @@ -10,7 +10,7 @@ /** * Action for adding a role to the security index */ -public class PutRoleAction extends Action { +public class PutRoleAction extends Action { public static final PutRoleAction INSTANCE = new PutRoleAction(); public static final String NAME = "cluster:admin/xpack/security/role/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java index 065be4638dde9..6057daf959531 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/DeleteRoleMappingAction.java @@ -11,7 +11,7 @@ * Action for deleting a role-mapping from the * org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ -public class DeleteRoleMappingAction extends Action { +public class DeleteRoleMappingAction extends Action { public static final DeleteRoleMappingAction INSTANCE = new DeleteRoleMappingAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java index 12797ed4d2b0f..e1488bf70913e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/GetRoleMappingsAction.java @@ -9,10 +9,10 @@ /** * Action to retrieve one or more role-mappings from X-Pack security - + * * see org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore */ -public class GetRoleMappingsAction extends Action { +public class GetRoleMappingsAction extends Action { public static final GetRoleMappingsAction INSTANCE = new GetRoleMappingsAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java index 4b9c2d542cce2..9c3068adf127f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/rolemapping/PutRoleMappingAction.java @@ -10,7 +10,7 @@ /** * Action for adding a role to the security index */ -public class PutRoleMappingAction extends Action { +public class PutRoleMappingAction extends Action { public static final PutRoleMappingAction INSTANCE = new PutRoleMappingAction(); public static final String NAME = "cluster:admin/xpack/security/role_mapping/put"; diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java index 0cd2235f84384..fca733a3938a7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlAuthenticateAction.java @@ -10,7 +10,7 @@ /** * Action for authenticating using SAML assertions */ -public final class SamlAuthenticateAction extends Action { +public final class SamlAuthenticateAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/authenticate"; public static final SamlAuthenticateAction INSTANCE = new SamlAuthenticateAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java index 51ce0f00ee66b..dc5aa09627564 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlInvalidateSessionAction.java @@ -10,7 +10,7 @@ /** * Action to perform IdP-initiated logout for a SAML-SSO user */ -public final class SamlInvalidateSessionAction extends Action { +public final class SamlInvalidateSessionAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/invalidate"; public static final SamlInvalidateSessionAction INSTANCE = new SamlInvalidateSessionAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java index 8c2cb6f4599d6..9ea3a29ca4ad9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlLogoutAction.java @@ -10,7 +10,7 @@ /** * Action for initiating a logout process for a SAML-SSO user */ -public final class SamlLogoutAction extends Action { +public final class SamlLogoutAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/logout"; public static final SamlLogoutAction INSTANCE = new SamlLogoutAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java index 10c2eb719983e..12ad23ca50199 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/saml/SamlPrepareAuthenticationAction.java @@ -10,7 +10,7 @@ /** * Action for initiating an authentication process using SAML assertions */ -public final class SamlPrepareAuthenticationAction extends Action { +public final class SamlPrepareAuthenticationAction extends Action { public static final String NAME = "cluster:admin/xpack/security/saml/prepare"; public static final SamlPrepareAuthenticationAction INSTANCE = new 
SamlPrepareAuthenticationAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java index 9f2e937151ced..7b913f594e582 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenAction.java @@ -10,7 +10,7 @@ /** * Action for creating a new token */ -public final class CreateTokenAction extends Action { +public final class CreateTokenAction extends Action { public static final String NAME = "cluster:admin/xpack/security/token/create"; public static final CreateTokenAction INSTANCE = new CreateTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java index d94744dff373f..90790de7cd395 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/CreateTokenRequestBuilder.java @@ -17,7 +17,7 @@ public final class CreateTokenRequestBuilder extends ActionRequestBuilder { - public CreateTokenRequestBuilder(ElasticsearchClient client, Action action) { + public CreateTokenRequestBuilder(ElasticsearchClient client, Action action) { super(client, action, new CreateTokenRequest()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java index eca864546b222..679ee0756f638 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/InvalidateTokenAction.java @@ -10,7 +10,7 @@ /** * Action for invalidating a given token */ -public final class InvalidateTokenAction extends Action { +public final class InvalidateTokenAction extends Action { public static final String NAME = "cluster:admin/xpack/security/token/invalidate"; public static final InvalidateTokenAction INSTANCE = new InvalidateTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java index c847aa32898d3..3478af2ec00f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/token/RefreshTokenAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public final class RefreshTokenAction extends Action { +public final class RefreshTokenAction extends Action { public static final String NAME = "cluster:admin/xpack/security/token/refresh"; public static final RefreshTokenAction INSTANCE = new RefreshTokenAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java index 05c53063eb16d..18cfe85c8cb0c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class AuthenticateAction extends Action { +public class AuthenticateAction extends Action { public static final String NAME = "cluster:admin/xpack/security/user/authenticate"; public static final AuthenticateAction INSTANCE = new AuthenticateAction(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java index 23bfff8d80124..d01717a64eadc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/ChangePasswordAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class ChangePasswordAction extends Action { +public class ChangePasswordAction extends Action { public static final ChangePasswordAction INSTANCE = new ChangePasswordAction(); public static final String NAME = "cluster:admin/xpack/security/user/change_password"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java index a62381b6ecc6b..78666759dc0a7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/DeleteUserAction.java @@ -10,7 +10,7 @@ /** * Action for deleting a native user. 
*/ -public class DeleteUserAction extends Action { +public class DeleteUserAction extends Action { public static final DeleteUserAction INSTANCE = new DeleteUserAction(); public static final String NAME = "cluster:admin/xpack/security/user/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java index 5c9142671f4b2..49532049ba908 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/GetUsersAction.java @@ -10,7 +10,7 @@ /** * Action for retrieving a user from the security index */ -public class GetUsersAction extends Action { +public class GetUsersAction extends Action { public static final GetUsersAction INSTANCE = new GetUsersAction(); public static final String NAME = "cluster:admin/xpack/security/user/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java index 5db27db93ec91..30bb44a2c1c33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/HasPrivilegesAction.java @@ -12,7 +12,7 @@ * This action is testing whether a user has the specified * {@link RoleDescriptor.IndicesPrivileges privileges} */ -public class HasPrivilegesAction extends Action { +public class HasPrivilegesAction extends Action { public static final HasPrivilegesAction INSTANCE = new HasPrivilegesAction(); public static final String NAME = "cluster:admin/xpack/security/user/has_privileges"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java index 6009f89e69f40..20bbde2366b5e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/PutUserAction.java @@ -10,7 +10,7 @@ /** * Action for putting (adding/updating) a native user. 
*/ -public class PutUserAction extends Action { +public class PutUserAction extends Action { public static final PutUserAction INSTANCE = new PutUserAction(); public static final String NAME = "cluster:admin/xpack/security/user/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java index ec010cc17a9b2..0368cdf7d7dbc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/SetEnabledAction.java @@ -10,7 +10,7 @@ /** * This action is for setting the enabled flag on a native or reserved user */ -public class SetEnabledAction extends Action { +public class SetEnabledAction extends Action { public static final SetEnabledAction INSTANCE = new SetEnabledAction(); public static final String NAME = "cluster:admin/xpack/security/user/set_enabled"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java index e4115887f66a9..4e1a84773db7d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/GetCertificateInfoAction.java @@ -25,7 +25,7 @@ * Action to obtain information about X.509 (SSL/TLS) certificates that are being used by X-Pack. * The primary use case is for tracking the expiry dates of certificates. */ -public class GetCertificateInfoAction extends Action { +public class GetCertificateInfoAction extends Action { public static final GetCertificateInfoAction INSTANCE = new GetCertificateInfoAction(); public static final String NAME = "cluster:monitor/xpack/ssl/certificates/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java index 781cfe4d4d227..84a643ae72dae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeAction.java @@ -26,7 +26,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xpack.core.upgrade.IndexUpgradeServiceFields.UPGRADE_INDEX_OPTIONS; -public class IndexUpgradeAction extends Action { +public class IndexUpgradeAction extends Action { public static final IndexUpgradeAction INSTANCE = new IndexUpgradeAction(); public static final String NAME = "cluster:admin/xpack/upgrade"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java index bfa6de10b1b28..f17dfbdb90b9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/upgrade/actions/IndexUpgradeInfoAction.java @@ -26,7 +26,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; -public class 
IndexUpgradeInfoAction extends Action { +public class IndexUpgradeInfoAction extends Action { public static final IndexUpgradeInfoAction INSTANCE = new IndexUpgradeInfoAction(); public static final String NAME = "cluster:admin/xpack/upgrade/info"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java index d6a7259087462..04ec95a369af2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/ack/AckWatchAction.java @@ -10,7 +10,7 @@ /** * This action acks a watch in memory, and the index */ -public class AckWatchAction extends Action { +public class AckWatchAction extends Action { public static final AckWatchAction INSTANCE = new AckWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/ack"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java index 8f6f10ced9097..936a21711547e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/activate/ActivateWatchAction.java @@ -10,7 +10,7 @@ /** * This action acks a watch in memory, and the index */ -public class ActivateWatchAction extends Action { +public class ActivateWatchAction extends Action { public static final ActivateWatchAction INSTANCE = new ActivateWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/activate"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java index e40d6876f1c9f..8a16755a6dbce 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/delete/DeleteWatchAction.java @@ -10,7 +10,7 @@ /** * This action deletes an watch from in memory, the scheduler and the index */ -public class DeleteWatchAction extends Action { +public class DeleteWatchAction extends Action { public static final DeleteWatchAction INSTANCE = new DeleteWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/delete"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java index 5baa021d6f170..924f170959426 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/execute/ExecuteWatchAction.java @@ -11,7 +11,7 @@ * This action executes a watch, either ignoring the schedule and condition or just the schedule and can execute a subset of 
the actions, * optionally persisting the history entry */ -public class ExecuteWatchAction extends Action { +public class ExecuteWatchAction extends Action { public static final ExecuteWatchAction INSTANCE = new ExecuteWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/execute"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java index c411c0dbeb3f9..4df72a964b65f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/get/GetWatchAction.java @@ -8,7 +8,7 @@ /** * This action gets an watch by name */ -public class GetWatchAction extends org.elasticsearch.action.Action { +public class GetWatchAction extends org.elasticsearch.action.Action { public static final GetWatchAction INSTANCE = new GetWatchAction(); public static final String NAME = "cluster:monitor/xpack/watcher/watch/get"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java index faf2faae182f9..56cedc457bda7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/put/PutWatchAction.java @@ -10,7 +10,7 @@ /** * This action puts an watch into the watch index and adds it to the scheduler */ -public class PutWatchAction extends Action { +public class PutWatchAction extends Action { public static final PutWatchAction INSTANCE = new PutWatchAction(); public static final String NAME = "cluster:admin/xpack/watcher/watch/put"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java index a9682b2946d9a..0846bd10a80ff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/service/WatcherServiceAction.java @@ -8,7 +8,7 @@ import org.elasticsearch.action.Action; -public class WatcherServiceAction extends Action { +public class WatcherServiceAction extends Action { public static final WatcherServiceAction INSTANCE = new WatcherServiceAction(); public static final String NAME = "cluster:admin/xpack/watcher/service"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java index 330b6ace97797..59fcff090f59e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/transport/actions/stats/WatcherStatsAction.java @@ -10,7 +10,7 @@ /** * This Action gets the stats for the watcher plugin */ -public class WatcherStatsAction 
extends Action { +public class WatcherStatsAction extends Action { public static final WatcherStatsAction INSTANCE = new WatcherStatsAction(); public static final String NAME = "cluster:monitor/xpack/watcher/stats/dist"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java index 672f4507dc814..2310afe4f77e7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/oauth2/RestGetTokenAction.java @@ -71,7 +71,7 @@ public String getName() { protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client)throws IOException { try (XContentParser parser = request.contentParser()) { final CreateTokenRequest tokenRequest = PARSER.parse(parser, null); - final Action action = + final Action action = "refresh_token".equals(tokenRequest.getGrantType()) ? RefreshTokenAction.INSTANCE : CreateTokenAction.INSTANCE; return channel -> client.execute(action, tokenRequest, // this doesn't use the RestBuilderListener since we need to override the diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 15361d997a161..76d888d2c2e61 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -127,7 +127,7 @@ public void setup() throws Exception { protected > - void doExecute(Action action, Request request, ActionListener listener) { + void doExecute(Action action, Request request, ActionListener listener) { if (IndexAction.NAME.equals(action.name())) { assertThat(request, instanceOf(IndexRequest.class)); IndexRequest indexRequest = (IndexRequest) request; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java index 1a5adc2e5ef44..c17134093c593 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailMutedTests.java @@ -72,7 +72,7 @@ class IClient extends FilterClient { @Override protected > void doExecute( - Action action, Request request, ActionListener listener) { + Action action, Request request, ActionListener listener) { clientCalled.set(true); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java index ee570dcadda31..3d739d57f480c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStoreTests.java @@ 
-77,7 +77,7 @@ public void setupMocks() { Response extends ActionResponse, RequestBuilder extends ActionRequestBuilder > void doExecute( - Action action, + Action action, Request request, ActionListener listener) { requests.add(new Tuple<>(request, listener)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 7754d387f1527..928c9bbd1b143 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -66,7 +66,7 @@ public class SecurityIndexManagerTests extends ESTestCase { public static final String INDEX_NAME = ".security"; private static final String TEMPLATE_NAME = "SecurityIndexManagerTests-template"; private SecurityIndexManager manager; - private Map, Map>> actions; + private Map, Map>> actions; @Before public void setUpManager() { @@ -83,7 +83,7 @@ public void setUpManager() { protected > - void doExecute(Action action, Request request, + void doExecute(Action action, Request request, ActionListener listener) { final Map> map = actions.getOrDefault(action, new HashMap<>()); map.put(request, listener); diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java index 2c6daf2c4aef5..0908af76bebee 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlClearCursorAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class SqlClearCursorAction extends Action { +public class SqlClearCursorAction extends Action { public static final SqlClearCursorAction INSTANCE = new SqlClearCursorAction(); public static final String NAME = "indices:data/read/sql/close_cursor"; diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java index 82b233968d981..5b9a5b1c3ef47 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlQueryAction.java @@ -7,7 +7,7 @@ import org.elasticsearch.action.Action; -public class SqlQueryAction extends Action { +public class SqlQueryAction extends Action { public static final SqlQueryAction INSTANCE = new SqlQueryAction(); public static final String NAME = "indices:data/read/sql"; diff --git a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java index 3d44d26264eb3..978a11fbbb645 100644 --- a/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java +++ b/x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/plugin/SqlTranslateAction.java @@ -10,7 +10,7 @@ /** * Sql action for translating SQL queries into ES requests */ -public class SqlTranslateAction extends Action { +public class SqlTranslateAction 
extends Action { public static final SqlTranslateAction INSTANCE = new SqlTranslateAction(); public static final String NAME = "indices:data/read/sql/translate"; From 04e4e44409d8fe2d98793f8f137892e39a113c1f Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Tue, 19 Jun 2018 14:21:11 +0200 Subject: [PATCH 33/34] Add get stored script and delete stored script to high level REST API (#31355) Add get stored script and delete stored script to high level REST API Relates to #27205 --- .../client/RequestConverters.java | 19 ++ .../client/RestHighLevelClient.java | 60 ++++++ .../client/RequestConvertersTests.java | 28 +++ .../elasticsearch/client/StoredScriptsIT.java | 105 +++++++++ .../StoredScriptsDocumentationIT.java | 204 ++++++++++++++++++ .../high-level/script/delete_script.asciidoc | 81 +++++++ .../high-level/script/get_script.asciidoc | 77 +++++++ .../high-level/supported-apis.asciidoc | 11 + .../rest-api-spec/api/get_script.json | 4 + .../DeleteStoredScriptResponse.java | 5 + .../GetStoredScriptResponse.java | 84 +++++++- .../TransportGetStoredScriptAction.java | 2 +- .../cluster/RestGetStoredScriptAction.java | 42 +--- .../script/StoredScriptSource.java | 6 +- .../DeleteStoredScriptResponseTests.java | 46 ++++ .../GetStoredScriptResponseTests.java | 61 ++++++ 16 files changed, 789 insertions(+), 46 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java create mode 100644 docs/java-rest/high-level/script/delete_script.asciidoc create mode 100644 docs/java-rest/high-level/script/get_script.asciidoc create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index ab85af9f1fd7e..9d4582494eb91 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; @@ -892,6 +894,23 @@ static Request getTemplates(GetIndexTemplatesRequest getIndexTemplatesRequest) t return request; } + static Request getScript(GetStoredScriptRequest getStoredScriptRequest) { + String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + Params params = new Params(request); + 
params.withMasterTimeout(getStoredScriptRequest.masterNodeTimeout()); + return request; + } + + static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) { + String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(deleteStoredScriptRequest.id()).build(); + Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withTimeout(deleteStoredScriptRequest.timeout()); + params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout()); + return request; + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 536b85925a4ba..6905cfdb8f714 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -26,6 +26,10 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -652,6 +656,62 @@ public final FieldCapabilitiesResponse fieldCaps(FieldCapabilitiesRequest fieldC FieldCapabilitiesResponse::fromXContent, emptySet()); } + /** + * Get stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetStoredScriptResponse getScript(GetStoredScriptRequest request, RequestOptions options) throws IOException { + return performRequestAndParseEntity(request, RequestConverters::getScript, options, + GetStoredScriptResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously get stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getScriptAsync(GetStoredScriptRequest request, RequestOptions options, + ActionListener listener) { + performRequestAsyncAndParseEntity(request, RequestConverters::getScript, options, + GetStoredScriptResponse::fromXContent, listener, emptySet()); + } + + /** + * Delete stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public DeleteStoredScriptResponse deleteScript(DeleteStoredScriptRequest request, RequestOptions options) throws IOException { + return performRequestAndParseEntity(request, RequestConverters::deleteScript, options, + DeleteStoredScriptResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously delete stored script by id. + * See + * How to use scripts on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void deleteScriptAsync(DeleteStoredScriptRequest request, RequestOptions options, + ActionListener listener) { + performRequestAsyncAndParseEntity(request, RequestConverters::deleteScript, options, + DeleteStoredScriptResponse::fromXContent, listener, emptySet()); + } + /** * Asynchronously executes a request using the Field Capabilities API. * See Field Capabilities API diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 60f427b490462..e7d56a4332b82 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; @@ -1948,6 +1950,32 @@ public void testGetTemplateRequest() throws Exception { assertThat(request.getEntity(), nullValue()); } + public void testGetScriptRequest() { + GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script"); + Map expectedParams = new HashMap<>(); + setRandomMasterTimeout(getStoredScriptRequest, expectedParams); + + Request request = RequestConverters.getScript(getStoredScriptRequest); + assertThat(request.getEndpoint(), equalTo("/_scripts/" + getStoredScriptRequest.id())); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertThat(request.getEntity(), nullValue()); + } + + public void testDeleteScriptRequest() { + DeleteStoredScriptRequest deleteStoredScriptRequest = new DeleteStoredScriptRequest("x-script"); + + Map expectedParams = new HashMap<>(); + setRandomTimeout(deleteStoredScriptRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + setRandomMasterTimeout(deleteStoredScriptRequest, expectedParams); + + Request request = RequestConverters.deleteScript(deleteStoredScriptRequest); + assertThat(request.getEndpoint(), equalTo("/_scripts/" + deleteStoredScriptRequest.id())); 
+ assertThat(request.getMethod(), equalTo(HttpDelete.METHOD_NAME)); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertThat(request.getEntity(), nullValue()); + } + private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException { BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, REQUEST_BODY_CONTENT_TYPE, false); assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java new file mode 100644 index 0000000000000..e6d380a4cc0e1 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/StoredScriptsIT.java @@ -0,0 +1,105 @@ +package org.elasticsearch.client;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.StoredScriptSource; + +import java.util.Collections; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +public class StoredScriptsIT extends ESRestHighLevelClientTestCase { + + final String id = "calculate-score"; + + public void testGetStoredScript() throws Exception { + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + // TODO: change to HighLevel PutStoredScriptRequest when it will be ready + // so far - using low-level REST API + Response putResponse = + adminClient() + .performRequest("PUT", "/_scripts/calculate-score", emptyMap(), + new StringEntity("{\"script\":" + script + "}", + ContentType.APPLICATION_JSON)); + 
assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode()); + assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + + GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score"); + getRequest.masterNodeTimeout("50s"); + + GetStoredScriptResponse getResponse = execute(getRequest, highLevelClient()::getScript, + highLevelClient()::getScriptAsync); + + assertThat(getResponse.getSource(), equalTo(scriptSource)); + } + + public void testDeleteStoredScript() throws Exception { + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + // TODO: change to HighLevel PutStoredScriptRequest when it will be ready + // so far - using low-level REST API + Response putResponse = + adminClient() + .performRequest("PUT", "/_scripts/" + id, emptyMap(), + new StringEntity("{\"script\":" + script + "}", + ContentType.APPLICATION_JSON)); + assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode()); + assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + + DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest(id); + deleteRequest.masterNodeTimeout("50s"); + deleteRequest.timeout("50s"); + + DeleteStoredScriptResponse deleteResponse = execute(deleteRequest, highLevelClient()::deleteScript, + highLevelClient()::deleteScriptAsync); + + assertThat(deleteResponse.isAcknowledged(), equalTo(true)); + + GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id); + + final ElasticsearchStatusException statusException = expectThrows(ElasticsearchStatusException.class, + () -> execute(getRequest, highLevelClient()::getScript, + highLevelClient()::getScriptAsync)); + assertThat(statusException.status(), equalTo(RestStatus.NOT_FOUND)); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java new file mode 100644 index 0000000000000..0aadae73ce66d --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/StoredScriptsDocumentationIT.java @@ -0,0 +1,204 @@ +package org.elasticsearch.client.documentation;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.util.EntityUtils; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; +import org.elasticsearch.client.ESRestHighLevelClientTestCase; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.StoredScriptSource; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.equalTo; + +/** + * This class is used to generate the Java Stored Scripts API documentation. + * You need to wrap your code between two tags like: + * // tag::example + * // end::example + * + * Where example is your tag name. + * + * Then in the documentation, you can extract what is between tag and end tags with + * ["source","java",subs="attributes,callouts,macros"] + * -------------------------------------------------- + * include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[example] + * -------------------------------------------------- + * + * The column width of the code block is 84. If the code contains a line longer + * than 84, the line will be cut and a horizontal scroll bar will be displayed. 
+ * (the code indentation of the tag is not included in the width) + */ +public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase { + + public void testGetStoredScript() throws Exception { + RestHighLevelClient client = highLevelClient(); + + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + putStoredScript("calculate-score", scriptSource); + + { + // tag::get-stored-script-request + GetStoredScriptRequest request = new GetStoredScriptRequest("calculate-score"); // <1> + // end::get-stored-script-request + + // tag::get-stored-script-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueSeconds(50)); // <1> + request.masterNodeTimeout("50s"); // <2> + // end::get-stored-script-request-masterTimeout + + // tag::get-stored-script-execute + GetStoredScriptResponse getResponse = client.getScript(request, RequestOptions.DEFAULT); + // end::get-stored-script-execute + + // tag::get-stored-script-response + StoredScriptSource storedScriptSource = getResponse.getSource(); // <1> + + String lang = storedScriptSource.getLang(); // <2> + String source = storedScriptSource.getSource(); // <3> + Map options = storedScriptSource.getOptions(); // <4> + // end::get-stored-script-response + + assertThat(storedScriptSource, equalTo(scriptSource)); + + // tag::get-stored-script-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetStoredScriptResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-stored-script-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::get-stored-script-execute-async + client.getScriptAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::get-stored-script-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + } + + public void testDeleteStoredScript() throws Exception { + RestHighLevelClient client = highLevelClient(); + + final StoredScriptSource scriptSource = + new StoredScriptSource("painless", + "Math.log(_score * 2) + params.my_modifier", + Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType())); + + putStoredScript("calculate-score", scriptSource); + + // tag::delete-stored-script-request + DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest("calculate-score"); // <1> + // end::delete-stored-script-request + + // tag::delete-stored-script-request-masterTimeout + deleteRequest.masterNodeTimeout(TimeValue.timeValueSeconds(50)); // <1> + deleteRequest.masterNodeTimeout("50s"); // <2> + // end::delete-stored-script-request-masterTimeout + + // tag::delete-stored-script-request-timeout + deleteRequest.timeout(TimeValue.timeValueSeconds(60)); // <1> + deleteRequest.timeout("60s"); // <2> + // end::delete-stored-script-request-timeout + + // tag::delete-stored-script-execute + DeleteStoredScriptResponse deleteResponse = client.deleteScript(deleteRequest, RequestOptions.DEFAULT); + // end::delete-stored-script-execute + + // tag::delete-stored-script-response + boolean acknowledged = deleteResponse.isAcknowledged();// <1> + // end::delete-stored-script-response + + putStoredScript("calculate-score", scriptSource); + + // 
tag::delete-stored-script-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(DeleteStoredScriptResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::delete-stored-script-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::delete-stored-script-execute-async + client.deleteScriptAsync(deleteRequest, RequestOptions.DEFAULT, listener); // <1> + // end::delete-stored-script-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + private void putStoredScript(String id, StoredScriptSource scriptSource) throws IOException { + final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS)); + // TODO: change to HighLevel PutStoredScriptRequest when it will be ready + // so far - using low-level REST API + Response putResponse = + adminClient() + .performRequest("PUT", "/_scripts/" + id, emptyMap(), + new StringEntity("{\"script\":" + script + "}", + ContentType.APPLICATION_JSON)); + assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode()); + assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity())); + } +} diff --git a/docs/java-rest/high-level/script/delete_script.asciidoc b/docs/java-rest/high-level/script/delete_script.asciidoc new file mode 100644 index 0000000000000..79b3b0b324715 --- /dev/null +++ b/docs/java-rest/high-level/script/delete_script.asciidoc @@ -0,0 +1,81 @@ +[[java-rest-high-delete-stored-script]] + +=== Delete Stored Script API + +[[java-rest-high-delete-stored-script-request]] +==== Delete Stored Script Request + +A `DeleteStoredScriptRequest` requires an `id`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request] +-------------------------------------------------- +<1> The id of the script + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request-timeout] +-------------------------------------------------- +<1> Timeout to wait for the all the nodes to acknowledge the stored script is deleted as a `TimeValue` +<2> Timeout to wait for the all the nodes to acknowledge the stored script is deleted as a `String` + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-delete-stored-script-sync]] +==== Synchronous Execution +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute] +-------------------------------------------------- + +[[java-rest-high-delete-stored-script-async]] +==== Asynchronous Execution + +The asynchronous execution of 
a delete stored script request requires both the `DeleteStoredScriptRequest` +instance and an `ActionListener` instance to be passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute-async] +-------------------------------------------------- +<1> The `DeleteStoredScriptRequest` to execute and the `ActionListener` to use when +the execution completes + +[[java-rest-high-delete-stored-script-listener]] +===== Action Listener + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `DeleteStoredScriptResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-delete-stored-script-response]] +==== Delete Stored Script Response + +The returned `DeleteStoredScriptResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-response] +-------------------------------------------------- +<1> Indicates whether all of the nodes have acknowledged the request \ No newline at end of file diff --git a/docs/java-rest/high-level/script/get_script.asciidoc b/docs/java-rest/high-level/script/get_script.asciidoc new file mode 100644 index 0000000000000..a38bdad2bd6af --- /dev/null +++ b/docs/java-rest/high-level/script/get_script.asciidoc @@ -0,0 +1,77 @@ +[[java-rest-high-get-stored-script]] + +=== Get Stored Script API + +[[java-rest-high-get-stored-script-request]] +==== Get Stored Script Request + +A `GetStoredScriptRequest` requires an `id`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-request] +-------------------------------------------------- +<1> The id of the script + +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-get-stored-script-sync]] +==== Synchronous Execution +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute] +-------------------------------------------------- + +[[java-rest-high-get-stored-script-async]] +==== Asynchronous Execution + +The asynchronous execution of a get stored 
script request requires both the `GetStoredScriptRequest` +instance and an `ActionListener` instance to be passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute-async] +-------------------------------------------------- +<1> The `GetStoredScriptRequest` to execute and the `ActionListener` to use when +the execution completes + +[[java-rest-high-get-stored-script-listener]] +===== Action Listener + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `GetStoredScriptResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-get-stored-script-response]] +==== Get Stored Script Response + +The returned `GetStoredScriptResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-response] +-------------------------------------------------- +<1> The script object consists of a content and a metadata +<2> The language the script is written in, which defaults to `painless`. +<3> The content of the script +<4> Any named options that should be passed into the script. 
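Taken together, the request converters, client methods, and documentation above cover the full round trip for the new APIs. The snippet below is a minimal usage sketch, not part of the patch itself; it assumes an already-configured `RestHighLevelClient` named `client` and a previously stored script with id `calculate-score` (the id used in the tests above), uses only methods this change introduces, and omits imports as the surrounding documentation snippets do:

["source","java"]
--------------------------------------------------
// Fetch the stored script by id (optionally bounding the master node timeout).
GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score");
getRequest.masterNodeTimeout("50s");
GetStoredScriptResponse getResponse = client.getScript(getRequest, RequestOptions.DEFAULT);

// Inspect the script content and metadata returned by the cluster.
StoredScriptSource source = getResponse.getSource();
String lang = source.getLang();                  // e.g. "painless"
String code = source.getSource();                // the script body
Map<String, String> options = source.getOptions();

// Delete the stored script and check that the nodes acknowledged the removal.
DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest("calculate-score");
deleteRequest.timeout("60s");
DeleteStoredScriptResponse deleteResponse = client.deleteScript(deleteRequest, RequestOptions.DEFAULT);
boolean acknowledged = deleteResponse.isAcknowledged();
--------------------------------------------------

The asynchronous variants (`getScriptAsync`, `deleteScriptAsync`) follow the same pattern with an `ActionListener`, as shown in the listener examples above.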
\ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 4cd87a521d104..17acc8f13c04d 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -151,3 +151,14 @@ The Java High Level REST Client supports the following Tasks APIs: include::tasks/list_tasks.asciidoc[] include::tasks/cancel_tasks.asciidoc[] + +== Script APIs + +The Java High Level REST Client supports the following Scripts APIs: + +* <> +* <> + +include::script/get_script.asciidoc[] +include::script/delete_script.asciidoc[] + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json index 2240f0e1a0b75..0b2d6c5a5b9c2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/get_script.json @@ -13,6 +13,10 @@ } }, "params" : { + "master_timeout": { + "type" : "time", + "description" : "Specify timeout for connection to master" + } } }, "body": null diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java index 42f08ae73e06d..741c105866f46 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.admin.cluster.storedscripts; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.xcontent.XContentParser; public class DeleteStoredScriptResponse extends AcknowledgedResponse { @@ -29,4 +30,8 @@ public class DeleteStoredScriptResponse extends AcknowledgedResponse { public DeleteStoredScriptResponse(boolean acknowledged) { super(acknowledged); } + + public static DeleteStoredScriptResponse fromXContent(XContentParser parser) { + return new DeleteStoredScriptResponse(parseAcknowledged(parser)); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index a394fe17f217f..7bfbbe7ad4d7f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -21,25 +21,63 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.script.StoredScriptSource; import java.io.IOException; +import java.util.Objects; -public class GetStoredScriptResponse extends 
ActionResponse implements ToXContentObject { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +public class GetStoredScriptResponse extends ActionResponse implements StatusToXContentObject { + + public static final ParseField _ID_PARSE_FIELD = new ParseField("_id"); + public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); + public static final ParseField SCRIPT = new ParseField("script"); + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("GetStoredScriptResponse", + true, + (a, c) -> { + String id = (String) a[0]; + boolean found = (Boolean)a[1]; + StoredScriptSource scriptSource = (StoredScriptSource)a[2]; + return found ? new GetStoredScriptResponse(id, scriptSource) : new GetStoredScriptResponse(id, null); + }); + + static { + PARSER.declareField(constructorArg(), (p, c) -> p.text(), + _ID_PARSE_FIELD, ObjectParser.ValueType.STRING); + PARSER.declareField(constructorArg(), (p, c) -> p.booleanValue(), + FOUND_PARSE_FIELD, ObjectParser.ValueType.BOOLEAN); + PARSER.declareField(optionalConstructorArg(), (p, c) -> StoredScriptSource.fromXContent(p, true), + SCRIPT, ObjectParser.ValueType.OBJECT); + } + + private String id; private StoredScriptSource source; GetStoredScriptResponse() { } - GetStoredScriptResponse(StoredScriptSource source) { + GetStoredScriptResponse(String id, StoredScriptSource source) { + this.id = id; this.source = source; } + public String getId() { + return id; + } + /** * @return if a stored script and if not found null */ @@ -47,13 +85,30 @@ public StoredScriptSource getSource() { return source; } + @Override + public RestStatus status() { + return source != null ? 
RestStatus.OK : RestStatus.NOT_FOUND; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - source.toXContent(builder, params); + builder.startObject(); + + builder.field(_ID_PARSE_FIELD.getPreferredName(), id); + builder.field(FOUND_PARSE_FIELD.getPreferredName(), source != null); + if (source != null) { + builder.field(StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName()); + source.toXContent(builder, params); + } + builder.endObject(); return builder; } + public static GetStoredScriptResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -67,6 +122,10 @@ public void readFrom(StreamInput in) throws IOException { } else { source = null; } + + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + id = in.readString(); + } } @Override @@ -84,5 +143,22 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(source.getSource()); } } + if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeString(id); + } + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + GetStoredScriptResponse that = (GetStoredScriptResponse) o; + return Objects.equals(id, that.id) && + Objects.equals(source, that.source); + } + + @Override + public int hashCode() { + return Objects.hash(id, source); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java index 63f24f31f59bd..368e40b96b7b3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/TransportGetStoredScriptAction.java @@ -60,7 +60,7 @@ protected GetStoredScriptResponse newResponse() { @Override protected void masterOperation(GetStoredScriptRequest request, ClusterState state, ActionListener listener) throws Exception { - listener.onResponse(new GetStoredScriptResponse(scriptService.getStoredScript(state, request))); + listener.onResponse(new GetStoredScriptResponse(request.id(), scriptService.getStoredScript(state, request))); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java index 10050dda88235..1a14d50538237 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestGetStoredScriptAction.java @@ -19,19 +19,12 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; -import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import 
org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.script.StoredScriptSource; +import org.elasticsearch.rest.action.RestStatusToXContentListener; import java.io.IOException; @@ -39,9 +32,6 @@ public class RestGetStoredScriptAction extends BaseRestHandler { - public static final ParseField _ID_PARSE_FIELD = new ParseField("_id"); - public static final ParseField FOUND_PARSE_FIELD = new ParseField("found"); - public RestGetStoredScriptAction(Settings settings, RestController controller) { super(settings); @@ -57,33 +47,7 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, NodeClient client) throws IOException { String id = request.param("id"); GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id); - - return channel -> client.admin().cluster().getStoredScript(getRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(GetStoredScriptResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - builder.field(_ID_PARSE_FIELD.getPreferredName(), id); - - StoredScriptSource source = response.getSource(); - boolean found = source != null; - builder.field(FOUND_PARSE_FIELD.getPreferredName(), found); - - if (found) { - builder.startObject(StoredScriptSource.SCRIPT_PARSE_FIELD.getPreferredName()); - builder.field(StoredScriptSource.LANG_PARSE_FIELD.getPreferredName(), source.getLang()); - builder.field(StoredScriptSource.SOURCE_PARSE_FIELD.getPreferredName(), source.getSource()); - - if (source.getOptions().isEmpty() == false) { - builder.field(StoredScriptSource.OPTIONS_PARSE_FIELD.getPreferredName(), source.getOptions()); - } - - builder.endObject(); - } - - builder.endObject(); - - return new BytesRestResponse(found ? RestStatus.OK : RestStatus.NOT_FOUND, builder); - } - }); + getRequest.masterNodeTimeout(request.paramAsTime("master_timeout", getRequest.masterNodeTimeout())); + return channel -> client.admin().cluster().getStoredScript(getRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java index 11f8769c86b1f..885d72bdec6f5 100644 --- a/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java +++ b/server/src/main/java/org/elasticsearch/script/StoredScriptSource.java @@ -185,7 +185,7 @@ private StoredScriptSource build(boolean ignoreEmpty) { } } - private static final ObjectParser PARSER = new ObjectParser<>("stored script source", Builder::new); + private static final ObjectParser PARSER = new ObjectParser<>("stored script source", true, Builder::new); static { // Defines the fields necessary to parse a Script as XContent using an ObjectParser. 
@@ -481,7 +481,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field(LANG_PARSE_FIELD.getPreferredName(), lang); builder.field(SOURCE_PARSE_FIELD.getPreferredName(), source); - builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options); + if (options.isEmpty() == false) { + builder.field(OPTIONS_PARSE_FIELD.getPreferredName(), options); + } builder.endObject(); return builder; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java new file mode 100644 index 0000000000000..375a672263060 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/DeleteStoredScriptResponseTests.java @@ -0,0 +1,46 @@ +package org.elasticsearch.action.admin.cluster.storedscripts;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; + +public class DeleteStoredScriptResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected DeleteStoredScriptResponse doParseInstance(XContentParser parser) throws IOException { + return DeleteStoredScriptResponse.fromXContent(parser); + } + + @Override + protected DeleteStoredScriptResponse createBlankInstance() { + return new DeleteStoredScriptResponse(); + } + + @Override + protected DeleteStoredScriptResponse createTestInstance() { + return new DeleteStoredScriptResponse(randomBoolean()); + } + + @Override + protected DeleteStoredScriptResponse mutateInstance(DeleteStoredScriptResponse instance) throws IOException { + return new DeleteStoredScriptResponse(instance.isAcknowledged() == false); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java new file mode 100644 index 0000000000000..1c92c0c8c2bf7 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponseTests.java @@ -0,0 +1,61 @@ +package org.elasticsearch.action.admin.cluster.storedscripts;/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.StoredScriptSource; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.Collections; +import java.util.Map; +import java.util.function.Predicate; + +public class GetStoredScriptResponseTests extends AbstractStreamableXContentTestCase { + + @Override + protected GetStoredScriptResponse doParseInstance(XContentParser parser) throws IOException { + return GetStoredScriptResponse.fromXContent(parser); + } + + @Override + protected GetStoredScriptResponse createBlankInstance() { + return new GetStoredScriptResponse(); + } + + @Override + protected GetStoredScriptResponse createTestInstance() { + return new GetStoredScriptResponse(randomAlphaOfLengthBetween(1, 10), randomScriptSource()); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return s -> "script.options".equals(s); + } + + private static StoredScriptSource randomScriptSource() { + final String lang = randomFrom("lang", "painless", "mustache"); + final String source = randomAlphaOfLengthBetween(1, 10); + final Map options = randomBoolean() + ? Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()) + : Collections.emptyMap(); + return new StoredScriptSource(lang, source, options); + } +} From 2396cbd449ce1e811ca43f29042151c5f8f5d79a Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Tue, 19 Jun 2018 14:21:11 +0200 Subject: [PATCH 34/34] Add get stored script and delete stored script to high level REST API - post backport fix Relates to #27205 --- .../admin/cluster/storedscripts/GetStoredScriptResponse.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java index 7bfbbe7ad4d7f..4cf686b9c282c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/GetStoredScriptResponse.java @@ -123,7 +123,7 @@ public void readFrom(StreamInput in) throws IOException { source = null; } - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_4_0)) { id = in.readString(); } } @@ -143,7 +143,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(source.getSource()); } } - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeString(id); } }
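
The version guard adjusted in this follow-up has to match the oldest release that actually ships the
change: after the backport to 6.4, the new `id` field is exchanged with nodes that are at least on
`Version.V_6_4_0` and skipped for older nodes, otherwise a mixed 6.4/7.0 cluster would read the stream
out of sync. Condensed from the `GetStoredScriptResponse` changes above (unrelated fields elided), the
guarded read and write look like:

["source","java"]
--------------------------------------------------
@Override
public void readFrom(StreamInput in) throws IOException {
    super.readFrom(in);
    // ... fields understood by every supported version are read first ...
    if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
        // the sender is new enough to have written the id, so it is safe to read it
        id = in.readString();
    }
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    super.writeTo(out);
    // ... fields understood by every supported version are written first ...
    if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
        // never send the new field to a node that does not know how to read it
        out.writeString(id);
    }
}
--------------------------------------------------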