Return List instead of an array from settings (#26903)
Today we return a `String[]` that requires copying values on every
access. Since we already store the setting as a list, we can return the
unmodifiable list directly instead. This makes list / array access in settings
a much cheaper operation, especially when lists are large.
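As a rough before/after sketch of a call site (the setting key is only illustrative; `putList` and `getAsList` are the methods introduced by this change):

```java
import java.util.List;

import org.elasticsearch.common.settings.Settings;

public class GetAsListSketch {
    public static void main(String[] args) {
        // putList replaces the old putArray on Settings.Builder
        Settings settings = Settings.builder()
            .putList("cluster.routing.allocation.awareness.attributes", "zone", "rack")
            .build();

        // Before: String[] attrs = settings.getAsArray(key) copied the stored
        // list into a fresh array on every call.
        // After: the stored, unmodifiable list is returned as-is.
        List<String> attrs = settings.getAsList("cluster.routing.allocation.awareness.attributes");
        System.out.println(attrs); // [zone, rack]
    }
}
```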
s1monw committed Oct 9, 2017
1 parent bf4c364 commit cdd7c1e
Showing 67 changed files with 322 additions and 332 deletions.
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation.decider;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.carrotsearch.hppc.ObjectIntHashMap;
@@ -85,7 +86,7 @@ public class AwarenessAllocationDecider extends AllocationDecider {

private volatile String[] awarenessAttributes;

private volatile Map<String, String[]> forcedAwarenessAttributes;
private volatile Map<String, List<String>> forcedAwarenessAttributes;

public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
super(settings);
@@ -97,11 +98,11 @@ public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSett
}

private void setForcedAwarenessAttributes(Settings forceSettings) {
Map<String, String[]> forcedAwarenessAttributes = new HashMap<>();
Map<String, List<String>> forcedAwarenessAttributes = new HashMap<>();
Map<String, Settings> forceGroups = forceSettings.getAsGroups();
for (Map.Entry<String, Settings> entry : forceGroups.entrySet()) {
String[] aValues = entry.getValue().getAsArray("values");
if (aValues.length > 0) {
List<String> aValues = entry.getValue().getAsList("values");
if (aValues.size() > 0) {
forcedAwarenessAttributes.put(entry.getKey(), aValues);
}
}
@@ -169,7 +170,7 @@ private Decision underCapacity(ShardRouting shardRouting, RoutingNode node, Rout
}

int numberOfAttributes = nodesPerAttribute.size();
String[] fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
List<String> fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
if (fullValues != null) {
for (String fullValue : fullValues) {
if (!shardPerAttribute.containsKey(fullValue)) {
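For context, a sketch of the forced-awareness settings this decider parses, and how each group's "values" entry is now read as a list (the keys follow the usual cluster.routing.allocation.awareness.* scheme; the zone names are made up):

```java
import java.util.List;
import java.util.Map;

import org.elasticsearch.common.settings.Settings;

public class ForcedAwarenessSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            .put("cluster.routing.allocation.awareness.attributes", "zone")
            .putList("cluster.routing.allocation.awareness.force.zone.values", "zone1", "zone2")
            .build();

        // Each group under "force." carries a "values" entry that is now read
        // with getAsList(...) instead of getAsArray(...).
        Settings force = settings.getByPrefix("cluster.routing.allocation.awareness.force.");
        Map<String, Settings> groups = force.getAsGroups();
        for (Map.Entry<String, Settings> entry : groups.entrySet()) {
            List<String> values = entry.getValue().getAsList("values");
            System.out.println(entry.getKey() + " -> " + values); // zone -> [zone1, zone2]
        }
    }
}
```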
@@ -804,14 +804,14 @@ private static class ListSetting<T> extends Setting<List<T>> {

private ListSetting(String key, Function<Settings, List<String>> defaultStringValue, Function<String, List<T>> parser,
Property... properties) {
super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser,
super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), parser,
properties);
this.defaultStringValue = defaultStringValue;
}

@Override
public String getRaw(Settings settings) {
String[] array = settings.getAsArray(getKey(), null);
List<String> array = settings.getAsList(getKey(), null);
return array == null ? defaultValue.apply(settings) : arrayToParsableString(array);
}

@@ -823,11 +823,11 @@ boolean hasComplexMatcher() {
@Override
public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) {
if (exists(source) == false) {
String[] asArray = defaultSettings.getAsArray(getKey(), null);
if (asArray == null) {
builder.putArray(getKey(), defaultStringValue.apply(defaultSettings));
List<String> asList = defaultSettings.getAsList(getKey(), null);
if (asList == null) {
builder.putList(getKey(), defaultStringValue.apply(defaultSettings));
} else {
builder.putArray(getKey(), asArray);
builder.putList(getKey(), asList);
}
}
}
@@ -1087,7 +1087,7 @@ private static List<String> parseableStringToList(String parsableString) {
}
}

private static String arrayToParsableString(String[] array) {
private static String arrayToParsableString(List<String> array) {
try {
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startArray();
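For context, list-typed settings are declared through the `Setting.listSetting(...)` factory that builds the `ListSetting` shown above; a minimal sketch (the key and values are made up, and the `(key, default, parser, properties)` factory shape is assumed from existing declarations):

```java
import java.util.Collections;
import java.util.List;
import java.util.function.Function;

import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;

public class ListSettingSketch {
    // A list-typed setting; key and default are illustrative only.
    static final Setting<List<String>> TAGS =
        Setting.listSetting("node.example.tags", Collections.emptyList(), Function.identity(), Property.NodeScope);

    public static void main(String[] args) {
        Settings settings = Settings.builder()
            .putList("node.example.tags", "hot", "warm")
            .build();

        // TAGS.get(settings) yields a List<String>; with this change the ListSetting
        // internals (getRaw/diff above) round-trip through getAsList/putList
        // instead of String[]/getAsArray/putArray.
        List<String> tags = TAGS.get(settings);
        System.out.println(tags); // [hot, warm]
    }
}
```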
48 changes: 24 additions & 24 deletions core/src/main/java/org/elasticsearch/common/settings/Settings.java
@@ -366,48 +366,48 @@ public SizeValue getAsSize(String setting, SizeValue defaultValue) throws Settin
}

/**
* The values associated with a setting key as an array.
* The values associated with a setting key as an immutable list.
* <p>
* It will also automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
* @param key The setting prefix to load the array by
* @return The setting array values
* @param key The setting key to load the list by
* @return The setting list values
*/
public String[] getAsArray(String key) throws SettingsException {
return getAsArray(key, Strings.EMPTY_ARRAY, true);
public List<String> getAsList(String key) throws SettingsException {
return getAsList(key, Collections.emptyList());
}

/**
* The values associated with a setting key as an array.
* The values associated with a setting key as an immutable list.
* <p>
* If commaDelimited is true, it will automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
* @param key The setting key to load the array by
* @return The setting array values
* @param key The setting key to load the list by
* @return The setting list values
*/
public String[] getAsArray(String key, String[] defaultArray) throws SettingsException {
return getAsArray(key, defaultArray, true);
public List<String> getAsList(String key, List<String> defaultValue) throws SettingsException {
return getAsList(key, defaultValue, true);
}

/**
* The values associated with a setting key as an array.
* The values associated with a setting key as an immutable list.
* <p>
* It will also automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
* @param key The setting key to load the array by
* @param defaultArray The default array to use if no value is specified
* @param key The setting key to load the list by
* @param defaultValue The default value to use if no value is specified
* @param commaDelimited Whether to try to parse a string as a comma-delimited value
* @return The setting array values
* @return The setting list values
*/
public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelimited) throws SettingsException {
public List<String> getAsList(String key, List<String> defaultValue, Boolean commaDelimited) throws SettingsException {
List<String> result = new ArrayList<>();
final Object valueFromPrefix = settings.get(key);
if (valueFromPrefix != null) {
if (valueFromPrefix instanceof List) {
result = ((List<String>) valueFromPrefix);
return ((List<String>) valueFromPrefix); // it's already unmodifiable since the builder puts it as a such
} else if (commaDelimited) {
String[] strings = Strings.splitStringByCommaToArray(get(key));
if (strings.length > 0) {
@@ -421,9 +421,9 @@ public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelim
}

if (result.isEmpty()) {
return defaultArray;
return defaultValue;
}
return result.toArray(new String[result.size()]);
return Collections.unmodifiableList(result);
}


@@ -552,7 +552,7 @@ public static Settings readSettingsFromStream(StreamInput in) throws IOException
if (value == null) {
builder.putNull(key);
} else if (value instanceof List) {
builder.putArray(key, (List<String>) value);
builder.putList(key, (List<String>) value);
} else {
builder.put(key, value.toString());
}
@@ -679,7 +679,7 @@ private static void fromXContent(XContentParser parser, StringBuilder keyBuilder
}
String key = keyBuilder.toString();
validateValue(key, list, builder, parser, allowNullValues);
builder.putArray(key, list);
builder.putList(key, list);
} else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
String key = keyBuilder.toString();
validateValue(key, null, builder, parser, allowNullValues);
@@ -898,7 +898,7 @@ public Builder copy(String key, String sourceKey, Settings source) {
}
final Object value = source.settings.get(sourceKey);
if (value instanceof List) {
return putArray(key, (List)value);
return putList(key, (List)value);
} else if (value == null) {
return putNull(key);
} else {
@@ -1022,8 +1022,8 @@ public Builder put(String setting, long value, ByteSizeUnit sizeUnit) {
* @param values The values
* @return The builder
*/
public Builder putArray(String setting, String... values) {
return putArray(setting, Arrays.asList(values));
public Builder putList(String setting, String... values) {
return putList(setting, Arrays.asList(values));
}

/**
@@ -1033,7 +1033,7 @@ public Builder putArray(String setting, String... values) {
* @param values The values
* @return The builder
*/
public Builder putArray(String setting, List<String> values) {
public Builder putList(String setting, List<String> values) {
remove(setting);
map.put(setting, Collections.unmodifiableList(new ArrayList<>(values)));
return this;
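A short sketch of the `getAsList` behavior implemented above: stored lists come back unmodifiable, a comma-delimited string value still parses into a list, and missing keys fall back to the supplied default (the setting keys are illustrative):

```java
import java.util.Arrays;
import java.util.List;

import org.elasticsearch.common.settings.Settings;

public class SettingsListAccessSketch {
    public static void main(String[] args) {
        Settings settings = Settings.builder()
            .put("index.analysis.analyzer.custom.filter", "lowercase,asciifolding")
            .build();

        // A comma-delimited string is split into a list (commaDelimited defaults to true).
        List<String> filters = settings.getAsList("index.analysis.analyzer.custom.filter");
        System.out.println(filters); // [lowercase, asciifolding]

        // A missing key returns the supplied default.
        List<String> charFilters = settings.getAsList("index.analysis.analyzer.custom.char_filter",
            Arrays.asList("html_strip"));
        System.out.println(charFilters); // [html_strip]

        // The returned list is unmodifiable; callers that mutated the old String[] copy
        // must now copy explicitly, e.g. new ArrayList<>(filters).
        // filters.add("stop"); // would throw UnsupportedOperationException
    }
}
```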
2 changes: 1 addition & 1 deletion core/src/main/java/org/elasticsearch/env/Environment.java
@@ -153,7 +153,7 @@ public Environment(final Settings settings, final Path configPath) {
Settings.Builder finalSettings = Settings.builder().put(settings);
finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile);
if (PATH_DATA_SETTING.exists(settings)) {
finalSettings.putArray(PATH_DATA_SETTING.getKey(), dataPaths);
finalSettings.putList(PATH_DATA_SETTING.getKey(), dataPaths);
}
finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString());
this.settings = finalSettings.build();
@@ -68,7 +68,6 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
@@ -105,10 +104,10 @@ public static CharArraySet parseStemExclusion(Settings settings, CharArraySet de
if ("_none_".equals(value)) {
return CharArraySet.EMPTY_SET;
}
String[] stemExclusion = settings.getAsArray("stem_exclusion", null);
List<String> stemExclusion = settings.getAsList("stem_exclusion", null);
if (stemExclusion != null) {
// LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)?
return new CharArraySet(Arrays.asList(stemExclusion), false);
return new CharArraySet(stemExclusion, false);
} else {
return defaultStemExclusion;
}
@@ -161,7 +160,7 @@ public static CharArraySet parseWords(Environment env, Settings settings, String
if ("_none_".equals(value)) {
return CharArraySet.EMPTY_SET;
} else {
return resolveNamedWords(Arrays.asList(settings.getAsArray(name)), namedWords, ignoreCase);
return resolveNamedWords(settings.getAsList(name), namedWords, ignoreCase);
}
}
List<String> pathLoadedWords = getWordList(env, settings, name);
@@ -225,11 +224,11 @@ public static List<String> getWordList(Environment env, Settings settings, Strin
String wordListPath = settings.get(settingPrefix + "_path", null);

if (wordListPath == null) {
String[] explicitWordList = settings.getAsArray(settingPrefix, null);
List<String> explicitWordList = settings.getAsList(settingPrefix, null);
if (explicitWordList == null) {
return null;
} else {
return Arrays.asList(explicitWordList);
return explicitWordList;
}
}

@@ -58,8 +58,8 @@ public void build(final Map<String, TokenizerFactory> tokenizers, final Map<Stri
throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name [" + tokenizerName + "]");
}

String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
List<String> charFilterNames = analyzerSettings.getAsList("char_filter");
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.size());
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -74,8 +74,8 @@ public void build(final Map<String, TokenizerFactory> tokenizers, final Map<Stri

int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);

String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
List<String> tokenFilterNames = analyzerSettings.getAsList("filter");
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.size());
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
@@ -50,8 +50,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map<Stri
throw new IllegalArgumentException("Custom normalizer [" + name() + "] cannot configure a tokenizer");
}

String[] charFilterNames = analyzerSettings.getAsArray("char_filter");
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.length);
List<String> charFilterNames = analyzerSettings.getAsList("char_filter");
List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.size());
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -66,8 +66,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map<Stri
charFiltersList.add(charFilter);
}

String[] tokenFilterNames = analyzerSettings.getAsArray("filter");
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.length);
List<String> tokenFilterNames = analyzerSettings.getAsList("filter");
List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.size());
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
@@ -41,7 +41,7 @@ public EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment enviro
super(indexSettings, name, settings);
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
this.matcher = parseTokenChars(settings.getAsList("token_chars"));
}

@Override
@@ -28,6 +28,7 @@
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;

@@ -65,8 +66,8 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
MATCHERS = unmodifiableMap(matchers);
}

static CharMatcher parseTokenChars(String[] characterClasses) {
if (characterClasses == null || characterClasses.length == 0) {
static CharMatcher parseTokenChars(List<String> characterClasses) {
if (characterClasses == null || characterClasses.isEmpty()) {
return null;
}
CharMatcher.Builder builder = new CharMatcher.Builder();
@@ -85,7 +86,7 @@ public NGramTokenizerFactory(IndexSettings indexSettings, Environment environmen
super(indexSettings, name, settings);
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
this.matcher = parseTokenChars(settings.getAsList("token_chars"));
}

@Override
@@ -62,7 +62,7 @@ public TokenStream create(TokenStream tokenStream) {

protected Reader getRulesFromSettings(Environment env) {
Reader rulesReader;
if (settings.getAsArray("synonyms", null) != null) {
if (settings.getAsList("synonyms", null) != null) {
List<String> rulesList = Analysis.getWordList(env, settings, "synonyms");
StringBuilder sb = new StringBuilder();
for (String line : rulesList) {
@@ -40,7 +40,6 @@
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter;
import org.elasticsearch.plugins.AnalysisPlugin;
import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;

@@ -73,7 +72,7 @@ public void setUp() throws Exception {
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.put("index.analysis.analyzer.custom_analyzer.filter", "mock")
.put("index.analysis.normalizer.my_normalizer.type", "custom")
.putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
.putList("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
environment = new Environment(settings);
AnalysisPlugin plugin = new AnalysisPlugin() {
@@ -210,7 +210,7 @@ protected void createIndexBasedOnFieldSettings(String index, String alias, TestF
Settings.Builder settings = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "standard")
.putArray("index.analysis.analyzer.tv_test.filter", "lowercase");
.putList("index.analysis.analyzer.tv_test.filter", "lowercase");
assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias)));
}

