[CORE] Be more explicit if index.version.created is not set
Today we use the current version, which might enable features that are
not supported. We should throw an exception if this setting is not
present for any index.

Closes #8018
s1monw committed Oct 14, 2014
1 parent 7e916d0 commit ac4b39b
Showing 43 changed files with 165 additions and 123 deletions.
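In rough terms, the new contract looks like the sketch below (illustrative only; the builder calls mirror those used in the diff). Settings that carry index.version.created resolve to exactly that version, while settings that omit it used to fall back silently to Version.CURRENT and now fail fast.

    // Settings carrying the creation version resolve to exactly that version.
    Settings settings = ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .build();
    Version created = Version.indexCreated(settings); // equals Version.CURRENT

    // Settings without the key previously defaulted to Version.CURRENT;
    // after this commit the call throws ElasticsearchIllegalStateException.
    Version.indexCreated(ImmutableSettings.Builder.EMPTY_SETTINGS);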
9 changes: 6 additions & 3 deletions src/main/java/org/elasticsearch/Version.java
@@ -396,11 +396,14 @@ public static Version fromId(int id) {

/**
* Return the {@link Version} of Elasticsearch that has been used to create an index given its settings.
* @throws ElasticsearchIllegalStateException if the given index settings don't contain a value for the key {@value IndexMetaData#SETTING_VERSION_CREATED}
*/
public static Version indexCreated(Settings indexSettings) {
assert indexSettings.get(IndexMetaData.SETTING_UUID) == null // if the UUID is there the index has actually been created, otherwise this might be a test
|| indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null) != null : IndexMetaData.SETTING_VERSION_CREATED + " not set in IndexSettings";
return indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
final Version indexVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, null);
if (indexVersion == null) {
throw new ElasticsearchIllegalStateException("[" + IndexMetaData.SETTING_VERSION_CREATED + "] is not present in the index settings for index with uuid: [" + indexSettings.get(IndexMetaData.SETTING_UUID) + "]");
}
return indexVersion;
}

public static void writeVersion(Version version, StreamOutput out) throws IOException {
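The rest of the commit is largely mechanical: call sites swap the lenient lookup for the new helper. A hedged before/after sketch (the esVersion field name is taken from the factories changed below):

    // Before: silently assumed the current version when the setting was absent.
    this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);

    // After: resolves the index creation version, or throws
    // ElasticsearchIllegalStateException if index.version.created is missing.
    this.esVersion = Version.indexCreated(indexSettings);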
@@ -27,12 +27,14 @@
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.single.custom.TransportSingleCustomOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.ShardsIterator;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.ImmutableSettings;
@@ -61,6 +63,8 @@ public class TransportAnalyzeAction extends TransportSingleCustomOperationAction

private final IndicesAnalysisService indicesAnalysisService;

private static final Settings DEFAULT_SETTINGS = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();

@Inject
public TransportAnalyzeAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, IndicesAnalysisService indicesAnalysisService, ActionFilters actionFilters) {
@@ -153,7 +157,7 @@ protected AnalyzeResponse shardOperation(AnalyzeRequest request, ShardId shardId
if (tokenizerFactoryFactory == null) {
throw new ElasticsearchIllegalArgumentException("failed to find global tokenizer under [" + request.tokenizer() + "]");
}
tokenizerFactory = tokenizerFactoryFactory.create(request.tokenizer(), ImmutableSettings.Builder.EMPTY_SETTINGS);
tokenizerFactory = tokenizerFactoryFactory.create(request.tokenizer(), DEFAULT_SETTINGS);
} else {
tokenizerFactory = indexService.analysisService().tokenizer(request.tokenizer());
if (tokenizerFactory == null) {
@@ -171,7 +175,7 @@ protected AnalyzeResponse shardOperation(AnalyzeRequest request, ShardId shardId
if (tokenFilterFactoryFactory == null) {
throw new ElasticsearchIllegalArgumentException("failed to find global token filter under [" + tokenFilterName + "]");
}
tokenFilterFactories[i] = tokenFilterFactoryFactory.create(tokenFilterName, ImmutableSettings.Builder.EMPTY_SETTINGS);
tokenFilterFactories[i] = tokenFilterFactoryFactory.create(tokenFilterName, DEFAULT_SETTINGS);
} else {
tokenFilterFactories[i] = indexService.analysisService().tokenFilter(tokenFilterName);
if (tokenFilterFactories[i] == null) {
@@ -194,7 +198,7 @@ protected AnalyzeResponse shardOperation(AnalyzeRequest request, ShardId shardId
if (charFilterFactoryFactory == null) {
throw new ElasticsearchIllegalArgumentException("failed to find global char filter under [" + charFilterName + "]");
}
charFilterFactories[i] = charFilterFactoryFactory.create(charFilterName, ImmutableSettings.Builder.EMPTY_SETTINGS);
charFilterFactories[i] = charFilterFactoryFactory.create(charFilterName, DEFAULT_SETTINGS);
} else {
charFilterFactories[i] = indexService.analysisService().charFilter(charFilterName);
if (charFilterFactories[i] == null) {
@@ -24,6 +24,7 @@
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.node.DiscoveryNodeFilters;
@@ -193,7 +194,6 @@ private IndexMetaData(String index, long version, State state, Settings settings
this.mappings = mappings;
this.customs = customs;
this.totalNumberOfShards = numberOfShards() * (numberOfReplicas() + 1);

this.aliases = aliases;

ImmutableMap<String, String> requireMap = settings.getByPrefix("index.routing.allocation.require.").getAsMap();
@@ -216,6 +216,8 @@ private IndexMetaData(String index, long version, State state, Settings settings
}
}



public String index() {
return index;
}
@@ -92,7 +92,7 @@ public static Version parseAnalysisVersion(@IndexSettings Settings indexSettings
return Lucene.parseVersion(sVersion, Lucene.ANALYZER_VERSION, logger);
}
// resolve the analysis version based on the version the index was created with
return indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT).luceneVersion;
return org.elasticsearch.Version.indexCreated(indexSettings).luceneVersion;
}

public static boolean isNoStopwords(Settings settings) {
29 changes: 15 additions & 14 deletions src/main/java/org/elasticsearch/index/analysis/AnalysisService.java
@@ -54,8 +54,9 @@ public class AnalysisService extends AbstractIndexComponent implements Closeable
private final NamedAnalyzer defaultSearchAnalyzer;
private final NamedAnalyzer defaultSearchQuoteAnalyzer;

public AnalysisService(Index index) {
this(index, ImmutableSettings.Builder.EMPTY_SETTINGS, null, null, null, null, null);

public AnalysisService(Index index, Settings indexSettings) {
this(index, indexSettings, null, null, null, null, null);
}

@Inject
@@ -65,7 +66,7 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null
@Nullable Map<String, CharFilterFactoryFactory> charFilterFactoryFactories,
@Nullable Map<String, TokenFilterFactoryFactory> tokenFilterFactoryFactories) {
super(index, indexSettings);

Settings defaultSettings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.indexCreated(indexSettings)).build();
Map<String, TokenizerFactory> tokenizers = newHashMap();
if (tokenizerFactoryFactories != null) {
Map<String, Settings> tokenizersSettings = indexSettings.getGroups("index.analysis.tokenizer");
@@ -75,7 +76,7 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null

Settings tokenizerSettings = tokenizersSettings.get(tokenizerName);
if (tokenizerSettings == null) {
tokenizerSettings = ImmutableSettings.Builder.EMPTY_SETTINGS;
tokenizerSettings = defaultSettings;
}

TokenizerFactory tokenizerFactory = tokenizerFactoryFactory.create(tokenizerName, tokenizerSettings);
@@ -88,12 +89,12 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null
for (Map.Entry<String, PreBuiltTokenizerFactoryFactory> entry : indicesAnalysisService.tokenizerFactories().entrySet()) {
String name = entry.getKey();
if (!tokenizers.containsKey(name)) {
tokenizers.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS));
tokenizers.put(name, entry.getValue().create(name, defaultSettings));
}
name = Strings.toCamelCase(entry.getKey());
if (!name.equals(entry.getKey())) {
if (!tokenizers.containsKey(name)) {
tokenizers.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS));
tokenizers.put(name, entry.getValue().create(name, defaultSettings));
}
}
}
@@ -110,7 +111,7 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null

Settings charFilterSettings = charFiltersSettings.get(charFilterName);
if (charFilterSettings == null) {
charFilterSettings = ImmutableSettings.Builder.EMPTY_SETTINGS;
charFilterSettings = defaultSettings;
}

CharFilterFactory tokenFilterFactory = charFilterFactoryFactory.create(charFilterName, charFilterSettings);
@@ -123,12 +124,12 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null
for (Map.Entry<String, PreBuiltCharFilterFactoryFactory> entry : indicesAnalysisService.charFilterFactories().entrySet()) {
String name = entry.getKey();
if (!charFilters.containsKey(name)) {
charFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS));
charFilters.put(name, entry.getValue().create(name, defaultSettings));
}
name = Strings.toCamelCase(entry.getKey());
if (!name.equals(entry.getKey())) {
if (!charFilters.containsKey(name)) {
charFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS));
charFilters.put(name, entry.getValue().create(name, defaultSettings));
}
}
}
@@ -145,7 +146,7 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null

Settings tokenFilterSettings = tokenFiltersSettings.get(tokenFilterName);
if (tokenFilterSettings == null) {
tokenFilterSettings = ImmutableSettings.Builder.EMPTY_SETTINGS;
tokenFilterSettings = defaultSettings;
}

TokenFilterFactory tokenFilterFactory = tokenFilterFactoryFactory.create(tokenFilterName, tokenFilterSettings);
@@ -159,12 +160,12 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null
for (Map.Entry<String, PreBuiltTokenFilterFactoryFactory> entry : indicesAnalysisService.tokenFilterFactories().entrySet()) {
String name = entry.getKey();
if (!tokenFilters.containsKey(name)) {
tokenFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS));
tokenFilters.put(name, entry.getValue().create(name, defaultSettings));
}
name = Strings.toCamelCase(entry.getKey());
if (!name.equals(entry.getKey())) {
if (!tokenFilters.containsKey(name)) {
tokenFilters.put(name, entry.getValue().create(name, ImmutableSettings.Builder.EMPTY_SETTINGS));
tokenFilters.put(name, entry.getValue().create(name, defaultSettings));
}
}
}
@@ -180,7 +181,7 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null

Settings analyzerSettings = analyzersSettings.get(analyzerName);
if (analyzerSettings == null) {
analyzerSettings = ImmutableSettings.Builder.EMPTY_SETTINGS;
analyzerSettings = defaultSettings;
}

AnalyzerProvider analyzerFactory = analyzerFactoryFactory.create(analyzerName, analyzerSettings);
@@ -190,7 +191,7 @@ public AnalysisService(Index index, @IndexSettings Settings indexSettings, @Null
if (indicesAnalysisService != null) {
for (Map.Entry<String, PreBuiltAnalyzerProviderFactory> entry : indicesAnalysisService.analyzerProviderFactories().entrySet()) {
String name = entry.getKey();
Version indexVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Version indexVersion = Version.indexCreated(indexSettings);
if (!analyzerProviders.containsKey(name)) {
analyzerProviders.put(name, entry.getValue().create(name, ImmutableSettings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, indexVersion).build()));
}
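Where factories used to be created with ImmutableSettings.Builder.EMPTY_SETTINGS, AnalysisService (like TransportAnalyzeAction above) now substitutes fallback settings that still carry index.version.created, so pre-built components can resolve the creation version. A condensed sketch of that pattern, with the surrounding constructor omitted:

    // Fallback settings that preserve the index creation version
    // (mirrors the defaultSettings local introduced in this file).
    Settings defaultSettings = ImmutableSettings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, Version.indexCreated(indexSettings))
            .build();

    Settings tokenizerSettings = tokenizersSettings.get(tokenizerName);
    if (tokenizerSettings == null) {
        tokenizerSettings = defaultSettings; // was ImmutableSettings.Builder.EMPTY_SETTINGS
    }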
@@ -53,7 +53,7 @@ public EdgeNGramTokenFilterFactory(Index index, @IndexSettings Settings indexSet
this.minGram = settings.getAsInt("min_gram", NGramTokenFilter.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenFilter.DEFAULT_MAX_NGRAM_SIZE);
this.side = EdgeNGramTokenFilter.Side.getSide(settings.get("side", Lucene43EdgeNGramTokenizer.DEFAULT_SIDE.getLabel()));
this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
this.esVersion = org.elasticsearch.Version.indexCreated(indexSettings);
}

@Override
@@ -60,7 +60,7 @@ public EdgeNGramTokenizerFactory(Index index, @IndexSettings Settings indexSetti
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
this.side = Lucene43EdgeNGramTokenizer.Side.getSide(settings.get("side", Lucene43EdgeNGramTokenizer.DEFAULT_SIDE.getLabel()));
this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
this.esVersion = org.elasticsearch.Version.indexCreated(indexSettings);
}

@Override
@@ -96,7 +96,7 @@ public NGramTokenizerFactory(Index index, @IndexSettings Settings indexSettings,
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
this.esVersion = org.elasticsearch.Version.indexCreated(indexSettings);
}

@SuppressWarnings("deprecation")
@@ -76,7 +76,7 @@ protected TokenStreamComponents createComponents(String s, Reader reader) {
public PatternAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);

Version esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
Version esVersion = Version.indexCreated(indexSettings);
final CharArraySet defaultStopwords;
if (esVersion.onOrAfter(Version.V_1_0_0_RC1)) {
defaultStopwords = CharArraySet.EMPTY_SET;
@@ -38,7 +38,7 @@ public PreBuiltAnalyzerProviderFactory(String name, AnalyzerScope scope, Analyze

@Override
public AnalyzerProvider create(String name, Settings settings) {
Version indexVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Version indexVersion = Version.indexCreated(settings);
if (!Version.CURRENT.equals(indexVersion)) {
PreBuiltAnalyzers preBuiltAnalyzers = PreBuiltAnalyzers.getOrDefault(name, null);
if (preBuiltAnalyzers != null) {
@@ -34,7 +34,7 @@ public PreBuiltCharFilterFactoryFactory(CharFilterFactory charFilterFactory) {

@Override
public CharFilterFactory create(String name, Settings settings) {
Version indexVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Version indexVersion = Version.indexCreated(settings);
if (!Version.CURRENT.equals(indexVersion)) {
PreBuiltCharFilters preBuiltCharFilters = PreBuiltCharFilters.getOrDefault(name, null);
if (preBuiltCharFilters != null) {
@@ -34,7 +34,7 @@ public PreBuiltTokenFilterFactoryFactory(TokenFilterFactory tokenFilterFactory)

@Override
public TokenFilterFactory create(String name, Settings settings) {
Version indexVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Version indexVersion = Version.indexCreated(settings);
if (!Version.CURRENT.equals(indexVersion)) {
PreBuiltTokenFilters preBuiltTokenFilters = PreBuiltTokenFilters.getOrDefault(name, null);
if (preBuiltTokenFilters != null) {
@@ -34,7 +34,7 @@ public PreBuiltTokenizerFactoryFactory(TokenizerFactory tokenizerFactory) {

@Override
public TokenizerFactory create(String name, Settings settings) {
Version indexVersion = settings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT);
Version indexVersion = Version.indexCreated(settings);
if (!Version.CURRENT.equals(indexVersion)) {
PreBuiltTokenizers preBuiltTokenizers = PreBuiltTokenizers.getOrDefault(name, null);
if (preBuiltTokenizers != null) {
@@ -42,7 +42,7 @@ public class StandardAnalyzerProvider extends AbstractIndexAnalyzerProvider<Stan
@Inject
public StandardAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
this.esVersion = Version.indexCreated(indexSettings);
final CharArraySet defaultStopwords;
if (esVersion.onOrAfter(Version.V_1_0_0_Beta1)) {
defaultStopwords = CharArraySet.EMPTY_SET;
@@ -41,7 +41,7 @@ public class StandardHtmlStripAnalyzerProvider extends AbstractIndexAnalyzerProv
@Inject
public StandardHtmlStripAnalyzerProvider(Index index, @IndexSettings Settings indexSettings, Environment env, @Assisted String name, @Assisted Settings settings) {
super(index, indexSettings, name, settings);
this.esVersion = indexSettings.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, org.elasticsearch.Version.CURRENT);
this.esVersion = Version.indexCreated(indexSettings);
final CharArraySet defaultStopwords;
if (esVersion.onOrAfter(Version.V_1_0_0_RC1)) {
defaultStopwords = CharArraySet.EMPTY_SET;
