Fix caching for PreConfiguredTokenFilter (elastic#50912)
PreConfiguredTokenFilter#singletonWithVersion uses the version
internally for the token filter factories, but it registers only one
instance in the cache rather than one instance per version. This can lead
to exceptions like the one described in elastic#50734, since the singleton is
created and cached using the creation version of the first index
that is processed.

Remove the singletonWithVersion() methods and use the
elasticsearchVersion() methods instead.

Fixes: elastic#50734
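
The root cause is the caching strategy: singletonWithVersion() passed the
index version to the create function but cached under CachingStrategy.ONE,
which keeps a single instance regardless of version. A minimal sketch of that
failure mode (illustrative only: the class name and the integer version
constants below are invented; this is not Elasticsearch's actual pre-built
cache code):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Illustrative sketch: contrasts a ONE-style singleton cache with a
// per-version cache. Not Elasticsearch's actual cache implementation.
class OneVersusPerVersionCache {

    // Stand-in for a version-dependent factory, like the word_delimiter_graph
    // filter that enables adjustOffsets only from 7.3.0 onwards.
    static String buildFilter(int version) {
        return version >= 7_03_00 ? "adjustOffsets=true" : "adjustOffsets=false";
    }

    public static void main(String[] args) {
        // CachingStrategy.ONE: one slot; the version is ignored after the first call.
        String[] slot = new String[1];
        Function<Integer, String> oneCache = v -> {
            if (slot[0] == null) {
                slot[0] = buildFilter(v);
            }
            return slot[0];
        };
        System.out.println(oneCache.apply(7_02_00)); // adjustOffsets=false
        System.out.println(oneCache.apply(7_03_00)); // still adjustOffsets=false -- the bug

        // CachingStrategy.ELASTICSEARCH: one cached instance per version.
        Map<Integer, String> perVersion = new ConcurrentHashMap<>();
        System.out.println(perVersion.computeIfAbsent(7_02_00, OneVersusPerVersionCache::buildFilter)); // adjustOffsets=false
        System.out.println(perVersion.computeIfAbsent(7_03_00, OneVersusPerVersionCache::buildFilter)); // adjustOffsets=true
    }
}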
matriv authored and SivagurunathanV committed Jan 21, 2020
1 parent 7d6221e commit e1a055e
Showing 4 changed files with 142 additions and 21 deletions.
@@ -507,7 +507,7 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
            | WordDelimiterFilter.SPLIT_ON_CASE_CHANGE
            | WordDelimiterFilter.SPLIT_ON_NUMERICS
            | WordDelimiterFilter.STEM_ENGLISH_POSSESSIVE, null)));
-        filters.add(PreConfiguredTokenFilter.singletonWithVersion("word_delimiter_graph", false, false, (input, version) -> {
+        filters.add(PreConfiguredTokenFilter.elasticsearchVersion("word_delimiter_graph", false, false, (input, version) -> {
            boolean adjustOffsets = version.onOrAfter(Version.V_7_3_0);
            return new WordDelimiterGraphFilter(input, adjustOffsets, WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE,
                WordDelimiterGraphFilter.GENERATE_WORD_PARTS
@@ -52,25 +52,6 @@ public static PreConfiguredTokenFilter singleton(String name, boolean useFilterF
                (tokenStream, version) -> create.apply(tokenStream));
    }

-    /**
-     * Create a pre-configured token filter that may vary based on the Elasticsearch version.
-     */
-    public static PreConfiguredTokenFilter singletonWithVersion(String name, boolean useFilterForMultitermQueries,
-                                                                BiFunction<TokenStream, Version, TokenStream> create) {
-        return new PreConfiguredTokenFilter(name, useFilterForMultitermQueries, true, CachingStrategy.ONE,
-                (tokenStream, version) -> create.apply(tokenStream, version));
-    }
-
-    /**
-     * Create a pre-configured token filter that may vary based on the Elasticsearch version.
-     */
-    public static PreConfiguredTokenFilter singletonWithVersion(String name, boolean useFilterForMultitermQueries,
-                                                                boolean useFilterForParsingSynonyms,
-                                                                BiFunction<TokenStream, Version, TokenStream> create) {
-        return new PreConfiguredTokenFilter(name, useFilterForMultitermQueries, useFilterForParsingSynonyms, CachingStrategy.ONE,
-                (tokenStream, version) -> create.apply(tokenStream, version));
-    }
-
    /**
     * Create a pre-configured token filter that may vary based on the Lucene version.
     */
@@ -88,6 +69,16 @@ public static PreConfiguredTokenFilter elasticsearchVersion(String name, boolean
        return new PreConfiguredTokenFilter(name, useFilterForMultitermQueries, true, CachingStrategy.ELASTICSEARCH, create);
    }

+    /**
+     * Create a pre-configured token filter that may vary based on the Elasticsearch version.
+     */
+    public static PreConfiguredTokenFilter elasticsearchVersion(String name, boolean useFilterForMultitermQueries,
+                                                                boolean useFilterForParsingSynonyms,
+                                                                BiFunction<TokenStream, Version, TokenStream> create) {
+        return new PreConfiguredTokenFilter(name, useFilterForMultitermQueries, useFilterForParsingSynonyms,
+                CachingStrategy.ELASTICSEARCH, create);
+    }
+
    private final boolean useFilterForMultitermQueries;
    private final boolean allowForSynonymParsing;
    private final BiFunction<TokenStream, Version, TokenStream> create;
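
With the singletonWithVersion() variants removed, callers that need
version-dependent behavior use elasticsearchVersion(), whose
CachingStrategy.ELASTICSEARCH backing caches one TokenFilterFactory per
Elasticsearch version. A hedged sketch of what a registration might look like
(the filter name example_lowercase and the surrounding class are invented for
illustration; the factory method itself is the one retained above):

import org.apache.lucene.analysis.LowerCaseFilter;
import org.elasticsearch.Version;
import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;

// Illustrative registration mirroring the word_delimiter_graph pattern above.
// The version gate runs for every distinct index creation version, not just
// for the first index that happens to be processed.
public class ExampleFilterRegistration {

    public static PreConfiguredTokenFilter exampleFilter() {
        return PreConfiguredTokenFilter.elasticsearchVersion("example_lowercase", true,
            (tokenStream, version) -> {
                if (version.onOrAfter(Version.V_7_3_0)) {
                    return new LowerCaseFilter(tokenStream); // hypothetical gated behavior
                }
                return tokenStream;
            });
    }
}

The new three-argument overload added in this commit behaves the same way
while also exposing useFilterForParsingSynonyms.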
@@ -181,7 +181,7 @@ static Map<String, PreConfiguredTokenFilter> setupPreConfiguredTokenFilters(List
        preConfiguredTokenFilters.register("lowercase", PreConfiguredTokenFilter.singleton("lowercase", true, LowerCaseFilter::new));
        // Add "standard" for old indices (bwc)
        preConfiguredTokenFilters.register("standard",
-            PreConfiguredTokenFilter.singletonWithVersion("standard", true, (reader, version) -> {
+            PreConfiguredTokenFilter.elasticsearchVersion("standard", true, (reader, version) -> {
                if (version.before(Version.V_7_0_0)) {
                    deprecationLogger.deprecatedAndMaybeLog("standard_deprecation",
                        "The [standard] token filter is deprecated and will be removed in a future version.");
@@ -0,0 +1,130 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;

import org.apache.lucene.analysis.TokenFilter;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;

public class PreConfiguredTokenFilterTests extends ESTestCase {

    private final Settings emptyNodeSettings = Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
        .build();

    public void testCachingWithSingleton() throws IOException {
        PreConfiguredTokenFilter pctf =
            PreConfiguredTokenFilter.singleton("singleton", randomBoolean(),
                (tokenStream) -> new TokenFilter(tokenStream) {
                    @Override
                    public boolean incrementToken() {
                        return false;
                    }
                });

        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY);

        Version version1 = VersionUtils.randomVersion(random());
        Settings settings1 = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version1)
            .build();
        TokenFilterFactory tff_v1_1 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "singleton", settings1);
        TokenFilterFactory tff_v1_2 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "singleton", settings1);
        assertSame(tff_v1_1, tff_v1_2);

        Version version2 = randomValueOtherThan(version1, () -> randomFrom(VersionUtils.allVersions()));
        Settings settings2 = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version2)
            .build();

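        // CachingStrategy.ONE ignores the index version: even for a different
        // creation version, the first cached factory instance is returned.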
        TokenFilterFactory tff_v2 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "singleton", settings2);
        assertSame(tff_v1_1, tff_v2);
    }

    public void testCachingWithElasticsearchVersion() throws IOException {
        PreConfiguredTokenFilter pctf =
            PreConfiguredTokenFilter.elasticsearchVersion("elasticsearch_version", randomBoolean(),
                (tokenStream, esVersion) -> new TokenFilter(tokenStream) {
                    @Override
                    public boolean incrementToken() {
                        return false;
                    }
                });

        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY);

        Version version1 = VersionUtils.randomVersion(random());
        Settings settings1 = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version1)
            .build();
        TokenFilterFactory tff_v1_1 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "elasticsearch_version", settings1);
        TokenFilterFactory tff_v1_2 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "elasticsearch_version", settings1);
        assertSame(tff_v1_1, tff_v1_2);

        Version version2 = randomValueOtherThan(version1, () -> randomFrom(VersionUtils.allVersions()));
        Settings settings2 = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version2)
            .build();

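        // CachingStrategy.ELASTICSEARCH keys the cache by Elasticsearch version,
        // so a different index creation version yields a distinct factory.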
        TokenFilterFactory tff_v2 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "elasticsearch_version", settings2);
        assertNotSame(tff_v1_1, tff_v2);
    }

    public void testCachingWithLuceneVersion() throws IOException {
        PreConfiguredTokenFilter pctf =
            PreConfiguredTokenFilter.luceneVersion("lucene_version", randomBoolean(),
                (tokenStream, luceneVersion) -> new TokenFilter(tokenStream) {
                    @Override
                    public boolean incrementToken() {
                        return false;
                    }
                });

        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", Settings.EMPTY);

        Version version1 = Version.CURRENT;
        Settings settings1 = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version1)
            .build();
        TokenFilterFactory tff_v1_1 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "lucene_version", settings1);
        TokenFilterFactory tff_v1_2 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "lucene_version", settings1);
        assertSame(tff_v1_1, tff_v1_2);

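        // Pick a version one major below the oldest known version so that its
        // Lucene version is guaranteed to differ from version1's.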
        byte major = VersionUtils.getFirstVersion().major;
        Version version2 = Version.fromString(major - 1 + ".0.0");
        Settings settings2 = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version2)
            .build();

        TokenFilterFactory tff_v2 =
            pctf.get(indexSettings, TestEnvironment.newEnvironment(emptyNodeSettings), "lucene_version", settings2);
        assertNotSame(tff_v1_1, tff_v2);
    }
}
