
remove or cutover all uses of now unsupported versions

mikemccand committed Mar 2, 2016
1 parent c62ad7b commit 09aa951ad0cb48ee9d2ea220e8e23005aac71b10
Showing with 28 additions and 296 deletions.
  1. +1 −3 lucene/analysis/common/src/java/org/apache/lucene/analysis/ar/ArabicAnalyzer.java
  2. +1 −3 lucene/analysis/common/src/java/org/apache/lucene/analysis/ckb/SoraniAnalyzer.java
  3. +1 −3 lucene/analysis/common/src/java/org/apache/lucene/analysis/fa/PersianAnalyzer.java
  4. +1 −3 lucene/analysis/common/src/java/org/apache/lucene/analysis/hi/HindiAnalyzer.java
  5. +2 −3 lucene/analysis/common/src/java/org/apache/lucene/analysis/th/ThaiAnalyzer.java
  6. +0 −12 lucene/analysis/common/src/test/org/apache/lucene/analysis/ar/TestArabicAnalyzer.java
  7. +0 −12 lucene/analysis/common/src/test/org/apache/lucene/analysis/ckb/TestSoraniAnalyzer.java
  8. +4 −4 lucene/analysis/common/src/test/org/apache/lucene/analysis/custom/TestCustomAnalyzer.java
  9. +0 −12 lucene/analysis/common/src/test/org/apache/lucene/analysis/fa/TestPersianAnalyzer.java
  10. +0 −12 lucene/analysis/common/src/test/org/apache/lucene/analysis/hi/TestHindiAnalyzer.java
  11. +0 −12 lucene/analysis/common/src/test/org/apache/lucene/analysis/th/TestThaiAnalyzer.java
  12. +0 −107 lucene/backward-codecs/src/test/org/apache/lucene/index/TestMaxPositionInOldIndex.java
  13. +5 −5 lucene/core/src/test/org/apache/lucene/index/TestSegmentInfos.java
  14. +9 −9 lucene/core/src/test/org/apache/lucene/util/TestVersion.java
  15. +4 −9 solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
  16. +0 −1 solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarity.java
  17. +0 −86 solr/core/src/test/org/apache/solr/search/similarities/TestPerFieldSimilarityClassic.java
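
Every analyzer hunk below applies the same cutover: the per-instance match-version gate around DecimalDigitFilter is dropped, and the filter now runs unconditionally since 5.x back compat is no longer supported. A minimal sketch of the pattern, assuming the Lucene 6.x analyzers-common package layout (the HypotheticalAnalyzer class name is illustrative only, not from this commit):

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.DecimalDigitFilter;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;

public final class HypotheticalAnalyzer extends Analyzer {
  @Override
  protected TokenStreamComponents createComponents(String fieldName) {
    final Tokenizer source = new StandardTokenizer();
    TokenStream result = new LowerCaseFilter(source);
    // Before this commit, digit folding was gated on the match version:
    //   if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
    //     result = new DecimalDigitFilter(result);
    //   }
    // With 5.x support removed, the filter is applied unconditionally:
    result = new DecimalDigitFilter(result);
    return new TokenStreamComponents(source, result);
  }
}

The matching test-only back-compat checks (testDigitsBackCompat, pinned to Version.LUCENE_5_3_0) are deleted outright, since there is no longer an older behavior to verify.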
@@ -134,9 +134,7 @@ public ArabicAnalyzer(CharArraySet stopwords, CharArraySet stemExclusionSet){
protected TokenStreamComponents createComponents(String fieldName) {
final Tokenizer source = new StandardTokenizer();
TokenStream result = new LowerCaseFilter(source);
-if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-  result = new DecimalDigitFilter(result);
-}
+result = new DecimalDigitFilter(result);
// the order here is important: the stopword list is not normalized!
result = new StopFilter(result, stopwords);
// TODO maybe we should make ArabicNormalization filter also KeywordAttribute aware?!
@@ -120,9 +120,7 @@ protected TokenStreamComponents createComponents(String fieldName) {
TokenStream result = new StandardFilter(source);
result = new SoraniNormalizationFilter(result);
result = new LowerCaseFilter(result);
-if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-  result = new DecimalDigitFilter(result);
-}
+result = new DecimalDigitFilter(result);
result = new StopFilter(result, stopwords);
if(!stemExclusionSet.isEmpty())
result = new SetKeywordMarkerFilter(result, stemExclusionSet);
@@ -116,9 +116,7 @@ public PersianAnalyzer(CharArraySet stopwords){
protected TokenStreamComponents createComponents(String fieldName) {
final Tokenizer source = new StandardTokenizer();
TokenStream result = new LowerCaseFilter(source);
-if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-  result = new DecimalDigitFilter(result);
-}
+result = new DecimalDigitFilter(result);
result = new ArabicNormalizationFilter(result);
/* additional persian-specific normalization */
result = new PersianNormalizationFilter(result);
@@ -117,9 +117,7 @@ public HindiAnalyzer() {
protected TokenStreamComponents createComponents(String fieldName) {
final Tokenizer source = new StandardTokenizer();
TokenStream result = new LowerCaseFilter(source);
-if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-  result = new DecimalDigitFilter(result);
-}
+result = new DecimalDigitFilter(result);
if (!stemExclusionSet.isEmpty())
result = new SetKeywordMarkerFilter(result, stemExclusionSet);
result = new IndicNormalizationFilter(result);
@@ -98,9 +98,8 @@ public ThaiAnalyzer(CharArraySet stopwords) {
protected TokenStreamComponents createComponents(String fieldName) {
final Tokenizer source = new ThaiTokenizer();
TokenStream result = new LowerCaseFilter(source);
-if (getVersion().onOrAfter(Version.LUCENE_5_4_0)) {
-  result = new DecimalDigitFilter(result);
-}
+result = new DecimalDigitFilter(result);

result = new StopFilter(result, stopwords);
return new TokenStreamComponents(source, result);
}
@@ -110,18 +110,6 @@ public void testDigits() throws Exception {
a.close();
}

-/**
- * test that we don't fold digits for back compat behavior
- * @deprecated remove this test in lucene 7
- */
-@Deprecated
-public void testDigitsBackCompat() throws Exception {
-ArabicAnalyzer a = new ArabicAnalyzer();
-a.setVersion(Version.LUCENE_5_3_0);
-checkOneTerm(a, "١٢٣٤", "١٢٣٤");
-a.close();
-}

/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
ArabicAnalyzer a = new ArabicAnalyzer();
@@ -74,18 +74,6 @@ public void testDigits() throws Exception {
a.close();
}

-/**
- * test that we don't fold digits for back compat behavior
- * @deprecated remove this test in lucene 7
- */
-@Deprecated
-public void testDigitsBackCompat() throws Exception {
-SoraniAnalyzer a = new SoraniAnalyzer();
-a.setVersion(Version.LUCENE_5_3_0);
-checkOneTerm(a, "١٢٣٤", "١٢٣٤");
-a.close();
-}

/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer a = new SoraniAnalyzer();
@@ -95,7 +95,7 @@ public void testWhitespaceWithFolding() throws Exception {

public void testFactoryHtmlStripClassicFolding() throws Exception {
CustomAnalyzer a = CustomAnalyzer.builder()
-.withDefaultMatchVersion(Version.LUCENE_5_0_0)
+.withDefaultMatchVersion(Version.LUCENE_6_0_0)
.addCharFilter(HTMLStripCharFilterFactory.class)
.withTokenizer(ClassicTokenizerFactory.class)
.addTokenFilter(ASCIIFoldingFilterFactory.class, "preserveOriginal", "true")
@@ -114,7 +114,7 @@ public void testFactoryHtmlStripClassicFolding() throws Exception {
assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
assertEquals(100, a.getPositionIncrementGap("dummy"));
assertEquals(1000, a.getOffsetGap("dummy"));
-assertSame(Version.LUCENE_5_0_0, a.getVersion());
+assertSame(Version.LUCENE_6_0_0, a.getVersion());

assertAnalyzesTo(a, "<p>foo bar</p> FOO BAR",
new String[] { "foo", "bar", "foo", "bar" },
@@ -127,7 +127,7 @@ public void testFactoryHtmlStripClassicFolding() throws Exception {

public void testHtmlStripClassicFolding() throws Exception {
CustomAnalyzer a = CustomAnalyzer.builder()
-.withDefaultMatchVersion(Version.LUCENE_5_0_0)
+.withDefaultMatchVersion(Version.LUCENE_6_0_0)
.addCharFilter("htmlstrip")
.withTokenizer("classic")
.addTokenFilter("asciifolding", "preserveOriginal", "true")
@@ -146,7 +146,7 @@ public void testHtmlStripClassicFolding() throws Exception {
assertSame(LowerCaseFilterFactory.class, tokenFilters.get(1).getClass());
assertEquals(100, a.getPositionIncrementGap("dummy"));
assertEquals(1000, a.getOffsetGap("dummy"));
-assertSame(Version.LUCENE_5_0_0, a.getVersion());
+assertSame(Version.LUCENE_6_0_0, a.getVersion());

assertAnalyzesTo(a, "<p>foo bar</p> FOO BAR",
new String[] { "foo", "bar", "foo", "bar" },
@@ -238,18 +238,6 @@ public void testDigits() throws Exception {
a.close();
}

-/**
- * test that we don't fold digits for back compat behavior
- * @deprecated remove this test in lucene 7
- */
-@Deprecated
-public void testDigitsBackCompat() throws Exception {
-PersianAnalyzer a = new PersianAnalyzer();
-a.setVersion(Version.LUCENE_5_3_0);
-checkOneTerm(a, "۱۲۳۴", "۱۲۳۴");
-a.close();
-}

/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
PersianAnalyzer a = new PersianAnalyzer();
@@ -57,18 +57,6 @@ public void testDigits() throws Exception {
a.close();
}

-/**
- * test that we don't fold digits for back compat behavior
- * @deprecated remove this test in lucene 7
- */
-@Deprecated
-public void testDigitsBackCompat() throws Exception {
-HindiAnalyzer a = new HindiAnalyzer();
-a.setVersion(Version.LUCENE_5_3_0);
-checkOneTerm(a, "१२३४", "१२३४");
-a.close();
-}

/** blast some random strings through the analyzer */
public void testRandomStrings() throws Exception {
Analyzer analyzer = new HindiAnalyzer();
@@ -132,18 +132,6 @@ public void testDigits() throws Exception {
a.close();
}

-/**
- * test that we don't fold digits for back compat behavior
- * @deprecated remove this test in lucene 7
- */
-@Deprecated
-public void testDigitsBackCompat() throws Exception {
-ThaiAnalyzer a = new ThaiAnalyzer();
-a.setVersion(Version.LUCENE_5_3_0);
-checkOneTerm(a, "๑๒๓๔", "๑๒๓๔");
-a.close();
-}

public void testTwoSentences() throws Exception {
Analyzer analyzer = new ThaiAnalyzer(CharArraySet.EMPTY_SET);
assertAnalyzesTo(analyzer, "This is a test. การที่ได้ต้องแสดงว่างานดี",

lucene/backward-codecs/src/test/org/apache/lucene/index/TestMaxPositionInOldIndex.java: This file was deleted.

@@ -50,7 +50,7 @@ public void testVersionsOneSegment() throws IOException {
Codec codec = Codec.getDefault();

SegmentInfos sis = new SegmentInfos();
-SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_5_0_0, "_0", 1, false, Codec.getDefault(),
+SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap());
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -59,7 +59,7 @@ public void testVersionsOneSegment() throws IOException {
sis.add(commitInfo);
sis.commit(dir);
sis = SegmentInfos.readLatestCommit(dir);
-assertEquals(Version.LUCENE_5_0_0, sis.getMinSegmentLuceneVersion());
+assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
dir.close();
}
@@ -72,14 +72,14 @@ public void testVersionsTwoSegments() throws IOException {
Codec codec = Codec.getDefault();

SegmentInfos sis = new SegmentInfos();
-SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_5_0_0, "_0", 1, false, Codec.getDefault(),
+SegmentInfo info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_0", 1, false, Codec.getDefault(),
Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap());
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
SegmentCommitInfo commitInfo = new SegmentCommitInfo(info, 0, -1, -1, -1);
sis.add(commitInfo);

-info = new SegmentInfo(dir, Version.LUCENE_5_1_0, "_1", 1, false, Codec.getDefault(),
+info = new SegmentInfo(dir, Version.LUCENE_6_0_0, "_1", 1, false, Codec.getDefault(),
Collections.<String,String>emptyMap(), id, Collections.<String,String>emptyMap());
info.setFiles(Collections.<String>emptySet());
codec.segmentInfoFormat().write(dir, info, IOContext.DEFAULT);
@@ -88,7 +88,7 @@ public void testVersionsTwoSegments() throws IOException {

sis.commit(dir);
sis = SegmentInfos.readLatestCommit(dir);
-assertEquals(Version.LUCENE_5_0_0, sis.getMinSegmentLuceneVersion());
+assertEquals(Version.LUCENE_6_0_0, sis.getMinSegmentLuceneVersion());
assertEquals(Version.LATEST, sis.getCommitLuceneVersion());
dir.close();
}
@@ -32,25 +32,25 @@ public void testOnOrAfter() throws Exception {
assertTrue("LATEST must be always onOrAfter("+v+")", Version.LATEST.onOrAfter(v));
}
}
-assertTrue(Version.LUCENE_6_0_0.onOrAfter(Version.LUCENE_5_0_0));;
+assertTrue(Version.LUCENE_7_0_0.onOrAfter(Version.LUCENE_6_0_0));;
}

public void testToString() {
assertEquals("5.0.0", Version.LUCENE_5_0_0.toString());
assertEquals("6.0.0", Version.LUCENE_6_0_0.toString());
assertEquals("7.0.0", Version.LUCENE_7_0_0.toString());
}

public void testParseLeniently() throws Exception {
assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("5.0"));
assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("5.0.0"));
assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("LUCENE_50"));
assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("LUCENE_5_0"));
assertEquals(Version.LUCENE_5_0_0, Version.parseLeniently("LUCENE_5_0_0"));
assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("6.0"));
assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("6.0.0"));
assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("LUCENE_60"));
assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("LUCENE_6_0"));
assertEquals(Version.LUCENE_6_0_0, Version.parseLeniently("LUCENE_6_0_0"));
assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("7.0"));
assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("7.0.0"));
assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("LUCENE_70"));
assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("LUCENE_7_0"));
assertEquals(Version.LUCENE_7_0_0, Version.parseLeniently("LUCENE_7_0_0"));
assertEquals(Version.LATEST, Version.parseLeniently("LATEST"));
assertEquals(Version.LATEST, Version.parseLeniently("latest"));
assertEquals(Version.LATEST, Version.parseLeniently("LUCENE_CURRENT"));
@@ -95,15 +95,15 @@ public void testParseLenientlyOnAllConstants() throws Exception {

public void testParse() throws Exception {
assertEquals(Version.LUCENE_6_0_0, Version.parse("6.0.0"));
-assertEquals(Version.LUCENE_5_0_0, Version.parse("5.0.0"));
+assertEquals(Version.LUCENE_7_0_0, Version.parse("7.0.0"));

// Version does not pass judgement on the major version:
assertEquals(1, Version.parse("1.0").major);
assertEquals(7, Version.parse("7.0.0").major);
}

public void testForwardsCompatibility() throws Exception {
assertTrue(Version.parse("5.10.20").onOrAfter(Version.LUCENE_5_0_0));
assertTrue(Version.parse("6.10.20").onOrAfter(Version.LUCENE_6_0_0));
}

public void testParseExceptions() {
