From aaaa61c918336848fb3c6797f947cd336863f8be Mon Sep 17 00:00:00 2001
From: Frank Scholten
Date: Tue, 24 Mar 2015 18:02:02 +0100
Subject: [PATCH] Upgraded to Lucene 4.10.3

---
 .../mahout/classifier/NewsgroupHelper.java    |   2 +-
 integration/pom.xml                           |  12 +-
 .../mahout/text/LuceneSegmentInputFormat.java |   4 +-
 .../mahout/text/LuceneSegmentInputSplit.java  |   4 +-
 .../text/LuceneSegmentRecordReader.java       |   3 +-
 .../text/LuceneStorageConfiguration.java      |   4 +-
 .../text/MailArchivesClusteringAnalyzer.java  |  24 +-
 .../text/ReadOnlyFileSystemDirectory.java     | 354 ------------------
 .../text/SequenceFilesFromLuceneStorage.java  |   1 -
 .../SequenceFilesFromLuceneStorageDriver.java |   3 +-
 .../text/wikipedia/WikipediaAnalyzer.java     |  10 +-
 .../utils/regex/AnalyzerTransformer.java      |   2 +-
 .../mahout/common/lucene/AnalyzerUtils.java   |   4 +-
 .../encoders/TextValueEncoderTest.java        |   2 +-
 pom.xml                                       |   2 +-
 15 files changed, 41 insertions(+), 390 deletions(-)
 delete mode 100644 integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java

diff --git a/examples/src/main/java/org/apache/mahout/classifier/NewsgroupHelper.java b/examples/src/main/java/org/apache/mahout/classifier/NewsgroupHelper.java
index 8b11e9fd59..863e75a6fa 100644
--- a/examples/src/main/java/org/apache/mahout/classifier/NewsgroupHelper.java
+++ b/examples/src/main/java/org/apache/mahout/classifier/NewsgroupHelper.java
@@ -60,7 +60,7 @@ public final class NewsgroupHelper {
   private static final long WEEK = 7 * 24 * 3600;
 
   private final Random rand = RandomUtils.getRandom();
-  private final Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_46);
+  private final Analyzer analyzer = new StandardAnalyzer();
   private final FeatureVectorEncoder encoder = new StaticWordValueEncoder("body");
   private final FeatureVectorEncoder bias = new ConstantValueEncoder("Intercept");
 
diff --git a/integration/pom.xml b/integration/pom.xml
index 84317fd653..bd3366d2f4 100644
--- a/integration/pom.xml
+++ b/integration/pom.xml
@@ -122,7 +122,17 @@
       <artifactId>lucene-analyzers-common</artifactId>
       <optional>true</optional>
     </dependency>
-
+    <dependency>
+      <groupId>org.apache.solr</groupId>
+      <artifactId>solr-core</artifactId>
+      <version>${lucene.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-httpclient</groupId>
+      <artifactId>commons-httpclient</artifactId>
+      <version>3.1</version>
+    </dependency>
+
     <dependency>
       <groupId>org.mongodb</groupId>
       <artifactId>mongo-java-driver</artifactId>
diff --git a/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputFormat.java b/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputFormat.java
index fabca54239..828aaa0746 100644
--- a/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputFormat.java
+++ b/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputFormat.java
@@ -29,6 +29,7 @@
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.lucene.index.SegmentCommitInfo;
 import org.apache.lucene.index.SegmentInfos;
+import org.apache.solr.store.hdfs.HdfsDirectory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,8 +53,7 @@ public List<LuceneSegmentInputSplit> getSplits(JobContext context) throws IOExce
 
     List<Path> indexPaths = lucene2SeqConfiguration.getIndexPaths();
     for (Path indexPath : indexPaths) {
-      ReadOnlyFileSystemDirectory directory = new ReadOnlyFileSystemDirectory(FileSystem.get(configuration), indexPath,
-          false, configuration);
+      HdfsDirectory directory = new HdfsDirectory(indexPath, configuration);
       SegmentInfos segmentInfos = new SegmentInfos();
       segmentInfos.read(directory);
 
diff --git a/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputSplit.java b/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputSplit.java
index 1441e32e7d..f30c7fbd71 100644
--- a/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputSplit.java
+++ b/integration/src/main/java/org/apache/mahout/text/LuceneSegmentInputSplit.java
@@ -24,6 +24,7 @@
 import org.apache.lucene.index.SegmentCommitInfo;
 import org.apache.lucene.index.SegmentInfo;
 import org.apache.lucene.index.SegmentInfos;
+import org.apache.solr.store.hdfs.HdfsDirectory;
 
 import java.io.DataInput;
 import java.io.DataOutput;
@@ -88,8 +89,7 @@ public void readFields(DataInput in) throws IOException {
    * @throws IOException if an error occurs when accessing the directory
    */
  public SegmentCommitInfo getSegment(Configuration configuration) throws IOException {
-    ReadOnlyFileSystemDirectory directory = new ReadOnlyFileSystemDirectory(FileSystem.get(configuration), indexPath,
-        false, configuration);
+    HdfsDirectory directory = new HdfsDirectory(indexPath, configuration);
     SegmentInfos segmentInfos = new SegmentInfos();
     segmentInfos.read(directory);
 
diff --git a/integration/src/main/java/org/apache/mahout/text/LuceneSegmentRecordReader.java b/integration/src/main/java/org/apache/mahout/text/LuceneSegmentRecordReader.java
index a0aa6b0744..86f76c0eb3 100644
--- a/integration/src/main/java/org/apache/mahout/text/LuceneSegmentRecordReader.java
+++ b/integration/src/main/java/org/apache/mahout/text/LuceneSegmentRecordReader.java
@@ -62,9 +62,8 @@ public void initialize(InputSplit split, TaskAttemptContext context) throws IOEx
     for (String field : lucene2SeqConfiguration.getFields()) {
       LuceneIndexHelper.fieldShouldExistInIndex(segmentReader, field);
     }
-
     Weight weight = lucene2SeqConfiguration.getQuery().createWeight(searcher);
-    scorer = weight.scorer(segmentReader.getContext(), false, false, null);
+    scorer = weight.scorer(segmentReader.getContext(), segmentReader.getLiveDocs());
     if (scorer == null) {
       throw new IllegalArgumentException("Could not create query scorer for query: "
           + lucene2SeqConfiguration.getQuery());
diff --git a/integration/src/main/java/org/apache/mahout/text/LuceneStorageConfiguration.java b/integration/src/main/java/org/apache/mahout/text/LuceneStorageConfiguration.java
index 88f86c579e..544a11b383 100644
--- a/integration/src/main/java/org/apache/mahout/text/LuceneStorageConfiguration.java
+++ b/integration/src/main/java/org/apache/mahout/text/LuceneStorageConfiguration.java
@@ -31,6 +31,7 @@
 import org.apache.lucene.queryparser.classic.QueryParser;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.util.Version;
 import org.apache.mahout.common.Pair;
 import org.apache.mahout.common.iterator.sequencefile.PathFilters;
 import org.apache.mahout.common.iterator.sequencefile.PathType;
@@ -44,7 +45,6 @@
 import java.util.List;
 import java.util.Set;
 
-import static org.apache.lucene.util.Version.LUCENE_46;
 
 /**
  * Holds all the configuration for {@link SequenceFilesFromLuceneStorage}, which generates a sequence file
@@ -212,7 +212,7 @@ public void readFields(DataInput in) throws IOException {
       }
       idField = in.readUTF();
       fields = Arrays.asList(in.readUTF().split(SEPARATOR_FIELDS));
-      query = new QueryParser(LUCENE_46, "query", new StandardAnalyzer(LUCENE_46)).parse(in.readUTF());
+      query = new QueryParser(Version.LUCENE_4_10_3, "query", new StandardAnalyzer(Version.LUCENE_4_10_3)).parse(in.readUTF());
       maxHits = in.readInt();
     } catch (ParseException e) {
       throw new RuntimeException("Could not deserialize " + this.getClass().getName(), e);
diff --git a/integration/src/main/java/org/apache/mahout/text/MailArchivesClusteringAnalyzer.java b/integration/src/main/java/org/apache/mahout/text/MailArchivesClusteringAnalyzer.java
index a7503e192c..f9b5b8b501 100644
--- a/integration/src/main/java/org/apache/mahout/text/MailArchivesClusteringAnalyzer.java
+++ b/integration/src/main/java/org/apache/mahout/text/MailArchivesClusteringAnalyzer.java
@@ -21,6 +21,8 @@
 import java.util.Arrays;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
+
+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
@@ -33,6 +35,7 @@
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.analysis.util.CharArraySet;
 import org.apache.lucene.analysis.util.StopwordAnalyzerBase;
+import org.apache.lucene.util.AttributeFactory;
 import org.apache.lucene.util.Version;
 
 /**
@@ -41,13 +44,13 @@
  * stop words, excluding non-alpha-numeric tokens, and porter stemming.
  */
 public final class MailArchivesClusteringAnalyzer extends StopwordAnalyzerBase {
-  private static final Version LUCENE_VERSION = Version.LUCENE_46;
-
+  private static final Version LUCENE_VERSION = Version.LUCENE_4_10_3;
+
   // extended set of stop words composed of common mail terms like "hi",
   // HTML tags, and Java keywords asmany of the messages in the archives
   // are subversion check-in notifications
-  private static final CharArraySet STOP_SET = new CharArraySet(LUCENE_VERSION, Arrays.asList(
+  private static final CharArraySet STOP_SET = new CharArraySet(Arrays.asList(
     "3d","7bit","a0","about","above","abstract","across","additional","after",
     "afterwards","again","against","align","all","almost","alone","along",
     "already","also","although","always","am","among","amongst","amoungst",
@@ -107,22 +110,17 @@ public final class MailArchivesClusteringAnalyzer extends StopwordAnalyzerBase {
   private static final Matcher MATCHER = ALPHA_NUMERIC.matcher("");
 
   public MailArchivesClusteringAnalyzer() {
-    super(LUCENE_VERSION, STOP_SET);
+    super(STOP_SET);
   }
 
-  public MailArchivesClusteringAnalyzer(CharArraySet stopSet) {
-    super(LUCENE_VERSION, stopSet);
-
-  }
-
   @Override
   protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
-    Tokenizer tokenizer = new StandardTokenizer(LUCENE_VERSION, reader);
-    TokenStream result = new StandardFilter(LUCENE_VERSION, tokenizer);
-    result = new LowerCaseFilter(LUCENE_VERSION, result);
+    Tokenizer tokenizer = new StandardTokenizer(reader);
+    TokenStream result = new StandardFilter(tokenizer);
+    result = new LowerCaseFilter(result);
     result = new ASCIIFoldingFilter(result);
     result = new AlphaNumericMaxLengthFilter(result);
-    result = new StopFilter(LUCENE_VERSION, result, STOP_SET);
+    result = new StopFilter(result, STOP_SET);
     result = new PorterStemFilter(result);
     return new TokenStreamComponents(tokenizer, result);
   }
diff --git a/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java b/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java
deleted file mode 100644
index e97e35bf03..0000000000
--- a/integration/src/main/java/org/apache/mahout/text/ReadOnlyFileSystemDirectory.java
+++ /dev/null
@@ -1,354 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.mahout.text;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.lucene.store.BaseDirectory;
-import org.apache.lucene.store.BufferedIndexInput;
-import org.apache.lucene.store.BufferedIndexOutput;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.Lock;
-import org.apache.lucene.store.LockFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Collection;
-
-//TODO: is there a better way of doing this in Lucene 4.x?
-
-/**
- * This class implements a read-only Lucene Directory on top of a general FileSystem.
- * Currently it does not support locking.
- * <p/>
- * // TODO: Rename to FileSystemReadOnlyDirectory
- */
-public class ReadOnlyFileSystemDirectory extends BaseDirectory {
-
-  private final FileSystem fs;
-  private final Path directory;
-  private final int ioFileBufferSize;
-
-  private static final Logger log = LoggerFactory.getLogger(ReadOnlyFileSystemDirectory.class);
-
-  /**
-   * Constructor
-   *
-   * @param fs - filesystem
-   * @param directory - directory path
-   * @param create - if true create the directory
-   * @param conf - MR Job Configuration
-   * @throws IOException
-   */
-
-  public ReadOnlyFileSystemDirectory(FileSystem fs, Path directory, boolean create,
-                                     Configuration conf) throws IOException {
-
-    this.fs = fs;
-    this.directory = directory;
-    this.ioFileBufferSize = conf.getInt("io.file.buffer.size", 4096);
-
-    if (create) {
-      create();
-    }
-
-    boolean isDir = false;
-    try {
-      FileStatus status = fs.getFileStatus(directory);
-      if (status != null) {
-        isDir = status.isDir();
-      }
-    } catch (IOException e) {
-      log.error(e.getMessage(), e);
-    }
-    if (!isDir) {
-      throw new IOException(directory + " is not a directory");
-    }
-  }
-
-
-  private void create() throws IOException {
-    if (!fs.exists(directory)) {
-      fs.mkdirs(directory);
-    }
-
-    boolean isDir = false;
-    try {
-      FileStatus status = fs.getFileStatus(directory);
-      if (status != null) {
-        isDir = status.isDir();
-      }
-    } catch (IOException e) {
-      log.error(e.getMessage(), e);
-    }
-    if (!isDir) {
-      throw new IOException(directory + " is not a directory");
-    }
-
-    // clear old index files
-    FileStatus[] fileStatus =
-        fs.listStatus(directory, LuceneIndexFileNameFilter.getFilter());
-    for (FileStatus status : fileStatus) {
-      if (!fs.delete(status.getPath(), true)) {
-        throw new IOException("Cannot delete index file "
-            + status.getPath());
-      }
-    }
-  }
-
-  public String[] list() throws IOException {
-    FileStatus[] fileStatus =
-        fs.listStatus(directory, LuceneIndexFileNameFilter.getFilter());
-    String[] result = new String[fileStatus.length];
-    for (int i = 0; i < fileStatus.length; i++) {
-      result[i] = fileStatus[i].getPath().getName();
-    }
-    return result;
-  }
-
-  @Override
-  public String[] listAll() throws IOException {
-    return list();
-  }
-
-  @Override
-  public boolean fileExists(String name) throws IOException {
-    return fs.exists(new Path(directory, name));
-  }
-
-  @Override
-  public long fileLength(String name) throws IOException {
-    return fs.getFileStatus(new Path(directory, name)).getLen();
-  }
-
-  @Override
-  public void deleteFile(String name) throws IOException {
-    if (!fs.delete(new Path(directory, name), true)) {
-      throw new IOException("Cannot delete index file " + name);
-    }
-  }
-
-  @Override
-  public IndexOutput createOutput(String name, IOContext context) throws IOException {
-    //TODO: What should we be doing with the IOContext here, if anything?
-    Path file = new Path(directory, name);
-    if (fs.exists(file) && !fs.delete(file, true)) {
-      // delete the existing one if applicable
-      throw new IOException("Cannot overwrite index file " + file);
-    }
-
-    return new FileSystemIndexOutput(file, ioFileBufferSize);
-  }
-
-  @Override
-  public void sync(Collection<String> names) throws IOException {
-    // do nothing, as this is read-only
-  }
-
-  @Override
-  public IndexInput openInput(String name, IOContext context) throws IOException {
-    return new FileSystemIndexInput(new Path(directory, name), ioFileBufferSize);
-  }
-
-  @Override
-  public Lock makeLock(final String name) {
-    return new Lock() {
-      public boolean obtain() {
-        return true;
-      }
-
-      public void release() {
-      }
-
-      public boolean isLocked() {
-        throw new UnsupportedOperationException();
-      }
-
-      public String toString() {
-        return "Lock@" + new Path(directory, name);
-      }
-    };
-  }
-
-  @Override
-  public void clearLock(String name) throws IOException {
-    // do nothing
-  }
-
-  @Override
-  public void close() throws IOException {
-    // do not close the file system
-  }
-
-  @Override
-  public void setLockFactory(LockFactory lockFactory) throws IOException {
-    // do nothing
-  }
-
-  @Override
-  public LockFactory getLockFactory() {
-    return null;
-  }
-
-  @Override
-  public String toString() {
-    return this.getClass().getName() + "@" + directory;
-  }
-
-  private class FileSystemIndexInput extends BufferedIndexInput implements Cloneable {
-
-    // shared by clones
-    private class Descriptor {
-      public final FSDataInputStream in;
-      public long position; // cache of in.getPos()
-
-      public Descriptor(Path file, int ioFileBufferSize) throws IOException {
-        this.in = fs.open(file, ioFileBufferSize);
-      }
-    }
-
-    private final Path filePath; // for debugging
-    private final Descriptor descriptor;
-    private final long length;
-    private boolean isOpen;
-    private boolean isClone;
-
-    public FileSystemIndexInput(Path path, int ioFileBufferSize)
-        throws IOException {
-      super("FSII_" + path.getName(), ioFileBufferSize);
-      filePath = path;
-      descriptor = new Descriptor(path, ioFileBufferSize);
-      length = fs.getFileStatus(path).getLen();
-      isOpen = true;
-    }
-
-    @Override
-    protected void readInternal(byte[] b, int offset, int len)
-        throws IOException {
-      long position = getFilePointer();
-      if (position != descriptor.position) {
-        descriptor.in.seek(position);
-        descriptor.position = position;
-      }
-      int total = 0;
-      do {
-        int i = descriptor.in.read(b, offset + total, len - total);
-        if (i == -1) {
-          throw new IOException("Read past EOF");
-        }
-        descriptor.position += i;
-        total += i;
-      } while (total < len);
-    }
-
-    @Override
-    public void close() throws IOException {
-      if (!isClone) {
-        if (isOpen) {
-          descriptor.in.close();
-          isOpen = false;
-        } else {
-          throw new IOException("Index file " + filePath + " already closed");
-        }
-      }
-    }
-
-    @Override
-    protected void seekInternal(long position) {
-      // handled in readInternal()
-    }
-
-    @Override
-    public long length() {
-      return length;
-    }
-
-    @Override
-    protected void finalize() throws Throwable {
-      super.finalize();
-      if (!isClone && isOpen) {
-        close(); // close the file
-      }
-    }
-
-    @Override
-    public BufferedIndexInput clone() {
-      FileSystemIndexInput clone = (FileSystemIndexInput) super.clone();
-      clone.isClone = true;
-      return clone;
-    }
-  }
-
-  private class FileSystemIndexOutput extends BufferedIndexOutput {
-
-    private final Path filePath; // for debugging
-    private final FSDataOutputStream out;
-    private boolean isOpen;
-
-    public FileSystemIndexOutput(Path path, int ioFileBufferSize)
-        throws IOException {
-      filePath = path;
-      // overwrite is true by default
-      out = fs.create(path, true, ioFileBufferSize);
-      isOpen = true;
-    }
-
-    @Override
-    public void flushBuffer(byte[] b, int offset, int size) throws IOException {
-      out.write(b, offset, size);
-    }
-
-    @Override
-    public void close() throws IOException {
-      if (isOpen) {
-        super.close();
-        out.close();
-        isOpen = false;
-      } else {
-        throw new IOException("Index file " + filePath + " already closed");
-      }
-    }
-
-    @Override
-    public void seek(long pos) throws IOException {
-      throw new UnsupportedOperationException();
-    }
-
-    @Override
-    public long length() throws IOException {
-      return out.getPos();
-    }
-
-    @Override
-    protected void finalize() throws Throwable {
-      super.finalize();
-      if (isOpen) {
-        close(); // close the file
-      }
-    }
-  }
-
-}
diff --git a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorage.java b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorage.java
index b7fd495f14..4906d3a9ef 100644
--- a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorage.java
+++ b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorage.java
@@ -82,7 +82,6 @@ public void run(final LuceneStorageConfiguration lucene2seqConf) throws IOExcept
       processedDocs = writerCollector.processedDocs;
       Closeables.close(sequenceFileWriter, false);
       directory.close();
-      //searcher.close();
       reader.close();
     }
   }
diff --git a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageDriver.java b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageDriver.java
index d3903ddfa0..2e19701f83 100644
--- a/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageDriver.java
+++ b/integration/src/main/java/org/apache/mahout/text/SequenceFilesFromLuceneStorageDriver.java
@@ -97,8 +97,7 @@ public int run(String[] args) throws Exception {
     if (hasOption(OPTION_QUERY)) {
       try {
         String queryString = COMPILE.matcher(getOption(OPTION_QUERY)).replaceAll("");
-        QueryParser queryParser = new QueryParser(Version.LUCENE_46, queryString,
-            new StandardAnalyzer(Version.LUCENE_46));
+        QueryParser queryParser = new QueryParser(queryString, new StandardAnalyzer());
         query = queryParser.parse(queryString);
       } catch (ParseException e) {
         throw new IllegalArgumentException(e.getMessage(), e);
diff --git a/integration/src/main/java/org/apache/mahout/text/wikipedia/WikipediaAnalyzer.java b/integration/src/main/java/org/apache/mahout/text/wikipedia/WikipediaAnalyzer.java
index d9df97f81d..aeab16e779 100644
--- a/integration/src/main/java/org/apache/mahout/text/wikipedia/WikipediaAnalyzer.java
+++ b/integration/src/main/java/org/apache/mahout/text/wikipedia/WikipediaAnalyzer.java
@@ -33,19 +33,19 @@
 public class WikipediaAnalyzer extends StopwordAnalyzerBase {
   
   public WikipediaAnalyzer() {
-    super(Version.LUCENE_46, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
+    super(StopAnalyzer.ENGLISH_STOP_WORDS_SET);
   }
   
   public WikipediaAnalyzer(CharArraySet stopSet) {
-    super(Version.LUCENE_46, stopSet);
+    super(stopSet);
   }
   
   @Override
   protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
     Tokenizer tokenizer = new WikipediaTokenizer(reader);
-    TokenStream result = new StandardFilter(Version.LUCENE_46, tokenizer);
-    result = new LowerCaseFilter(Version.LUCENE_46, result);
-    result = new StopFilter(Version.LUCENE_46, result, getStopwordSet());
+    TokenStream result = new StandardFilter(tokenizer);
+    result = new LowerCaseFilter(result);
+    result = new StopFilter(result, getStopwordSet());
     return new TokenStreamComponents(tokenizer, result);
   }
 }
diff --git a/integration/src/main/java/org/apache/mahout/utils/regex/AnalyzerTransformer.java b/integration/src/main/java/org/apache/mahout/utils/regex/AnalyzerTransformer.java
index a7f0e67908..e5cefba45e 100644
--- a/integration/src/main/java/org/apache/mahout/utils/regex/AnalyzerTransformer.java
+++ b/integration/src/main/java/org/apache/mahout/utils/regex/AnalyzerTransformer.java
@@ -38,7 +38,7 @@ public class AnalyzerTransformer implements RegexTransformer {
   private static final Logger log = LoggerFactory.getLogger(AnalyzerTransformer.class);
 
   public AnalyzerTransformer() {
-    this(new StandardAnalyzer(Version.LUCENE_46), "text");
+    this(new StandardAnalyzer(), "text");
   }
 
   public AnalyzerTransformer(Analyzer analyzer) {
diff --git a/mrlegacy/src/main/java/org/apache/mahout/common/lucene/AnalyzerUtils.java b/mrlegacy/src/main/java/org/apache/mahout/common/lucene/AnalyzerUtils.java
index 37ca383d4a..cfaac07a93 100644
--- a/mrlegacy/src/main/java/org/apache/mahout/common/lucene/AnalyzerUtils.java
+++ b/mrlegacy/src/main/java/org/apache/mahout/common/lucene/AnalyzerUtils.java
@@ -32,7 +32,7 @@ private AnalyzerUtils() {}
    * @throws ClassNotFoundException - {@link ClassNotFoundException}
    */
   public static Analyzer createAnalyzer(String analyzerClassName) throws ClassNotFoundException {
-    return createAnalyzer(analyzerClassName, Version.LUCENE_46);
+    return createAnalyzer(analyzerClassName, Version.LUCENE_4_10_3);
   }
 
   public static Analyzer createAnalyzer(String analyzerClassName, Version version) throws ClassNotFoundException {
@@ -47,7 +47,7 @@ public static Analyzer createAnalyzer(String analyzerClassName, Version version)
    * @return {@link Analyzer}
    */
   public static Analyzer createAnalyzer(Class<? extends Analyzer> analyzerClass) {
-    return createAnalyzer(analyzerClass, Version.LUCENE_46);
+    return createAnalyzer(analyzerClass, Version.LUCENE_4_10_3);
   }
 
   public static Analyzer createAnalyzer(Class<? extends Analyzer> analyzerClass, Version version) {
diff --git a/mrlegacy/src/test/java/org/apache/mahout/vectorizer/encoders/TextValueEncoderTest.java b/mrlegacy/src/test/java/org/apache/mahout/vectorizer/encoders/TextValueEncoderTest.java
index 4446fef9a8..3b7c93ee7a 100644
--- a/mrlegacy/src/test/java/org/apache/mahout/vectorizer/encoders/TextValueEncoderTest.java
+++ b/mrlegacy/src/test/java/org/apache/mahout/vectorizer/encoders/TextValueEncoderTest.java
@@ -70,7 +70,7 @@ public void testAsString() {
   @Test
   public void testLuceneEncoding() throws Exception {
     LuceneTextValueEncoder enc = new LuceneTextValueEncoder("text");
-    enc.setAnalyzer(new WhitespaceAnalyzer(Version.LUCENE_46));
+    enc.setAnalyzer(new WhitespaceAnalyzer());
     Vector v1 = new DenseVector(200);
     enc.addToVector("test1 and more", v1);
     enc.flush(1, v1);
diff --git a/pom.xml b/pom.xml
index 4f5fe8803a..f7029bad4b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -107,7 +107,7 @@
     <hadoop2.version>2.2.0</hadoop2.version>
     <hadoop.classifier>hadoop2</hadoop.classifier>
     <hbase.version>0.98.0-${hadoop.classifier}</hbase.version>
-    <lucene.version>4.6.1</lucene.version>
+    <lucene.version>4.10.3</lucene.version>
    <slf4j.version>1.7.5</slf4j.version>
     <scala.major>2.10</scala.major>
     <scala.version>2.10.4</scala.version>
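
Note for reviewers (not part of the diff above): the change repeated across these
files is the removal of the org.apache.lucene.util.Version argument, which Lucene
4.10.x deprecates in favor of Version-less constructors that default to
Version.LATEST. Below is a minimal, self-contained sketch of that style; the class
name, token chain, and sample input are illustrative rather than code from the
patch, and it assumes lucene-core and lucene-analyzers-common 4.10.3 on the
classpath.

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.LowerCaseFilter;
import org.apache.lucene.analysis.core.StopAnalyzer;
import org.apache.lucene.analysis.core.StopFilter;
import org.apache.lucene.analysis.standard.StandardFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

// Hypothetical demo class, not part of this patch.
public class VersionlessAnalyzerSketch {

  // Same chain shape as WikipediaAnalyzer after this patch, minus the wiki
  // tokenizer: no Version is threaded through the tokenizer or the filters.
  static Analyzer newAnalyzer() {
    return new Analyzer() {
      @Override
      protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        Tokenizer tokenizer = new StandardTokenizer(reader);
        TokenStream result = new StandardFilter(tokenizer);
        result = new LowerCaseFilter(result);
        result = new StopFilter(result, StopAnalyzer.ENGLISH_STOP_WORDS_SET);
        return new TokenStreamComponents(tokenizer, result);
      }
    };
  }

  public static void main(String[] args) throws IOException {
    // Version-less analysis components pick up Version.LATEST internally.
    try (Analyzer analyzer = newAnalyzer();
         TokenStream ts = analyzer.tokenStream("body", new StringReader("The Quick Brown Fox"))) {
      CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
      ts.reset();
      while (ts.incrementToken()) {
        System.out.println(term.toString());  // prints: quick, brown, fox
      }
      ts.end();
    }
  }
}

The deprecated Version-taking constructors still compile against 4.10.3, which is
why LuceneStorageConfiguration.readFields() above can keep passing
Version.LUCENE_4_10_3 to QueryParser and StandardAnalyzer explicitly while the
rest of the patch drops the argument entirely.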