This repository has been archived by the owner on Mar 27, 2020. It is now read-only.

Update to elasticsearch 1.0.0.RC1
johtani committed Jan 18, 2014
1 parent fe3665a commit 627a86a
Showing 6 changed files with 29 additions and 35 deletions.
2 changes: 1 addition & 1 deletion NOTICE
@@ -1,4 +1,4 @@
-Copyright 2013, Jun Ohtani
+Copyright 2013-2014, Jun Ohtani

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
10 changes: 6 additions & 4 deletions README.md
@@ -7,10 +7,12 @@ And the plugin output tokens step by step.

Similar functionality to Solr admin UI analysis page.

-|Plugin |Elasticsearch |Release date|
-|--------|---------------|------------|
-|0.6.0 | 0.90.7->master| 2013-11-19 |
-|0.5 | 0.90.7->master| 2013-11-14 |
+|Plugin |Elasticsearch |Release date|
+|---------|-------------------|------------|
+|1.0.0.RC1| 1.0.0.RC1->master | 2014-01-18 |
+|0.7.0 | 0.90.7->0.90 | 2013-11-28 |
+|0.6.0 | 0.90.7->master | 2013-11-19 |
+|0.5 | 0.90.7->master | 2013-11-14 |

### Feature

6 changes: 3 additions & 3 deletions pom.xml
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>info.johtani</groupId>
<artifactId>elasticsearch-extended-analyze</artifactId>
-<version>0.8.0-SNAPSHOT</version>
+<version>1.0.0.RC1-SNAPSHOT</version>
<packaging>jar</packaging>
<description>Extend _analyze API plugin for ElasticSearch</description>
<url>https://github.com/johtani/elasticsearch-extended-analyze</url>
@@ -28,8 +28,8 @@
</parent>

<properties>
-<elasticsearch.version>0.90.7</elasticsearch.version>
-<lucene.version>4.5.1</lucene.version>
+<elasticsearch.version>1.0.0.RC1</elasticsearch.version>
+<lucene.version>4.6.0</lucene.version>
</properties>

<dependencies>
src/main/java/info/johtani/elasticsearch/action/admin/indices/extended/analyze/ExtendedAnalyzeRequest.java
@@ -25,9 +25,6 @@

import static org.elasticsearch.action.ValidateActions.*;

-/**
- * TODO: extends AnalyzeRequest? this implement is not smart...
- */
public class ExtendedAnalyzeRequest extends SingleCustomOperationRequest<ExtendedAnalyzeRequest> {

private String index;
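The request class itself is unchanged here apart from dropping the TODO comment. For orientation, a hypothetical client-side sketch of driving it follows. The constructor, the setters (text(), analyzer()), and ExtendedAnalyzeAction.INSTANCE are assumptions inferred from the accessors used in the transport action below and from standard Elasticsearch action conventions; they are not confirmed by this commit.

import info.johtani.elasticsearch.action.admin.indices.extended.analyze.ExtendedAnalyzeAction;
import info.johtani.elasticsearch.action.admin.indices.extended.analyze.ExtendedAnalyzeRequest;
import info.johtani.elasticsearch.action.admin.indices.extended.analyze.ExtendedAnalyzeResponse;
import org.elasticsearch.client.Client;

public class ExtendedAnalyzeUsageSketch {
    // `client` is any connected Client (node or transport client).
    static ExtendedAnalyzeResponse stepByStep(Client client) {
        // Constructor and setters are assumed; the transport action reads
        // index(), analyzer(), tokenizer(), field() and attributes() off this request.
        ExtendedAnalyzeRequest request = new ExtendedAnalyzeRequest("test_index");
        request.text("This is a test");
        request.analyzer("standard");
        // INSTANCE follows the usual Elasticsearch action singleton convention.
        return client.admin().indices()
                .execute(ExtendedAnalyzeAction.INSTANCE, request)
                .actionGet();
    }
}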
src/main/java/info/johtani/elasticsearch/action/admin/indices/extended/analyze/TransportExtendedAnalyzeAction.java
@@ -25,9 +25,8 @@
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeReflector;
import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.CollectionUtil;
-import org.elasticsearch.ElasticSearchException;
-import org.elasticsearch.ElasticSearchIllegalArgumentException;
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.support.single.custom.TransportSingleCustomOperationAction;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@@ -53,7 +52,6 @@
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
-import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -118,7 +116,7 @@ protected ShardsIterator shards(ClusterState state, ExtendedAnalyzeRequest reque
}

@Override
-protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request, int shardId) throws ElasticSearchException {
+protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request, int shardId) throws ElasticsearchException {
IndexService indexService = null;
if (request.index() != null) {
indexService = indicesService.indexServiceSafe(request.index());
@@ -128,12 +126,12 @@ protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request,
String field = null;
if (request.field() != null) {
if (indexService == null) {
throw new ElasticSearchIllegalArgumentException("No index provided, and trying to analyzer based on a specific field which requires the index parameter");
throw new ElasticsearchIllegalArgumentException("No index provided, and trying to analyzer based on a specific field which requires the index parameter");
}
FieldMapper<?> fieldMapper = indexService.mapperService().smartNameFieldMapper(request.field());
if (fieldMapper != null) {
if (fieldMapper.isNumeric()) {
throw new ElasticSearchIllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields");
throw new ElasticsearchIllegalArgumentException("Can't process field [" + request.field() + "], Analysis requests are not supported on numeric fields");
}
analyzer = fieldMapper.indexAnalyzer();
field = fieldMapper.names().indexName();
@@ -154,20 +152,20 @@ protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request,
analyzer = indexService.analysisService().analyzer(request.analyzer());
}
if (analyzer == null) {
throw new ElasticSearchIllegalArgumentException("failed to find analyzer [" + request.analyzer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find analyzer [" + request.analyzer() + "]");
}
} else if (request.tokenizer() != null) {
TokenizerFactory tokenizerFactory;
if (indexService == null) {
TokenizerFactoryFactory tokenizerFactoryFactory = indicesAnalysisService.tokenizerFactoryFactory(request.tokenizer());
if (tokenizerFactoryFactory == null) {
throw new ElasticSearchIllegalArgumentException("failed to find global tokenizer under [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find global tokenizer under [" + request.tokenizer() + "]");
}
tokenizerFactory = tokenizerFactoryFactory.create(request.tokenizer(), ImmutableSettings.Builder.EMPTY_SETTINGS);
} else {
tokenizerFactory = indexService.analysisService().tokenizer(request.tokenizer());
if (tokenizerFactory == null) {
throw new ElasticSearchIllegalArgumentException("failed to find tokenizer under [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find tokenizer under [" + request.tokenizer() + "]");
}
}
TokenFilterFactory[] tokenFilterFactories = new TokenFilterFactory[0];
@@ -178,17 +176,17 @@ protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request,
if (indexService == null) {
TokenFilterFactoryFactory tokenFilterFactoryFactory = indicesAnalysisService.tokenFilterFactoryFactory(tokenFilterName);
if (tokenFilterFactoryFactory == null) {
throw new ElasticSearchIllegalArgumentException("failed to find global token filter under [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find global token filter under [" + request.tokenizer() + "]");
}
tokenFilterFactories[i] = tokenFilterFactoryFactory.create(tokenFilterName, ImmutableSettings.Builder.EMPTY_SETTINGS);
} else {
tokenFilterFactories[i] = indexService.analysisService().tokenFilter(tokenFilterName);
if (tokenFilterFactories[i] == null) {
throw new ElasticSearchIllegalArgumentException("failed to find token filter under [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find token filter under [" + request.tokenizer() + "]");
}
}
if (tokenFilterFactories[i] == null) {
throw new ElasticSearchIllegalArgumentException("failed to find token filter under [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find token filter under [" + request.tokenizer() + "]");
}
}
}
@@ -200,17 +198,17 @@ protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request,
if (indexService == null) {
CharFilterFactoryFactory charFilterFactoryFactory = indicesAnalysisService.charFilterFactoryFactory(charFilterName);
if (charFilterFactoryFactory == null) {
throw new ElasticSearchIllegalArgumentException("failed to find global char filter top [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find global char filter top [" + request.tokenizer() + "]");
}
charFilterFactories[i] = charFilterFactoryFactory.create(charFilterName, ImmutableSettings.Builder.EMPTY_SETTINGS);
} else {
charFilterFactories[i] = indexService.analysisService().charFilter(charFilterName);
if (charFilterFactories[i] == null) {
throw new ElasticSearchIllegalArgumentException("failed to find char filter top [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find char filter top [" + request.tokenizer() + "]");
}
}
if (charFilterFactories[i] == null) {
throw new ElasticSearchIllegalArgumentException("failed to find char filter top [" + request.tokenizer() + "]");
throw new ElasticsearchIllegalArgumentException("failed to find char filter top [" + request.tokenizer() + "]");
}
}
}
@@ -224,7 +222,7 @@ protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request,
}
}
if (analyzer == null) {
throw new ElasticSearchIllegalArgumentException("failed to find analyzer");
throw new ElasticsearchIllegalArgumentException("failed to find analyzer");
}

ExtendedAnalyzeResponse response = buildResponse(request, analyzer, closeAnalyzer, field);
@@ -235,7 +233,6 @@ protected ExtendedAnalyzeResponse shardOperation(ExtendedAnalyzeRequest request,
private ExtendedAnalyzeResponse buildResponse(ExtendedAnalyzeRequest request, Analyzer analyzer, boolean closeAnalyzer, String field) {
ExtendedAnalyzeResponse response = new ExtendedAnalyzeResponse();
TokenStream stream = null;
-List<ExtendedAnalyzeResponse.ExtendedAnalyzeToken> tokens = null;
final Set<String> includeAttibutes = Sets.newHashSet();
if (request.attributes() != null && request.attributes().length > 0) {
for (String attribute : request.attributes()) {
@@ -274,7 +271,6 @@ private ExtendedAnalyzeResponse buildResponse(ExtendedAnalyzeRequest request, An
for (int i = 0; i < tokenfilters.length; i++) {
stream = createStackedTokenStream(source, tokenizer, tokenfilters, i + 1);
response.addTokenfilter(new ExtendedAnalyzeResponse.ExtendedAnalyzeTokenList(tokenfilters[i].name(), processAnalysis(stream, includeAttibutes)));
-//FIXME implement freezeStage

stream.close();
}
@@ -293,7 +289,7 @@ private ExtendedAnalyzeResponse buildResponse(ExtendedAnalyzeRequest request, An

}
} catch (IOException e) {
throw new ElasticSearchException("failed to analyze", e);
throw new ElasticsearchException("failed to analyze", e);
} finally {
if (stream != null) {
try {
@@ -331,7 +327,7 @@ private String writeCharStream(Reader input) {
try {
len = input.read(buf, 0, BUFFER_SIZE);
} catch (IOException e) {
throw new ElasticSearchException("failed to analyze (charfiltering)", e);
throw new ElasticsearchException("failed to analyze (charfiltering)", e);
}
if (len > 0)
sb.append(buf, 0, len);
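In buildResponse above, each token filter stage is produced by re-running the chain from scratch: createStackedTokenStream(source, tokenizer, tokenfilters, i + 1) applies the tokenizer plus the first i + 1 filters, so the response can report the tokens after every stage. The helper itself is outside this diff; what follows is a minimal sketch of such a helper, assuming the Elasticsearch 1.0 analysis factory signatures (TokenizerFactory.create(Reader) and TokenFilterFactory.create(TokenStream)). The real implementation in this plugin may differ in detail.

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;

class StackedStreamSketch {
    // Rebuilds the chain with only the first `current` filters applied, so each
    // call yields the token stream as it looks after that stage.
    static TokenStream createStackedTokenStream(String source, TokenizerFactory tokenizer,
                                                TokenFilterFactory[] filters, int current) {
        TokenStream stream = tokenizer.create(new StringReader(source)); // fresh tokenizer per stage
        for (int i = 0; i < current; i++) {
            stream = filters[i].create(stream); // wrap filters 0..current-1
        }
        return stream;
    }
}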
src/main/java/info/johtani/elasticsearch/rest/action/admin/indices/analyze/RestExtendedAnalyzeAction.java
@@ -15,9 +15,8 @@
*/
package info.johtani.elasticsearch.rest.action.admin.indices.analyze;

-import org.elasticsearch.ElasticSearchIllegalArgumentException;
+import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.ActionListener;
-import info.johtani.elasticsearch.action.admin.indices.extended.analyze.*;
import info.johtani.elasticsearch.action.admin.indices.extended.analyze.ExtendedAnalyzeAction;
import info.johtani.elasticsearch.action.admin.indices.extended.analyze.ExtendedAnalyzeRequest;
import info.johtani.elasticsearch.action.admin.indices.extended.analyze.ExtendedAnalyzeResponse;
@@ -55,7 +54,7 @@ public void handleRequest(final RestRequest request, final RestChannel channel)
}
if (text == null) {
try {
-channel.sendResponse(new XContentThrowableRestResponse(request, new ElasticSearchIllegalArgumentException("text is missing")));
+channel.sendResponse(new XContentThrowableRestResponse(request, new ElasticsearchIllegalArgumentException("text is missing")));
} catch (IOException e1) {
logger.warn("Failed to send response", e1);
}
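Nearly every Java hunk in this commit is the same mechanical migration: Elasticsearch 1.0 renamed its top-level exception classes from the old ElasticSearch spelling to Elasticsearch. Porting 0.90.x plugin code is therefore mostly a matter of updating imports and references, as in the sketch below (the wrapper class is illustrative only).

// Against Elasticsearch 0.90.x the imports were:
//   import org.elasticsearch.ElasticSearchException;
//   import org.elasticsearch.ElasticSearchIllegalArgumentException;
// Against 1.0.0.RC1 (this commit) they become:
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;

class RenameSketch {
    static void requireAnalyzer(Object analyzer) {
        // Call sites are unchanged; only the class names differ.
        if (analyzer == null) {
            throw new ElasticsearchIllegalArgumentException("failed to find analyzer");
        }
    }
}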
