Merge pull request #11565 from rjernst/remove/field-mapper-wrappers
Mappings: Remove leftover sugar methods from FieldMapper
rjernst committed Jun 9, 2015
2 parents 09b5f90 + 6c73647 commit 49bef19
Showing 10 changed files with 20 additions and 168 deletions.
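This commit removes the leftover "sugar" methods that FieldMapper exposed as one-line delegations to its field type (value, valueForSearch, termQuery, rangeQuery, and so on); every call site now reaches the same logic through fieldType() explicitly. A minimal before/after sketch of the call-site migration — fieldMapper, storedValue, and the query arguments are illustrative placeholders, not lines taken from this diff:

    // before this commit: query/value helpers called directly on the mapper
    Query q1 = fieldMapper.termQuery("bar", null);
    Object v1 = fieldMapper.valueForSearch(storedValue);

    // after this commit: the same helpers are reached through the mapper's field type
    Query q2 = fieldMapper.fieldType().termQuery("bar", null);
    Object v2 = fieldMapper.fieldType().valueForSearch(storedValue);

The diffs below are the mechanical result of that change.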
@@ -81,7 +81,7 @@ public void postProcess(DocumentMapper documentMapper) {
}
List<Object> fieldValues = entry.getValue();
for (int i = 0; i < fieldValues.size(); i++) {
-fieldValues.set(i, fieldMapper.valueForSearch(fieldValues.get(i)));
+fieldValues.set(i, fieldMapper.fieldType().valueForSearch(fieldValues.get(i)));
}
}
}
@@ -252,7 +252,7 @@ public GetResult innerGet(String type, String id, String[] gFields, boolean real
List<Object> values = searchLookup.source().extractRawValues(field);
if (!values.isEmpty()) {
for (int i = 0; i < values.size(); i++) {
-values.set(i, fieldMapper.valueForSearch(values.get(i)));
+values.set(i, fieldMapper.fieldType().valueForSearch(values.get(i)));
}
value = values;
}
@@ -379,7 +379,7 @@ private GetResult innerGetLoadFromStoredFields(String type, String id, String[]
List<Object> values = searchLookup.source().extractRawValues(field);
if (!values.isEmpty()) {
for (int i = 0; i < values.size(); i++) {
-values.set(i, fieldMapper.valueForSearch(values.get(i)));
+values.set(i, fieldMapper.fieldType().valueForSearch(values.get(i)));
}
value = values;
}
@@ -194,7 +194,7 @@ public DocumentMapper(MapperService mapperService, String index, @Nullable Setti
meta);
this.documentParser = new DocumentParser(index, indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock()));

-this.typeFilter = typeMapper().termQuery(type, null);
+this.typeFilter = typeMapper().fieldType().termQuery(type, null);
this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
this.mappingLock = mappingLock;

core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java (0 additions, 64 deletions)
@@ -19,19 +19,9 @@

package org.elasticsearch.index.mapper;

-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.MultiTermQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.action.fieldstats.FieldStats;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
-import org.elasticsearch.index.query.QueryParseContext;

import java.io.IOException;
-import java.util.List;

/**
*
@@ -47,55 +37,6 @@ public interface FieldMapper extends Mapper {
*/
AbstractFieldMapper.CopyTo copyTo();

-/**
- * Returns the actual value of the field.
- */
-Object value(Object value);
-
-/**
- * Returns the value that will be used as a result for search. Can be only of specific types... .
- */
-Object valueForSearch(Object value);
-
-/**
- * Returns the indexed value used to construct search "values".
- */
-BytesRef indexedValueForSearch(Object value);
-
-/**
- * Should the field query {@link #termQuery(Object, org.elasticsearch.index.query.QueryParseContext)} be used when detecting this
- * field in query string.
- */
-boolean useTermQueryWithQueryString();
-
-Query termQuery(Object value, @Nullable QueryParseContext context);
-
-Query termsQuery(List values, @Nullable QueryParseContext context);
-
-Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
-
-Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions);
-
-Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context);
-
-Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context);
-
-/**
- * A term query to use when parsing a query string. Can return <tt>null</tt>.
- */
-@Nullable
-Query queryStringTermQuery(Term term);
-
-/**
- * Null value filter, returns <tt>null</tt> if there is no null value associated with the field.
- */
-@Nullable
-Query nullValueFilter();
-
-boolean isNumeric();
-
-boolean isSortable();
-
/**
* Fields might not be available before indexing, for example _all, token_count,...
* When get is called and these fields are requested, this case needs special treatment.
@@ -111,9 +52,4 @@ public interface FieldMapper extends Mapper {
*/
Mapper parse(ParseContext context) throws IOException;

-/**
- * @return a {@link FieldStats} instance that maps to the type of this field based on the provided {@link Terms} instance.
- */
-FieldStats stats(Terms terms, int maxDoc) throws IOException;

}
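The delegation methods removed from the concrete mapper base class in the next file were final one-liners forwarding to fieldType(), so behavior is unchanged; callers only gain the explicit fieldType() hop. A small usage sketch of the resulting pattern, mirroring the test updates later in this diff — the helper and its name are hypothetical, not part of this commit:

    // hypothetical helper: resolve a field's mapper, then build the query through its field type
    static Query termQueryFor(DocumentMapper docMapper, String field, Object value) {
        FieldMapper fieldMapper = docMapper.mappers().smartNameFieldMapper(field);
        return fieldMapper.fieldType().termQuery(value, null);
    }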
@@ -28,18 +28,11 @@
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.search.MultiTermQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
-import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
@@ -52,7 +45,6 @@
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
-import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.index.similarity.SimilarityProvider;

@@ -388,67 +380,6 @@ public Iterator<Mapper> iterator() {
return multiFields.iterator();
}

-@Override
-public final Object value(Object value) {
-return fieldType().value(value);
-}
-
-@Override
-public final Object valueForSearch(Object value) {
-return fieldType().valueForSearch(value);
-}
-
-// TODO: this is not final so ParentFieldMapper can have custom behavior, per type...
-@Override
-public BytesRef indexedValueForSearch(Object value) {
-return fieldType().indexedValueForSearch(value);
-}
-
-@Override
-public final Query queryStringTermQuery(Term term) {
-return fieldType().queryStringTermQuery(term);
-}
-
-@Override
-public final boolean useTermQueryWithQueryString() {
-return fieldType().useTermQueryWithQueryString();
-}
-
-@Override
-public final Query termQuery(Object value, @Nullable QueryParseContext context) {
-return fieldType().termQuery(value, context);
-}
-
-@Override
-public final Query termsQuery(List values, @Nullable QueryParseContext context) {
-return fieldType().termsQuery(values, context);
-}
-
-@Override
-public final Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
-return fieldType().rangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, context);
-}
-
-@Override
-public final Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
-return fieldType().fuzzyQuery(value, fuzziness, prefixLength, maxExpansions, transpositions);
-}
-
-@Override
-public final Query prefixQuery(Object value, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
-return fieldType().prefixQuery(value, method, context);
-}
-
-@Override
-public final Query regexpQuery(Object value, int flags, int maxDeterminizedStates, @Nullable MultiTermQuery.RewriteMethod method, @Nullable QueryParseContext context) {
-return fieldType().regexpQuery(value, flags, maxDeterminizedStates, method, context);
-}
-
-@Override
-public final Query nullValueFilter() {
-return fieldType().nullValueQuery();
-}
-
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
if (!this.getClass().equals(mergeWith.getClass())) {
@@ -684,16 +615,6 @@ public void close() {
multiFields.close();
}

-@Override
-public final boolean isNumeric() {
-return fieldType().isNumeric();
-}
-
-@Override
-public final boolean isSortable() {
-return fieldType().isSortable();
-}
-
public static class MultiFields {

public static MultiFields empty() {
@@ -903,9 +824,4 @@ public List<String> copyToFields() {
public boolean isGenerated() {
return false;
}

-@Override
-public final FieldStats stats(Terms terms, int maxDoc) throws IOException {
-return fieldType().stats(terms, maxDoc);
-}
}
@@ -185,7 +185,7 @@ public String path() {

public String value(Document document) {
Field field = (Field) document.getField(fieldType.names().indexName());
-return field == null ? null : (String)value(field);
+return field == null ? null : (String)fieldType().value(field);
}

@Override
@@ -85,7 +85,7 @@ public void testSimpleAllMappers() throws Exception {
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
AllFieldMapper mapper = docMapper.allFieldMapper();
assertThat(field.fieldType().omitNorms(), equalTo(true));
-assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class));
+assertThat(mapper.fieldType().queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class));
}

public void testAllMappersNoBoost() throws Exception {
@@ -116,7 +116,7 @@ public void testAllMappersTermQuery() throws Exception {
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
AllFieldMapper mapper = docMapper.allFieldMapper();
assertThat(field.fieldType().omitNorms(), equalTo(false));
-assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class));
+assertThat(mapper.fieldType().queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class));

}

@@ -136,7 +136,7 @@ public void testAllMappersWithOffsetsTermQuery() throws Exception {
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
AllFieldMapper mapper = docMapper.allFieldMapper();
assertThat(field.fieldType().omitNorms(), equalTo(false));
-assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class));
+assertThat(mapper.fieldType().queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class));
}

// #6187: if _all doesn't index positions then we never use AllTokenStream, even if some fields have boost
@@ -443,7 +443,7 @@ public void testAutoBoost() throws Exception {
IndexService indexService = createIndex(index, client().admin().indices().prepareCreate(index).addMapping("type", "foo", "type=string" + (boost ? ",boost=2" : "")));
client().prepareIndex(index, "type").setSource("foo", "bar").get();
client().admin().indices().prepareRefresh(index).get();
-Query query = indexService.mapperService().documentMapper("type").allFieldMapper().termQuery("bar", null);
+Query query = indexService.mapperService().documentMapper("type").allFieldMapper().fieldType().termQuery("bar", null);
try (Searcher searcher = indexService.shard(0).acquireSearcher("tests")) {
query = searcher.searcher().rewrite(query);
final Class<?> expected = boost ? AllTermQuery.class : TermQuery.class;
@@ -89,7 +89,7 @@ public void testStoredValue() throws IOException {
BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
assertEquals(new BytesRef(value), indexedValue);
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
-Object originalValue = fieldMapper.valueForSearch(indexedValue);
+Object originalValue = fieldMapper.fieldType().valueForSearch(indexedValue);
assertEquals(new BytesArray(value), originalValue);
}
}
@@ -120,7 +120,7 @@ public void testCompressedBackCompat() throws Exception {
BytesRef indexedValue = doc.rootDoc().getBinaryValue("field");
assertEquals(new BytesRef(binaryValue), indexedValue);
FieldMapper fieldMapper = mapper.mappers().smartNameFieldMapper("field");
-Object originalValue = fieldMapper.valueForSearch(indexedValue);
+Object originalValue = fieldMapper.fieldType().valueForSearch(indexedValue);
assertEquals(new BytesArray(original), originalValue);
}

@@ -228,7 +228,7 @@ public void testHourFormat() throws Exception {
NumericRangeQuery<Long> rangeQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
-rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").rangeQuery("10:00:00", "11:00:00", true, true, null);
+rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true, null);
} finally {
SearchContext.removeCurrent();
}
@@ -254,7 +254,7 @@ public void testDayWithoutYearFormat() throws Exception {
NumericRangeQuery<Long> rangeQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
-rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, null);
+rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, null);
} finally {
SearchContext.removeCurrent();
}
@@ -71,25 +71,25 @@ public void testDoubleIndexingSameDoc() throws Exception {
IndexReader reader = DirectoryReader.open(writer, true);
IndexSearcher searcher = new IndexSearcher(reader);

-TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").termQuery("value1", null), 10);
+TopDocs topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field1").fieldType().termQuery("value1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));

-topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").termQuery("1", null), 10);
+topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field2").fieldType().termQuery("1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));

-topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").termQuery("1.1", null), 10);
+topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field3").fieldType().termQuery("1.1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));

-topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").termQuery("2010-01-01", null), 10);
+topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field4").fieldType().termQuery("2010-01-01", null), 10);
assertThat(topDocs.totalHits, equalTo(2));

-topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").termQuery("1", null), 10);
+topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("1", null), 10);
assertThat(topDocs.totalHits, equalTo(2));

-topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").termQuery("2", null), 10);
+topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("2", null), 10);
assertThat(topDocs.totalHits, equalTo(2));

-topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").termQuery("3", null), 10);
+topDocs = searcher.search(mapper.mappers().smartNameFieldMapper("field5").fieldType().termQuery("3", null), 10);
assertThat(topDocs.totalHits, equalTo(2));
writer.close();
reader.close();
