Scripts: Allow access to _fields, providing access to stored fields; closes elastic#402.
kimchy committed Oct 3, 2010
1 parent 28cbfa1 commit c9a47a1
Showing 3 changed files with 269 additions and 7 deletions.
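
For orientation, the new _fields lookup is used through script fields. A minimal sketch with the Java client, mirroring the integration test at the bottom of this diff (the index name "test", the stored field "num1", and the response handling are taken from that test; a Client instance and the test class's static imports are assumed):

// Sketch only: "num1" must be mapped with store set to "yes" for _fields to see it.
SearchResponse response = client.prepareSearch("test")
        .setQuery(matchAllQuery())
        .addScriptField("sNum1_field", "_fields['num1'].value") // stored field, via the new FieldsMap lookup
        .addScriptField("sNum1_doc", "doc['num1'].value")       // field data, for comparison
        .execute().actionGet();

Double stored = (Double) response.hits().getAt(0).fields().get("sNum1_field").values().get(0);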
@@ -0,0 +1,49 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.common.lucene.document;

import org.apache.lucene.document.FieldSelector;
import org.apache.lucene.document.FieldSelectorResult;

/**
* @author kimchy (shay.banon)
*/
public class SingleFieldSelector implements FieldSelector {

    private String name;

    public SingleFieldSelector() {
    }

    public SingleFieldSelector(String name) {
        this.name = name;
    }

    public void name(String name) {
        this.name = name;
    }

    @Override public FieldSelectorResult accept(String fieldName) {
        if (name.equals(fieldName)) {
            return FieldSelectorResult.LOAD;
        }
        return FieldSelectorResult.NO_LOAD;
    }
}
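
For context, this selector is what the new fields lookup below hands to the index reader so that only a single stored field is loaded per document. A minimal usage sketch against Lucene's FieldSelector API (the field name is illustrative):

// Sketch: load one stored field of a document instead of the whole document.
SingleFieldSelector selector = new SingleFieldSelector("num1"); // illustrative field name
Document document = reader.document(docId, selector);           // same call FieldsMap makes below
Fieldable stored = document.getFieldable("num1");               // null if the field was not stored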
@@ -31,6 +31,7 @@
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.lucene.document.SingleFieldSelector;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
@@ -43,9 +44,7 @@

import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import java.util.*;

/**
* @author kimchy (shay.banon)
@@ -56,12 +55,15 @@ public class ScriptSearchLookup {

final SourceMap sourceMap;

final FieldsMap fieldsMap;

final Map<String, Object> scriptVars;

public ScriptSearchLookup(MapperService mapperService, FieldDataCache fieldDataCache) {
docMap = new DocMap(mapperService, fieldDataCache);
sourceMap = new SourceMap();
scriptVars = ImmutableMap.<String, Object>of("doc", docMap, "_source", sourceMap);
fieldsMap = new FieldsMap(mapperService);
scriptVars = ImmutableMap.<String, Object>of("doc", docMap, "_source", sourceMap, "_fields", fieldsMap);
}

public Map<String, Object> processScriptParams(@Nullable Map<String, Object> params) {
@@ -70,17 +72,20 @@ public Map<String, Object> processScriptParams(@Nullable Map<String, Object> par
}
params.put("doc", docMap);
params.put("_source", sourceMap);
params.put("_fields", fieldsMap);
return params;
}

public void setNextReader(IndexReader reader) {
docMap.setNextReader(reader);
sourceMap.setNextReader(reader);
fieldsMap.setNextReader(reader);
}

public void setNextDocId(int docId) {
docMap.setNextDocId(docId);
sourceMap.setNextDocId(docId);
fieldsMap.setNextDocId(docId);
}

static class SourceMap implements Map {
@@ -120,7 +125,7 @@ private Map<String, Object> loadSourceIfNeeded() {
this.source = parser.map();
}
} catch (Exception e) {
throw new ElasticSearchParseException("failed to parse source", e);
throw new ElasticSearchParseException("failed to parse / load source", e);
} finally {
if (parser != null) {
parser.close();
@@ -195,9 +200,202 @@ public void setNextDocId(int docId) {
}
}

public static class FieldsMap implements Map {

    private final MapperService mapperService;

    private IndexReader reader;

    private int docId = -1;

    private final Map<String, FieldData> cachedFieldData = Maps.newHashMap();

    private final SingleFieldSelector fieldSelector = new SingleFieldSelector();

    FieldsMap(MapperService mapperService) {
        this.mapperService = mapperService;
    }

    public void setNextReader(IndexReader reader) {
        if (this.reader == reader) { // if we are called with the same reader, don't invalidate the cache
            return;
        }
        this.reader = reader;
        clearCache();
        this.docId = -1;
    }

    public void setNextDocId(int docId) {
        if (this.docId == docId) { // if we are called with the same docId, don't invalidate the cache
            return;
        }
        this.docId = docId;
        clearCache();
    }

    @Override public Object get(Object key) {
        return loadFieldData(key.toString());
    }

    @Override public boolean containsKey(Object key) {
        try {
            loadFieldData(key.toString());
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    @Override public int size() {
        throw new UnsupportedOperationException();
    }

    @Override public boolean isEmpty() {
        throw new UnsupportedOperationException();
    }

    @Override public Set keySet() {
        throw new UnsupportedOperationException();
    }

    @Override public Collection values() {
        throw new UnsupportedOperationException();
    }

    @Override public Set entrySet() {
        throw new UnsupportedOperationException();
    }

    @Override public Object put(Object key, Object value) {
        throw new UnsupportedOperationException();
    }

    @Override public Object remove(Object key) {
        throw new UnsupportedOperationException();
    }

    @Override public void clear() {
        throw new UnsupportedOperationException();
    }

    @Override public void putAll(Map m) {
        throw new UnsupportedOperationException();
    }

    @Override public boolean containsValue(Object value) {
        throw new UnsupportedOperationException();
    }

    private FieldData loadFieldData(String name) {
        FieldData data = cachedFieldData.get(name);
        if (data == null) {
            FieldMapper mapper = mapperService.smartNameFieldMapper(name);
            if (mapper == null) {
                throw new ElasticSearchIllegalArgumentException("No field found for [" + name + "]");
            }
            data = new FieldData(mapper);
            cachedFieldData.put(name, data);
        }
        if (data.doc() == null) {
            fieldSelector.name(data.mapper().names().indexName());
            try {
                data.doc(reader.document(docId, fieldSelector));
            } catch (IOException e) {
                throw new ElasticSearchParseException("failed to load field [" + name + "]", e);
            }
        }
        return data;
    }

    private void clearCache() {
        for (Entry<String, FieldData> entry : cachedFieldData.entrySet()) {
            entry.getValue().clear();
        }
    }

    public static class FieldData {

        // we can cache the mapper per name, since it is resolved at the index/shard level
        // (the field lookup does not change within the scope of a search request)
        private final FieldMapper mapper;

        private Document doc;

        private Object value;

        private boolean valueLoaded = false;

        private List<Object> values = new ArrayList<Object>();

        private boolean valuesLoaded = false;

        FieldData(FieldMapper mapper) {
            this.mapper = mapper;
        }

        public FieldMapper mapper() {
            return mapper;
        }

        public Document doc() {
            return doc;
        }

        public void doc(Document doc) {
            this.doc = doc;
        }

        public void clear() {
            value = null;
            valueLoaded = false;
            values.clear();
            valuesLoaded = true;
            doc = null;
        }

        public boolean isEmpty() {
            if (valueLoaded) {
                return value == null;
            }
            if (valuesLoaded) {
                return values.isEmpty();
            }
            return getValue() == null;
        }

        public Object getValue() {
            if (valueLoaded) {
                return value;
            }
            valueLoaded = true;
            value = null;
            Fieldable field = doc.getFieldable(mapper.names().indexName());
            if (field == null) {
                return null;
            }
            value = mapper.value(field);
            return value;
        }

        public List<Object> getValues() {
            if (valuesLoaded) {
                return values;
            }
            valuesLoaded = true;
            values.clear();
            Fieldable[] fields = doc.getFieldables(mapper.names().indexName());
            for (Fieldable field : fields) {
                values.add(mapper.value(field));
            }
            return values;
        }
    }
}
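
Taken together, FieldsMap resolves _fields['name'] in a script to the FieldData above, so stored values can be read alongside the existing doc and _source variables. A sketch of the expressions this enables (assuming MVEL-style property access, which maps .value and .values to the getters above; "num1" is again only an example):

// Hypothetical script strings, as they would be passed to addScriptField(...).
String single = "_fields['num1'].value";      // FieldData.getValue(): first stored value, or null
String all = "_fields['num1'].values";        // FieldData.getValues(): all stored values
String missing = "_fields['num1'].isEmpty()"; // FieldData.isEmpty(): true when nothing is stored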


// --- Map implementation for doc field data lookup

static class DocMap implements Map {
public static class DocMap implements Map {

private final Map<String, FieldData> localCacheFieldData = Maps.newHashMapWithExpectedSize(4);

@@ -23,12 +23,14 @@
import org.elasticsearch.client.Client;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

@@ -62,8 +64,15 @@ protected Client getClient() {
return client("client1");
}

@Test public void testCustomScriptBoost() throws Exception {
@Test public void testDocAndFields() throws Exception {
client.admin().indices().prepareCreate("test").execute().actionGet();

String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("num1").field("type", "double").field("store", "yes").endObject()
.endObject().endObject().endObject().string();

client.admin().indices().preparePutMapping().setType("type1").setSource(mapping).execute().actionGet();

client.prepareIndex("test", "type1", "1")
.setSource(jsonBuilder().startObject().field("test", "value beck").field("num1", 1.0f).field("date", "1970-01-01T00:00:00").endObject())
.execute().actionGet();
@@ -82,19 +91,25 @@ protected Client getClient() {
.setQuery(matchAllQuery())
.addSort("num1", SortOrder.ASC)
.addScriptField("sNum1", "doc['num1'].value")
.addScriptField("sNum1_field", "_fields['num1'].value")
.addScriptField("date1", "doc['date'].date.millis")
.execute().actionGet();

assertThat("Failures " + Arrays.toString(response.shardFailures()), response.shardFailures().length, equalTo(0));

assertThat(response.hits().totalHits(), equalTo(3l));
assertThat(response.hits().getAt(0).isSourceEmpty(), equalTo(true));
assertThat(response.hits().getAt(0).id(), equalTo("1"));
assertThat((Double) response.hits().getAt(0).fields().get("sNum1").values().get(0), equalTo(1.0));
assertThat((Double) response.hits().getAt(0).fields().get("sNum1_field").values().get(0), equalTo(1.0));
assertThat((Long) response.hits().getAt(0).fields().get("date1").values().get(0), equalTo(0l));
assertThat(response.hits().getAt(1).id(), equalTo("2"));
assertThat((Double) response.hits().getAt(1).fields().get("sNum1").values().get(0), equalTo(2.0));
assertThat((Double) response.hits().getAt(1).fields().get("sNum1_field").values().get(0), equalTo(2.0));
assertThat((Long) response.hits().getAt(1).fields().get("date1").values().get(0), equalTo(25000l));
assertThat(response.hits().getAt(2).id(), equalTo("3"));
assertThat((Double) response.hits().getAt(2).fields().get("sNum1").values().get(0), equalTo(3.0));
assertThat((Double) response.hits().getAt(2).fields().get("sNum1_field").values().get(0), equalTo(3.0));
assertThat((Long) response.hits().getAt(2).fields().get("date1").values().get(0), equalTo(120000l));

logger.info("running doc['num1'].value * factor");
