Histogram Facet: Allow to define a key field and value script, closes e…
kimchy committed Nov 15, 2010
1 parent b1db5c4 commit 5c6c4bf
Showing 4 changed files with 167 additions and 8 deletions.
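For orientation, a usage sketch (not part of the commit) of the combination this change enables: bucketing on a numeric key field while each bucket's total is computed by a value script. It mirrors the "stats6" facet added to the test below and assumes the facet/query builder helpers the tests use (histogramScriptFacet, matchAllQuery), the usual client/request types, and that the returned facet implements HistogramFacet.

SearchResponse searchResponse = client.prepareSearch()
        .setQuery(matchAllQuery())
        // new in this commit: a histogram script facet keyed by a field instead of a key script
        .addFacet(histogramScriptFacet("stats6")
                .keyField("num")                    // bucket key read from the "num" field
                .valueScript("doc['num'].value")    // per-document value computed by a script
                .interval(100))                     // bucket width
        .execute().actionGet();

HistogramFacet facet = (HistogramFacet) searchResponse.facets().facet("stats6");
long firstBucket = facet.entries().get(0).key();    // e.g. 1000 for key values in [1000, 1100)
double bucketMean = facet.entries().get(0).mean();  // mean of the script values in that bucket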
@@ -93,11 +93,9 @@ public class HistogramFacetCollectorParser implements FacetCollectorParser {
throw new FacetPhaseExecutionException(facetName, "[interval] is required to be set for histogram facet");
}

if (interval < 0) {
throw new FacetPhaseExecutionException(facetName, "[interval] is required to be positive for histogram facet");
}

if (valueField == null || keyField.equals(valueField)) {
if (valueScript != null) {
return new KeyValueScriptHistogramFacetCollector(facetName, keyField, scriptLang, valueScript, params, interval, comparatorType, context);
} else if (valueField == null || keyField.equals(valueField)) {
return new HistogramFacetCollector(facetName, keyField, interval, comparatorType, context);
} else {
// we have a value field, and its different than the key
@@ -33,6 +33,7 @@
  */
 public class HistogramScriptFacetBuilder extends AbstractFacetBuilder {
     private String lang;
+    private String keyFieldName;
     private String keyScript;
     private String valueScript;
     private Map<String, Object> params;
@@ -51,6 +52,11 @@ public HistogramScriptFacetBuilder lang(String lang) {
         return this;
     }
 
+    public HistogramScriptFacetBuilder keyField(String keyFieldName) {
+        this.keyFieldName = keyFieldName;
+        return this;
+    }
+
     public HistogramScriptFacetBuilder keyScript(String keyScript) {
         this.keyScript = keyScript;
         return this;
@@ -90,16 +96,20 @@ public HistogramScriptFacetBuilder facetFilter(XContentFilterBuilder filter) {
     }
 
     @Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
-        if (keyScript == null) {
-            throw new SearchSourceBuilderException("key_script must be set on histogram script facet for facet [" + name + "]");
+        if (keyScript == null && keyFieldName == null) {
+            throw new SearchSourceBuilderException("key_script or key_field must be set on histogram script facet for facet [" + name + "]");
         }
         if (valueScript == null) {
             throw new SearchSourceBuilderException("value_script must be set on histogram script facet for facet [" + name + "]");
         }
         builder.startObject(name);
 
         builder.startObject(HistogramFacetCollectorParser.NAME);
-        builder.field("key_script", keyScript);
+        if (keyFieldName != null) {
+            builder.field("key_field", keyFieldName);
+        } else if (keyScript != null) {
+            builder.field("key_script", keyScript);
+        }
         builder.field("value_script", valueScript);
         if (lang != null) {
             builder.field("lang", lang);
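A hedged sketch (not part of the diff) of the two ways HistogramScriptFacetBuilder can now define the bucket key, with roughly what the toXContent() logic above serializes for each. Facet and field names are illustrative, and the "histogram" element name assumes the value of HistogramFacetCollectorParser.NAME; interval handling sits below the lines shown here.

// key defined by a script, as before:
histogramScriptFacet("by_minute")
        .keyScript("doc['date'].date.minuteOfHour")
        .valueScript("doc['num'].value");
// -> "by_minute" : { "histogram" : { "key_script" : "doc['date'].date.minuteOfHour", "value_script" : "doc['num'].value" } }

// key defined by a field, new in this commit:
histogramScriptFacet("stats6")
        .keyField("num")
        .valueScript("doc['num'].value");
// -> "stats6" : { "histogram" : { "key_field" : "num", "value_script" : "doc['num'].value" } }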
@@ -0,0 +1,138 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search.facet.histogram;

import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.facet.Facet;
import org.elasticsearch.search.facet.FacetPhaseExecutionException;
import org.elasticsearch.search.facet.support.AbstractFacetCollector;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Map;

/**
 * A histogram facet collector that uses a field to provide the key and a script
 * to compute the value of each entry.
*
* @author kimchy (shay.banon)
*/
public class KeyValueScriptHistogramFacetCollector extends AbstractFacetCollector {

private final String fieldName;

private final String indexFieldName;

private final long interval;

private final HistogramFacet.ComparatorType comparatorType;

private final FieldDataCache fieldDataCache;

private final FieldDataType fieldDataType;

private NumericFieldData fieldData;

private final SearchScript valueScript;

private final HistogramProc histoProc;

public KeyValueScriptHistogramFacetCollector(String facetName, String fieldName, String scriptLang, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.fieldName = fieldName;
this.interval = interval;
this.comparatorType = comparatorType;
this.fieldDataCache = context.fieldDataCache();

MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
}

// add type filter if there is exact doc mapper associated with it
if (smartMappers.hasDocMapper()) {
setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
}

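        // the value script is created once per facet here and executed per document in HistogramProc.onValue(...)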
this.valueScript = new SearchScript(context.scriptSearchLookup(), scriptLang, valueScript, params, context.scriptService());

FieldMapper mapper = smartMappers.mapper();

indexFieldName = mapper.names().indexName();
fieldDataType = mapper.fieldDataType();

histoProc = new HistogramProc(interval, this.valueScript);
}

@Override protected void doCollect(int doc) throws IOException {
fieldData.forEachValueInDoc(doc, histoProc);
}

@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
fieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexFieldName);
}

@Override public Facet facet() {
return new InternalHistogramFacet(facetName, fieldName, fieldName, interval, comparatorType, histoProc.counts(), histoProc.totals());
}

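    // truncates a key value to the start of its interval bucket, e.g. bucket(1175, 100) == 1100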
public static long bucket(double value, long interval) {
return (((long) (value / interval)) * interval);
}

public static class HistogramProc implements NumericFieldData.DoubleValueInDocProc {

private final long interval;

private final SearchScript valueScript;

private final TLongLongHashMap counts = new TLongLongHashMap();

private final TLongDoubleHashMap totals = new TLongDoubleHashMap();

public HistogramProc(long interval, SearchScript valueScript) {
this.interval = interval;
this.valueScript = valueScript;
}

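        // called for each value of the key field in a doc: increments the bucket count and adds the script-computed value to the bucket total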
@Override public void onValue(int docId, double value) {
long bucket = bucket(value, interval);
counts.adjustOrPutValue(bucket, 1, 1);
double scriptValue = ((Number) valueScript.execute(docId)).doubleValue();
totals.adjustOrPutValue(bucket, scriptValue, scriptValue);
}

public TLongLongHashMap counts() {
return counts;
}

public TLongDoubleHashMap totals() {
return totals;
}
}
}
@@ -631,6 +631,7 @@ protected Client getClient() {
.addFacet(histogramFacet("stats3").keyField("num").valueField("multi_num").interval(100))
.addFacet(histogramScriptFacet("stats4").keyScript("doc['date'].date.minuteOfHour").valueScript("doc['num'].value"))
.addFacet(histogramFacet("stats5").field("date").interval(1, TimeUnit.MINUTES))
.addFacet(histogramScriptFacet("stats6").keyField("num").valueScript("doc['num'].value").interval(100))
.execute().actionGet();

if (searchResponse.failedShards() > 0) {
@@ -700,6 +701,18 @@ protected Client getClient() {
         assertThat(facet.entries().get(0).count(), equalTo(2l));
         assertThat(facet.entries().get(1).key(), equalTo(TimeValue.timeValueMinutes(2).millis()));
         assertThat(facet.entries().get(1).count(), equalTo(1l));
+
+        facet = searchResponse.facets().facet("stats6");
+        assertThat(facet.name(), equalTo("stats6"));
+        assertThat(facet.entries().size(), equalTo(2));
+        assertThat(facet.entries().get(0).key(), equalTo(1000l));
+        assertThat(facet.entries().get(0).count(), equalTo(2l));
+        assertThat(facet.entries().get(0).total(), equalTo(2120d));
+        assertThat(facet.entries().get(0).mean(), equalTo(1060d));
+        assertThat(facet.entries().get(1).key(), equalTo(1100l));
+        assertThat(facet.entries().get(1).count(), equalTo(1l));
+        assertThat(facet.entries().get(1).total(), equalTo(1175d));
+        assertThat(facet.entries().get(1).mean(), equalTo(1175d));
     }
 
     @Test public void testRangeFacets() throws Exception {
