Skip to content

Commit

Permalink
fix NPE if _score is selected with order by
Browse files Browse the repository at this point in the history
In the `searchWithOrderBy` method, the scorer wasn't set on the
LuceneCollectorExpressions, so the ScoreCollectorExpression
caused an NPE.

Also removed the minimumScore check in `collect`: The
ContextIndexSearcher wraps the LuceneDocCollector inside a
MinimumScoreCollector so the minimumScore filter was already
applied in most cases; for some `search` calls,
ContextIndexSearcher.Stage.MAIN_QUERY additionally had to be set.
  • Loading branch information
mfussenegger committed Jun 24, 2015
1 parent 383d390 commit 1d70c9b
Show file tree
Hide file tree
Showing 3 changed files with 39 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@

package io.crate.operation.collect;

import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import io.crate.Constants;
import io.crate.action.sql.query.CrateSearchContext;
import io.crate.action.sql.query.LuceneSortGenerator;
Expand All @@ -30,10 +32,7 @@
import io.crate.lucene.QueryBuilderHelper;
import io.crate.metadata.Functions;
import io.crate.operation.*;
import io.crate.operation.reference.doc.lucene.CollectorContext;
import io.crate.operation.reference.doc.lucene.LuceneCollectorExpression;
import io.crate.operation.reference.doc.lucene.LuceneDocLevelReferenceResolver;
import io.crate.operation.reference.doc.lucene.OrderByCollectorExpression;
import io.crate.operation.reference.doc.lucene.*;
import io.crate.planner.node.dql.CollectNode;
import io.crate.planner.symbol.Reference;
import io.crate.planner.symbol.Symbol;
Expand All @@ -42,9 +41,11 @@
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.search.internal.ContextIndexSearcher;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.CancellationException;
Expand Down Expand Up @@ -102,7 +103,6 @@ public void required(boolean required) {
private RamAccountingContext ramAccountingContext;
private boolean producedRows = false;
private boolean failed = false;
private Scorer scorer;
private int rowCount = 0;
private int pageSize;

Expand Down Expand Up @@ -131,7 +131,6 @@ public LuceneDocCollector(List<Input<?>> inputs,

@Override
public void setScorer(Scorer scorer) throws IOException {
this.scorer = scorer;
for (LuceneCollectorExpression expr : collectorExpressions) {
expr.setScorer(scorer);
}
Expand All @@ -142,19 +141,14 @@ public void collect(int doc) throws IOException {
if (shardContext.isKilled()) {
throw new CancellationException();
}

rowCount++;
if (ramAccountingContext != null && ramAccountingContext.trippedBreaker()) {
// stop collecting because breaker limit was reached
throw new UnexpectedCollectionTerminatedException(
CrateCircuitBreakerService.breakingExceptionMessage(ramAccountingContext.contextId(),
ramAccountingContext.limit()));
}
// validate minimum score
if (searchContext.minimumScore() != null && scorer.score() < searchContext.minimumScore()) {
return;
}

rowCount++;
producedRows = true;
if (visitorEnabled) {
fieldsVisitor.reset();
Expand Down Expand Up @@ -202,12 +196,13 @@ public void doCollect(JobCollectContext jobCollectContext) {
}
visitorEnabled = fieldsVisitor.required();
shardContext.acquireContext();
searchContext.searcher().inStage(ContextIndexSearcher.Stage.MAIN_QUERY);
Query query = searchContext.query();

try {
assert query != null : "query must not be null";

if( orderBy != null) {
if(orderBy != null) {
searchWithOrderBy(jobCollectContext, query);
} else {
searchContext.searcher().search(query, this);
Expand All @@ -219,6 +214,7 @@ public void doCollect(JobCollectContext jobCollectContext) {
failed = true;
downstream.fail(shardContext.isKilled() ? new CancellationException() : e);
} finally {
searchContext().searcher().finishStage(ContextIndexSearcher.Stage.MAIN_QUERY);
shardContext.releaseContext();
shardContext.close();
}
Expand All @@ -229,7 +225,9 @@ private void searchWithOrderBy(JobCollectContext jobCollectContext, Query query)
Sort sort = LuceneSortGenerator.generateLuceneSort(searchContext, orderBy, inputSymbolVisitor);
TopFieldDocs topFieldDocs = searchContext.searcher().search(query, batchSize, sort);
int collected = topFieldDocs.scoreDocs.length;
ScoreDoc lastCollected = collectTopFields(topFieldDocs);

Collection<ScoreCollectorExpression> scoreExpressions = getScoreExpressions();
ScoreDoc lastCollected = collectTopFields(topFieldDocs, scoreExpressions);
while ((limit == null || collected < limit) && topFieldDocs.scoreDocs.length >= batchSize && lastCollected != null) {
jobCollectContext.interruptIfKilled();

Expand All @@ -244,10 +242,20 @@ private void searchWithOrderBy(JobCollectContext jobCollectContext, Query query)
topFieldDocs = (TopFieldDocs)searchContext.searcher().searchAfter(lastCollected, query, batchSize, sort);
}
collected += topFieldDocs.scoreDocs.length;
lastCollected = collectTopFields(topFieldDocs);
lastCollected = collectTopFields(topFieldDocs, scoreExpressions);
}
}

/**
 * Collects all {@link ScoreCollectorExpression} instances out of the
 * registered collector expressions. The ordered search path has no
 * {@code Scorer} attached, so these expressions must be fed the score
 * values taken from the {@code TopFieldDocs} results instead.
 *
 * @return the (possibly empty) subset of score expressions
 */
private Collection<ScoreCollectorExpression> getScoreExpressions() {
    final List<ScoreCollectorExpression> scoreExpressions = new ArrayList<>();
    for (LuceneCollectorExpression<?> candidate : collectorExpressions) {
        if (candidate instanceof ScoreCollectorExpression) {
            scoreExpressions.add((ScoreCollectorExpression) candidate);
        }
    }
    return scoreExpressions;
}

/**
 * @return the {@link CrateSearchContext} this collector operates on
 */
public CrateSearchContext searchContext() {
    return this.searchContext;
}
Expand All @@ -264,7 +272,7 @@ public void pageSize(int pageSize) {
this.pageSize = pageSize;
}

private ScoreDoc collectTopFields(TopFieldDocs topFieldDocs) throws IOException{
private ScoreDoc collectTopFields(TopFieldDocs topFieldDocs, Collection<ScoreCollectorExpression> scoreExpressions) throws IOException{
IndexReaderContext indexReaderContext = searchContext.searcher().getTopReaderContext();
ScoreDoc lastDoc = null;
if(!indexReaderContext.leaves().isEmpty()) {
Expand All @@ -274,6 +282,9 @@ private ScoreDoc collectTopFields(TopFieldDocs topFieldDocs) throws IOException{
int subDoc = scoreDoc.doc - subReaderContext.docBase;
setNextReader(subReaderContext);
setNextOrderByValues(scoreDoc);
for (LuceneCollectorExpression<?> scoreExpression : scoreExpressions) {
((ScoreCollectorExpression) scoreExpression).score(scoreDoc.score);
}
collect(subDoc);
lastDoc = scoreDoc;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,15 @@ public Float value() {
return score;
}

/**
 * Sets the score value directly, without going through a {@code Scorer}.
 * Used when the score is already known (e.g. taken from a
 * {@code ScoreDoc} in an ordered search) and no scorer is attached.
 *
 * @param score the score value to expose via {@code value()}
 */
public void score(float score) {
    this.score = score;
}

@Override
public void setNextDocId(int doc) {
if (scorer == null) {
return;
}
try {
score = scorer.score();
} catch (IOException e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -946,8 +946,6 @@ public void testSelectWhereScore() throws Exception {
execute("create table quotes (quote string, " +
"index quote_ft using fulltext(quote)) with (number_of_replicas = 0)");
ensureYellow();
assertTrue(client().admin().indices().exists(new IndicesExistsRequest("quotes"))
.actionGet().isExists());

execute("insert into quotes values (?), (?)",
new Object[]{"Would it save you a lot of time if I just gave up and went mad now?",
Expand All @@ -959,6 +957,11 @@ public void testSelectWhereScore() throws Exception {
"and \"_score\" >= 0.98");
assertEquals(1L, response.rowCount());
assertThat((Float) response.rows()[0][1], greaterThanOrEqualTo(0.98f));

execute("select quote, \"_score\" from quotes where match(quote_ft, 'time') " +
"and \"_score\" >= 0.98 order by quote ");
assertEquals(1L, response.rowCount());
assertThat((Float) response.rows()[0][1], greaterThanOrEqualTo(0.98f));
}

@Test
Expand Down

0 comments on commit 1d70c9b

Please sign in to comment.