Search: Make FilteredQuery a forbidden API.
This commit makes FilteredQuery a forbidden API and also removes some more usage
of the Filter API. There is some remaining code that uses filters for parent/child
queries, but I'm not touching it since it is already being refactored in #6511.
jpountz committed May 19, 2015
1 parent fae1a07 commit 4131bcb
Showing 31 changed files with 225 additions and 330 deletions.
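As a hedged illustration of the migration this commit enforces (this snippet is
not part of the diff, and the field names are hypothetical): instead of wrapping
a query and a filter in a FilteredQuery, the two can be combined in a
BooleanQuery, with the filter added as a FILTER clause so that it restricts the
matches without contributing to the score.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

Query query = new TermQuery(new Term("title", "lucene"));      // scored clause
Query filter = new TermQuery(new Term("status", "published")); // match-only clause

BooleanQuery combined = new BooleanQuery();
combined.add(query, Occur.MUST);    // contributes to the score
combined.add(filter, Occur.FILTER); // restricts matches, does not affect the score

This is also consistent with the QueryWrapperFilter to ConstantScoreQuery changes
further down in this diff: filters become regular queries with constant scores.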
4 changes: 4 additions & 0 deletions dev-tools/forbidden/core-signatures.txt
@@ -131,3 +131,7 @@ java.util.concurrent.Future#cancel(boolean)
@defaultMessage Don't try reading from paths that are not configured in Environment, resolve from Environment instead
org.elasticsearch.common.io.PathUtils#get(java.lang.String, java.lang.String[])
org.elasticsearch.common.io.PathUtils#get(java.net.URI)

@defaultMessage Use queries, not filters
org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter)
org.apache.lucene.search.FilteredQuery#<init>(org.apache.lucene.search.Query, org.apache.lucene.search.Filter, org.apache.lucene.search.FilteredQuery$FilterStrategy)
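With these signatures in place, any direct construction of a FilteredQuery now
fails the forbidden-apis check at build time. A hypothetical example of code
that would be rejected (filter stands for some existing Filter instance):

Query q = new FilteredQuery(new MatchAllDocsQuery(), filter); // build fails: "Use queries, not filters"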
74 changes: 74 additions & 0 deletions src/main/java/org/elasticsearch/common/lucene/Lucene.java
@@ -39,6 +39,8 @@
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
@@ -52,11 +54,13 @@
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
import org.apache.lucene.util.Version;
@@ -795,4 +799,74 @@ public void delete() {
throw new UnsupportedOperationException("This IndexCommit does not support deletions");
}
}

/**
* Is it an empty {@link DocIdSet}?
*/
public static boolean isEmpty(@Nullable DocIdSet set) {
return set == null || set == DocIdSet.EMPTY;
}

/**
* Given a {@link Scorer}, return a {@link Bits} instance that will match
* all documents contained in the set. Note that the returned {@link Bits}
* instance MUST be consumed in order.
*/
public static Bits asSequentialAccessBits(final int maxDoc, @Nullable Scorer scorer) throws IOException {
if (scorer == null) {
return new Bits.MatchNoBits(maxDoc);
}
final TwoPhaseIterator twoPhase = scorer.asTwoPhaseIterator();
final DocIdSetIterator iterator;
if (twoPhase == null) {
iterator = scorer;
} else {
iterator = twoPhase.approximation();
}

return new Bits() {

int previous = -1;
boolean previousMatched = false;

@Override
public boolean get(int index) {
if (index < 0 || index >= maxDoc) {
throw new IndexOutOfBoundsException(index + " is out of bounds: [" + 0 + "-" + maxDoc + "[");
}
if (index < previous) {
throw new IllegalArgumentException("This Bits instance can only be consumed in order. "
+ "Got called on [" + index + "] while previously called on [" + previous + "]");
}
if (index == previous) {
// we cache whether it matched because it is illegal to call
// twoPhase.matches() twice
return previousMatched;
}
previous = index;

int doc = iterator.docID();
if (doc < index) {
try {
doc = iterator.advance(index);
} catch (IOException e) {
throw new IllegalStateException("Cannot advance iterator", e);
}
}
if (index == doc) {
try {
return previousMatched = twoPhase == null || twoPhase.matches();
} catch (IOException e) {
throw new IllegalStateException("Cannot validate match", e);
}
}
return previousMatched = false;
}

@Override
public int length() {
return maxDoc;
}
};
}
}
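A minimal usage sketch for the new asSequentialAccessBits helper; filterWeight
and context are assumed to be a Weight and a LeafReaderContext obtained
elsewhere, and, as the javadoc above requires, get(int) is called with
non-decreasing doc IDs only:

Scorer filterScorer = filterWeight.scorer(context, null); // may be null when nothing matches
Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
for (int doc = 0; doc < bits.length(); ++doc) { // in-order consumption, as required
    if (bits.get(doc)) {
        // doc matches the filter on this segment
    }
}

The benefit over materializing a DocIdSet up front is that a two-phase iterator
only has to confirm matches for the documents that are actually asked about.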
105 changes: 0 additions & 105 deletions src/main/java/org/elasticsearch/common/lucene/docset/DocIdSets.java

This file was deleted.

src/main/java/org/elasticsearch/common/lucene/search/FilteredCollector.java
@@ -19,9 +19,13 @@
package org.elasticsearch.common.lucene.search;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.*;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FilterLeafCollector;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.Lucene;

import java.io.IOException;

@@ -42,7 +46,7 @@ public FilteredCollector(Collector collector, Weight filter) {
public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException {
final Scorer filterScorer = filter.scorer(context, null);
final LeafCollector in = collector.getLeafCollector(context);
final Bits bits = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
final Bits bits = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);

return new FilterLeafCollector(in) {
@Override
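The hunk is truncated above. A plausible completion, assuming the wrapper simply
skips documents the filter does not match (an assumption, not the exact source):

    return new FilterLeafCollector(in) {
        @Override
        public void collect(int doc) throws IOException {
            if (bits.get(doc)) {
                super.collect(doc);
            }
        }
    };

This works with the sequential-access Bits because Lucene calls collect(int)
with increasing doc IDs within a segment.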

This file was deleted.

src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java
@@ -19,16 +19,24 @@

package org.elasticsearch.common.lucene.search.function;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.Lucene;

import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Set;

/**
* A query that allows for a pluggable boost function / filter. If it matches
@@ -169,7 +177,7 @@ public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOException {
FilterFunction filterFunction = filterFunctions[i];
functions[i] = filterFunction.function.getLeafScoreFunction(context);
Scorer filterScorer = filterWeights[i].scorer(context, null); // no need to apply accepted docs
docSets[i] = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorer);
}
return new FiltersFunctionFactorScorer(this, subQueryScorer, scoreMode, filterFunctions, maxBoost, functions, docSets, combineFunction, minScore);
}
@@ -193,7 +201,7 @@ public Explanation explain(LeafReaderContext context, int doc) throws IOException {
weightSum++;
}

Bits docSet = DocIdSets.asSequentialAccessBits(context.reader().maxDoc(),
Bits docSet = Lucene.asSequentialAccessBits(context.reader().maxDoc(),
filterWeights[i].scorer(context, null));
if (docSet.get(doc)) {
Explanation functionExplanation = filterFunction.function.getLeafScoreFunction(context).explainScore(doc, subQueryExpl);
14 changes: 7 additions & 7 deletions src/main/java/org/elasticsearch/index/mapper/MapperService.java
@@ -35,7 +35,7 @@
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchGenerationException;
@@ -372,11 +372,11 @@ public Query searchFilter(String... types) {
BooleanQuery bq = new BooleanQuery();
bq.add(percolatorType, Occur.MUST_NOT);
bq.add(Queries.newNonNestedFilter(), Occur.MUST);
return new QueryWrapperFilter(bq);
return new ConstantScoreQuery(bq);
} else if (hasNested) {
return Queries.newNonNestedFilter();
} else if (filterPercolateType) {
return new QueryWrapperFilter(Queries.not(percolatorType));
return new ConstantScoreQuery(Queries.not(percolatorType));
} else {
return null;
}
@@ -390,7 +390,7 @@ public Query searchFilter(String... types) {
BooleanQuery bq = new BooleanQuery();
bq.add(percolatorType, Occur.MUST_NOT);
bq.add(filter, Occur.MUST);
return new QueryWrapperFilter(bq);
return new ConstantScoreQuery(bq);
} else {
return filter;
}
@@ -420,9 +420,9 @@ public Query searchFilter(String... types) {
BooleanQuery bq = new BooleanQuery();
bq.add(percolatorType, Occur.MUST_NOT);
bq.add(termsFilter, Occur.MUST);
return new QueryWrapperFilter(bq);
return new ConstantScoreQuery(bq);
} else {
return new QueryWrapperFilter(termsFilter);
return termsFilter;
}
} else {
// Current bool filter requires that at least one should clause matches, even with a must clause.
Expand All @@ -442,7 +442,7 @@ public Query searchFilter(String... types) {
bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
}

return new QueryWrapperFilter(bool);
return new ConstantScoreQuery(bool);
}
}
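Why ConstantScoreQuery is the drop-in replacement here: it wraps an arbitrary
query so that every match gets the same constant score, which is exactly the
scoring behavior the old filter wrappers produced. A minimal sketch with a
hypothetical type value:

Query typeFilter = new TermQuery(new Term("_type", "blog"));
Query scored = new ConstantScoreQuery(typeFilter); // matches like the filter, scores a constant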

