Always set the query cache explicitly on IndexSearcher.
jpountz committed Apr 30, 2015
1 parent 50a92ae commit 9009c99
Showing 11 changed files with 40 additions and 56 deletions.
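The recurring change below is a single pattern: every place that wraps an existing reader in a fresh, short-lived IndexSearcher now sets the query cache on that searcher explicitly, here to null. A minimal sketch of the pattern against the Lucene 5.x API (the helper class and method names are hypothetical; the commit repeats these three lines inline at each call site):

```java
import org.apache.lucene.search.IndexSearcher;

final class UncachedSearcherExample {
    // Hypothetical helper mirroring the inline pattern used throughout this
    // commit: wrap the same reader, keep the similarity, and set the query
    // cache explicitly to null so this throwaway searcher never reads from
    // or writes to a cache that is managed elsewhere.
    static IndexSearcher wrapWithoutCache(IndexSearcher original) {
        IndexSearcher indexSearcher = new IndexSearcher(original.getIndexReader());
        indexSearcher.setSimilarity(original.getSimilarity());
        indexSearcher.setQueryCache(null);
        return indexSearcher;
    }
}
```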
@@ -18,9 +18,9 @@

package org.apache.lucene.search.postingshighlight;

-import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
+import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Strings;
@@ -91,8 +91,7 @@ public CustomPostingsHighlighter(CustomPassageFormatter passageFormatter, List<O
/*
Our own api to highlight a single document field, passing in the query terms, and get back our own Snippet object
*/
-public Snippet[] highlightDoc(String field, BytesRef[] terms, IndexSearcher searcher, int docId, int maxPassages) throws IOException {
-IndexReader reader = searcher.getIndexReader();
+public Snippet[] highlightDoc(String field, BytesRef[] terms, IndexReader reader, int docId, int maxPassages) throws IOException {
IndexReaderContext readerContext = reader.getContext();
List<LeafReaderContext> leaves = readerContext.leaves();

@@ -105,6 +105,7 @@ public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws

IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
+indexSearcher.setQueryCache(null);
ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType);
indexSearcher.search(childQuery, collector);

@@ -155,6 +155,7 @@ public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
+indexSearcher.setQueryCache(null);

boolean abort = true;
long numFoundParents;
@@ -94,6 +94,7 @@ public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws
ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
+indexSearcher.setQueryCache(null);
indexSearcher.search(parentQuery, collector);

if (collector.parentCount() == 0) {
@@ -36,7 +36,6 @@
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.ToStringUtils;
-import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.IndexCacheableQuery;
@@ -132,6 +131,7 @@ public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws
collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType);
IndexSearcher indexSearcher = new IndexSearcher(sc.searcher().getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
+indexSearcher.setQueryCache(null);
indexSearcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {
return new BooleanQuery().createWeight(searcher, needsScores);
@@ -251,12 +251,16 @@ public Scorer scorer(LeafReaderContext context, Bits acceptDocs) throws IOExcept
if (DocIdSets.isEmpty(childrenDocSet)) {
return null;
}
+final DocIdSetIterator childIterator = childrenDocSet.iterator();
+if (childIterator == null) {
+return null;
+}
SortedDocValues bytesValues = globalIfd.load(context).getOrdinalsValues(parentType);
if (bytesValues == null) {
return null;
}

-return new ChildScorer(this, parentIdxs, scores, childrenDocSet.iterator(), bytesValues);
+return new ChildScorer(this, parentIdxs, scores, childIterator, bytesValues);
}

}
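Beyond the cache change, the hunk above also guards the iterator: in Lucene, DocIdSet.iterator() may return null to signal an empty set even when the DocIdSet itself is non-null. A small sketch of that contract, assuming the Lucene 5.x API (class and method names hypothetical):

```java
import java.io.IOException;

import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;

final class DocIdSetIteratorGuard {
    // DocIdSet.iterator() is allowed to return null when the set matches no
    // documents, which is why the scorer above checks the iterator before
    // constructing a ChildScorer instead of dereferencing it directly.
    static DocIdSetIterator iteratorOrNull(DocIdSet set) throws IOException {
        return set == null ? null : set.iterator(); // may legitimately be null
    }
}
```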
@@ -104,6 +104,7 @@ public Weight doCreateWeight(IndexSearcher searcher, boolean needsScores) throws

IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity());
+indexSearcher.setQueryCache(null);
while (true) {
parentDocs.clear();
TopDocs topChildDocs = indexSearcher.search(childQuery, numChildDocs);
@@ -71,7 +71,9 @@ public void prepare(PercolateContext context, ParsedDocument parsedDocument) {
try {
MultiReader mReader = new MultiReader(memoryIndices, true);
LeafReader slowReader = SlowCompositeReaderWrapper.wrap(mReader);
-DocSearcher docSearcher = new DocSearcher(new IndexSearcher(slowReader), rootDocMemoryIndex);
+final IndexSearcher slowSearcher = new IndexSearcher(slowReader);
+slowSearcher.setQueryCache(null);
+DocSearcher docSearcher = new DocSearcher(slowSearcher, rootDocMemoryIndex);
context.initialize(docSearcher, parsedDocument);
} catch (IOException e) {
throw new ElasticsearchException("Failed to create index for percolator with nested document ", e);
13 changes: 0 additions & 13 deletions src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java
@@ -22,8 +22,6 @@
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.IndexSearcher;
-import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.InternalSearchHit;
@@ -42,14 +40,12 @@ public static class HitContext {
private LeafReaderContext readerContext;
private int docId;
private Map<String, Object> cache;
-private IndexSearcher atomicIndexSearcher;

public void reset(InternalSearchHit hit, LeafReaderContext context, int docId, IndexReader topLevelReader) {
this.hit = hit;
this.readerContext = context;
this.docId = docId;
this.topLevelReader = topLevelReader;
-this.atomicIndexSearcher = null;
}

public InternalSearchHit hit() {
@@ -64,15 +60,6 @@ public LeafReaderContext readerContext() {
return readerContext;
}

-public IndexSearcher searcher() {
-if (atomicIndexSearcher == null) {
-// Use the reader directly otherwise the IndexSearcher assertion will trip because it expects a top level
-// reader context.
-atomicIndexSearcher = new IndexSearcher(readerContext.reader());
-}
-return atomicIndexSearcher;
-}
-
public int docId() {
return docId;
}
@@ -122,7 +122,7 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
//we highlight every value separately calling the highlight method multiple times, only if we need to have back a snippet per value (whole value)
int values = mergeValues ? 1 : textsToHighlight.size();
for (int i = 0; i < values; i++) {
-Snippet[] fieldSnippets = highlighter.highlightDoc(fieldMapper.names().indexName(), mapperHighlighterEntry.filteredQueryTerms, hitContext.searcher(), hitContext.docId(), numberOfFragments);
+Snippet[] fieldSnippets = highlighter.highlightDoc(fieldMapper.names().indexName(), mapperHighlighterEntry.filteredQueryTerms, hitContext.reader(), hitContext.docId(), numberOfFragments);
if (fieldSnippets != null) {
for (Snippet fieldSnippet : fieldSnippets) {
if (Strings.hasText(fieldSnippet.getText())) {
24 changes: 6 additions & 18 deletions src/main/java/org/elasticsearch/search/lookup/LeafIndexLookup.java
@@ -18,12 +18,12 @@
*/
package org.elasticsearch.search.lookup;

-import org.apache.lucene.index.CompositeReader;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.util.MinimalMap;
@@ -40,14 +40,11 @@ public class LeafIndexLookup extends MinimalMap<String, IndexField> {

// The parent reader from which we can get proper field and term
// statistics
-private final CompositeReader parentReader;
+private final IndexReader parentReader;

// we need this later to get the field and term statistics of the shard
private final IndexSearcher indexSearcher;

-// we need this later to get the term statistics of the shard
-private final IndexReaderContext indexReaderContext;
-
// current docId
private int docId = -1;

@@ -90,15 +87,9 @@ public int numDeletedDocs() {

public LeafIndexLookup(LeafReaderContext ctx) {
reader = ctx.reader();
-if (ctx.parent != null) {
-parentReader = ctx.parent.reader();
-indexSearcher = new IndexSearcher(parentReader);
-indexReaderContext = ctx.parent;
-} else {
-parentReader = null;
-indexSearcher = null;
-indexReaderContext = null;
-}
+parentReader = ReaderUtil.getTopLevelContext(ctx).reader();
+indexSearcher = new IndexSearcher(parentReader);
+indexSearcher.setQueryCache(null);
}

public void setDocument(int docId) {
@@ -175,13 +166,10 @@ public IndexReader getParentReader() {
}

public IndexSearcher getIndexSearcher() {
-if (indexSearcher == null) {
-return new IndexSearcher(reader);
-}
return indexSearcher;
}

public IndexReaderContext getReaderContext() {
-return indexReaderContext;
+return getParentReader().getContext();
}
}
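For context on the LeafIndexLookup change above: ReaderUtil.getTopLevelContext walks the ctx.parent chain up to the root IndexReaderContext, so it works whether or not the leaf has a composite parent and removes the old null check. A hedged sketch of the same construction (class and method names hypothetical):

```java
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.IndexSearcher;

final class TopLevelSearcherExample {
    // Resolve the top-level (shard) reader from a leaf context and wrap it
    // the same way the constructor above now does, with caching disabled.
    static IndexSearcher topLevelSearcher(LeafReaderContext ctx) {
        IndexReader parentReader = ReaderUtil.getTopLevelContext(ctx).reader();
        IndexSearcher indexSearcher = new IndexSearcher(parentReader);
        indexSearcher.setQueryCache(null); // set explicitly, per this commit
        return indexSearcher;
    }
}
```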
@@ -82,39 +82,39 @@ public void testDiscreteHighlightingPerValue() throws Exception {
Query query = new TermQuery(new Term("body", "highlighting"));
BytesRef[] queryTerms = filterTerms(extractTerms(searcher, query), "body", true);

-TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
+TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;

//highlighting per value, considering whole values (simulating number_of_fragments=0)
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, false, Integer.MAX_VALUE - 1, 0);
highlighter.setBreakIterator(new WholeBreakIterator());

Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is a test. Just a test <b>highlighting</b> from postings highlighter."));

snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the second value to perform <b>highlighting</b> on."));

snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the third value to test <b>highlighting</b> with postings."));


//let's try without whole break iterator as well, to prove that highlighting works the same when working per value (not optimized though)
highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, false, Integer.MAX_VALUE - 1, 0);

snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("Just a test <b>highlighting</b> from postings highlighter."));

snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the second value to perform <b>highlighting</b> on."));

snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is the third value to test <b>highlighting</b> with postings."));

@@ -177,7 +177,7 @@ public void testDiscreteHighlightingScoring() throws Exception {
Query query = new TermQuery(new Term("body", "highlighting"));
BytesRef[] queryTerms = filterTerms(extractTerms(searcher, query), "body", true);

-TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
+TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));

int docId = topDocs.scoreDocs[0].doc;
@@ -190,7 +190,7 @@

boolean mergeValues = true;
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, mergeValues, Integer.MAX_VALUE-1, 0);
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);

assertThat(snippets.length, equalTo(4));

@@ -205,7 +205,7 @@
highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, mergeValues, Integer.MAX_VALUE-1, 0);
List<Snippet> snippets2 = new ArrayList<>();
for (int i = 0; i < fieldValues.size(); i++) {
snippets2.addAll(Arrays.asList(highlighter.highlightDoc("body", queryTerms, searcher, docId, 5)));
snippets2.addAll(Arrays.asList(highlighter.highlightDoc("body", queryTerms, ir, docId, 5)));
}

assertThat(snippets2.size(), equalTo(4));
@@ -292,7 +292,7 @@ public void testMergeValuesScoring() throws Exception {
Query query = new TermQuery(new Term("body", "highlighting"));
BytesRef[] queryTerms = filterTerms(extractTerms(searcher, query), "body", true);

-TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
+TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));

int docId = topDocs.scoreDocs[0].doc;
@@ -305,7 +305,7 @@

boolean mergeValues = true;
CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder()), fieldValues, mergeValues, Integer.MAX_VALUE-1, 0);
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, searcher, docId, 5);
Snippet[] snippets = highlighter.highlightDoc("body", queryTerms, ir, docId, 5);

assertThat(snippets.length, equalTo(4));

@@ -379,7 +379,7 @@ public void testRequireFieldMatch() throws Exception {
Query query = new TermQuery(new Term("none", "highlighting"));
IndexSearcher searcher = newSearcher(ir);
SortedSet<Term> queryTerms = extractTerms(searcher, query);
-TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
+TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;

@@ -392,15 +392,15 @@
//no snippets with simulated require field match (we filter the terms ourselves)
boolean requireFieldMatch = true;
BytesRef[] filteredQueryTerms = filterTerms(queryTerms, "body", requireFieldMatch);
Snippet[] snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
Snippet[] snippets = highlighter.highlightDoc("body", filteredQueryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(0));


highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, 0);
//one snippet without require field match, just passing in the query terms with no filtering on our side
requireFieldMatch = false;
filteredQueryTerms = filterTerms(queryTerms, "body", requireFieldMatch);
snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
snippets = highlighter.highlightDoc("body", filteredQueryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("Just a test <b>highlighting</b> from postings."));

@@ -435,7 +435,7 @@ public void testNoMatchSize() throws Exception {

IndexSearcher searcher = newSearcher(ir);
SortedSet<Term> queryTerms = extractTerms(searcher, query);
-TopDocs topDocs = searcher.search(query, null, 10, Sort.INDEXORDER);
+TopDocs topDocs = searcher.search(query, 10, Sort.INDEXORDER);
assertThat(topDocs.totalHits, equalTo(1));
int docId = topDocs.scoreDocs[0].doc;

@@ -446,11 +446,11 @@
CustomPassageFormatter passageFormatter = new CustomPassageFormatter("<b>", "</b>", new DefaultEncoder());

CustomPostingsHighlighter highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, 0);
Snippet[] snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
Snippet[] snippets = highlighter.highlightDoc("body", filteredQueryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(0));

highlighter = new CustomPostingsHighlighter(passageFormatter, values, true, Integer.MAX_VALUE - 1, scaledRandomIntBetween(1, 10));
snippets = highlighter.highlightDoc("body", filteredQueryTerms, searcher, docId, 5);
snippets = highlighter.highlightDoc("body", filteredQueryTerms, ir, docId, 5);
assertThat(snippets.length, equalTo(1));
assertThat(snippets[0].getText(), equalTo("This is a test."));
