Skip to content

Commit

Permalink
Make sure only relevant documents are evaluated in the second round lookup phase.

Browse files Browse the repository at this point in the history

Both the has_parent and has_child filters are internally executed in two rounds. Previously, in the second round all documents were evaluated, although only specific documents need to be checked: in the has_child case, only documents belonging to the configured parent type need to be checked, and in the has_parent case, only child documents need to be checked.

Closes #3034
  • Loading branch information
martijnvg committed May 14, 2013
1 parent ae6c1b3 commit 906f278
Show file tree
Hide file tree
Showing 7 changed files with 209 additions and 185 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -133,7 +133,9 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
}

HasChildFilter childFilter = HasChildFilter.create(query, parentType, childType, searchContext);
DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
HasChildFilter childFilter = new HasChildFilter(query, parentType, childType, parentFilter, searchContext);
searchContext.addRewrite(childFilter);
Filter filter = childFilter;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,12 +19,12 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.search.child.ChildrenQuery;
Expand Down Expand Up @@ -127,16 +127,17 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[has_child] Can't execute, search context not set.");
}

Query query;
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
if (scoreType != null) {
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
ChildrenQuery childrenQuery = new ChildrenQuery(searchContext, parentType, childType, parentFilter, innerQuery, scoreType);
searchContext.addRewrite(childrenQuery);
query = childrenQuery;
} else {
HasChildFilter hasChildFilter = HasChildFilter.create(innerQuery, parentType, childType, searchContext);
HasChildFilter hasChildFilter = new HasChildFilter(innerQuery, parentType, childType, parentFilter, searchContext);
searchContext.addRewrite(hasChildFilter);
query = new ConstantScoreQuery(hasChildFilter);
query = new XConstantScoreQuery(hasChildFilter);
}
query.setBoost(boost);
return query;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,20 +19,26 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.NotFilter;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.cache.filter.support.CacheKeyFilter;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.search.child.HasParentFilter;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
*
Expand Down Expand Up @@ -130,9 +136,31 @@ public Filter parse(QueryParseContext parseContext) throws IOException, QueryPar
throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set");
}

HasParentFilter parentFilter = HasParentFilter.create(query, parentType, searchContext);
searchContext.addRewrite(parentFilter);
Filter filter = parentFilter;
List<String> parentTypes = new ArrayList<String>(2);
for (DocumentMapper documentMapper : parseContext.mapperService()) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper != null) {
parentTypes.add(parentFieldMapper.type());
}
}

Filter parentFilter;
if (parentTypes.size() == 1) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.get(0));
parentFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
} else {
XBooleanFilter parentsFilter = new XBooleanFilter();
for (String parentTypeStr : parentTypes) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
parentsFilter.add(filter, BooleanClause.Occur.SHOULD);
}
parentFilter = parentsFilter;
}
Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
HasParentFilter hasParentFilter = new HasParentFilter(query, parentType, searchContext, childrenFilter);
searchContext.addRewrite(hasParentFilter);
Filter filter = hasParentFilter;

if (cache) {
filter = parseContext.cacheFilter(filter, cacheKey);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,14 @@
package org.elasticsearch.index.query;

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.NotFilter;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
Expand Down Expand Up @@ -122,49 +123,46 @@ public Query parse(QueryParseContext parseContext) throws IOException, QueryPars
throw new QueryParsingException(parseContext.index(), "[has_parent] query configured 'parent_type' [" + parentType + "] is not a valid type");
}

List<String> childTypes = new ArrayList<String>(2);
innerQuery.setBoost(boost);
// wrap the query with type query
innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set.");
}

List<String> parentTypes = new ArrayList<String>(2);
for (DocumentMapper documentMapper : parseContext.mapperService()) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper == null) {
continue;
}

if (parentDocMapper.type().equals(parentFieldMapper.type())) {
childTypes.add(documentMapper.type());
if (parentFieldMapper != null) {
parentTypes.add(parentFieldMapper.type());
}
}

Filter childFilter;
if (childTypes.size() == 1) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childTypes.get(0));
childFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
Filter parentFilter;
if (parentTypes.size() == 1) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypes.get(0));
parentFilter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
} else {
XBooleanFilter childrenFilter = new XBooleanFilter();
for (String childType : childTypes) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(childType);
XBooleanFilter parentsFilter = new XBooleanFilter();
for (String parentTypeStr : parentTypes) {
DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr);
Filter filter = parseContext.cacheFilter(documentMapper.typeFilter(), null);
childrenFilter.add(filter, BooleanClause.Occur.SHOULD);
parentsFilter.add(filter, BooleanClause.Occur.SHOULD);
}
childFilter = childrenFilter;
}

innerQuery.setBoost(boost);
// wrap the query with type query
innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new ElasticSearchIllegalStateException("[has_parent] Can't execute, search context not set.");
parentFilter = parentsFilter;
}
Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);

Query query;
if (score) {
ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childTypes, childFilter);
ParentQuery parentQuery = new ParentQuery(searchContext, innerQuery, parentType, childrenFilter);
searchContext.addRewrite(parentQuery);
query = parentQuery;
} else {
HasParentFilter hasParentFilter = HasParentFilter.create(innerQuery, parentType, searchContext);
HasParentFilter hasParentFilter = new HasParentFilter(innerQuery, parentType, searchContext, childrenFilter);
searchContext.addRewrite(hasParentFilter);
query = new ConstantScoreQuery(hasParentFilter);
query = new XConstantScoreQuery(hasParentFilter);
}
query.setBoost(boost);
return query;
Expand Down
119 changes: 59 additions & 60 deletions src/main/java/org/elasticsearch/index/search/child/HasChildFilter.java
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@
import org.apache.lucene.util.Bits;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.common.CacheRecycler;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.HashedBytesArray;
import org.elasticsearch.common.lucene.docset.DocIdSets;
import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
import org.elasticsearch.index.cache.id.IdReaderTypeCache;
import org.elasticsearch.search.internal.SearchContext;
Expand All @@ -39,14 +39,18 @@
/**
*
*/
public abstract class HasChildFilter extends Filter implements SearchContext.Rewrite {
public class HasChildFilter extends Filter implements SearchContext.Rewrite {

final Query childQuery;
final String parentType;
final String childType;
final Filter parentFilter;
final SearchContext searchContext;

protected HasChildFilter(Query childQuery, String parentType, String childType, SearchContext searchContext) {
THashSet<HashedBytesArray> collectedUids;

public HasChildFilter(Query childQuery, String parentType, String childType, Filter parentFilter, SearchContext searchContext) {
this.parentFilter = parentFilter;
this.searchContext = searchContext;
this.parentType = parentType;
this.childType = childType;
Expand Down Expand Up @@ -90,79 +94,74 @@ public String toString() {
return sb.toString();
}

public static HasChildFilter create(Query childQuery, String parentType, String childType, SearchContext searchContext) {
return new Uid(childQuery, parentType, childType, searchContext);
}

static class Uid extends HasChildFilter {

THashSet<HashedBytesArray> collectedUids;

Uid(Query childQuery, String parentType, String childType, SearchContext searchContext) {
super(childQuery, parentType, childType, searchContext);
public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (collectedUids == null) {
throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
}

public DocIdSet getDocIdSet(AtomicReaderContext context, Bits acceptDocs) throws IOException {
if (collectedUids == null) {
throw new ElasticSearchIllegalStateException("has_child filter hasn't executed properly");
}

IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
if (idReaderTypeCache != null) {
return new ParentDocSet(context.reader(), acceptDocs, collectedUids, idReaderTypeCache);
} else {
return null;
}
DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, null);
if (DocIdSets.isEmpty(parentDocIdSet)) {
return null;
}

@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
collectedUids = CacheRecycler.popHashSet();
UidCollector collector = new UidCollector(parentType, searchContext, collectedUids);
searchContext.searcher().search(childQuery, collector);
Bits parentsBits = DocIdSets.toSafeBits(context.reader(), parentDocIdSet);
IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
if (idReaderTypeCache != null) {
return new ParentDocSet(context.reader(), parentsBits, collectedUids, idReaderTypeCache);
} else {
return null;
}
}

@Override
public void contextClear() {
if (collectedUids != null) {
CacheRecycler.pushHashSet(collectedUids);
}
collectedUids = null;
@Override
public void contextRewrite(SearchContext searchContext) throws Exception {
searchContext.idCache().refresh(searchContext.searcher().getTopReaderContext().leaves());
collectedUids = CacheRecycler.popHashSet();
UidCollector collector = new UidCollector(parentType, searchContext, collectedUids);
searchContext.searcher().search(childQuery, collector);
}

@Override
public void contextClear() {
if (collectedUids != null) {
CacheRecycler.pushHashSet(collectedUids);
}
collectedUids = null;
}

final static class ParentDocSet extends MatchDocIdSet {
final static class ParentDocSet extends MatchDocIdSet {

final IndexReader reader;
final THashSet<HashedBytesArray> parents;
final IdReaderTypeCache typeCache;
final IndexReader reader;
final THashSet<HashedBytesArray> parents;
final IdReaderTypeCache typeCache;

ParentDocSet(IndexReader reader, @Nullable Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
super(reader.maxDoc(), acceptDocs);
this.reader = reader;
this.parents = parents;
this.typeCache = typeCache;
}
ParentDocSet(IndexReader reader, Bits acceptDocs, THashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
super(reader.maxDoc(), acceptDocs);
this.reader = reader;
this.parents = parents;
this.typeCache = typeCache;
}

@Override
protected boolean matchDoc(int doc) {
return parents.contains(typeCache.idByDoc(doc));
}
@Override
protected boolean matchDoc(int doc) {
return parents.contains(typeCache.idByDoc(doc));
}
}

final static class UidCollector extends ParentIdCollector {
private final THashSet<HashedBytesArray> collectedUids;
final static class UidCollector extends ParentIdCollector {

UidCollector(String parentType, SearchContext context, THashSet<HashedBytesArray> collectedUids) {
super(parentType, context);
this.collectedUids = collectedUids;
}
final THashSet<HashedBytesArray> collectedUids;

@Override
public void collect(int doc, HashedBytesArray parentIdByDoc){
collectedUids.add(parentIdByDoc);
}
UidCollector(String parentType, SearchContext context, THashSet<HashedBytesArray> collectedUids) {
super(parentType, context);
this.collectedUids = collectedUids;
}

@Override
public void collect(int doc, HashedBytesArray parentIdByDoc){
collectedUids.add(parentIdByDoc);
}

}

}
Loading

0 comments on commit 906f278

Please sign in to comment.