
Fixed issue where parent & child queries can fail if a segment doesn't have documents with the targeted type or associated parent type

Closes #2537
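
The failing path: each parent/child collector resolves a per-segment IdReaderTypeCache, and that lookup returns null when the segment contains no documents of the requested type, so the subsequent id lookup threw an NPE. Below is a minimal sketch of the failure and the guard, simplified from the collectors patched in this commit; the class name is hypothetical and `context` / `parentType` stand in for the real fields:

// Sketch only: a simplified parent/child collector showing where the NPE arose.
static class ParentIdCollector extends NoopCollector {

    IdReaderTypeCache typeCache;

    @Override
    public void setNextReader(AtomicReaderContext readerContext) throws IOException {
        // Returns null if this segment has no documents of the requested type.
        typeCache = context.idCache().reader(readerContext.reader()).type(parentType);
    }

    @Override
    public void collect(int doc) throws IOException {
        if (typeCache == null) {
            return; // segment has no docs of the targeted type: nothing to collect
        }
        // Before this fix, the next line dereferenced a null typeCache.
        HashedBytesArray parentId = typeCache.parentIdByDoc(doc);
        // ... collect/score using parentId ...
    }
}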
martijnvg committed Jan 11, 2013
1 parent 43aabe8 commit 1ce10df
Showing 7 changed files with 97 additions and 10 deletions.
@@ -66,8 +66,8 @@ public HasChildQueryBuilder boost(float boost) {
     /**
      * Defines how the scores from the matching child documents are mapped into the parent document.
      */
-    public HasChildQueryBuilder scoreType(String executionType) {
-        this.scoreType = executionType;
+    public HasChildQueryBuilder scoreType(String scoreType) {
+        this.scoreType = scoreType;
         return this;
     }
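
For reference, the renamed parameter as it reads at a call site (the type, field, and value here are the ones used by the test at the end of this commit):

// The builder argument now names what it configures: the child-to-parent score mapping.
QueryBuilders.hasChildQuery("child", matchQuery("text", "value"))
        .scoreType("max"); // take the max of the matching children's scores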
@@ -21,7 +21,6 @@

 import org.apache.lucene.index.AtomicReader;
 import org.apache.lucene.index.AtomicReaderContext;
-import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.search.Collector;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.util.FixedBitSet;
@@ -70,11 +69,14 @@ public Map<Object, FixedBitSet> parentDocs() {

     @Override
     public void setScorer(Scorer scorer) throws IOException {

     }

     @Override
     public void collect(int doc) throws IOException {
+        if (typeCache == null) {
+            return;
+        }
+
         HashedBytesArray parentId = typeCache.parentIdByDoc(doc);
         if (parentId == null) {
             return;
@@ -321,6 +321,10 @@ static class ChildUidCollector extends NoopCollector {

     @Override
     public void collect(int doc) throws IOException {
+        if (typeCache == null) {
+            return;
+        }
+
         HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);
         float previousScore = uidToScore.get(parentUid);
         float currentScore = scorer.score();
@@ -364,6 +368,10 @@ static class AvgChildUidCollector extends ChildUidCollector {

     @Override
     public void collect(int doc) throws IOException {
+        if (typeCache == null) {
+            return;
+        }
+
         HashedBytesArray parentUid = typeCache.parentIdByDoc(doc);
         float previousScore = uidToScore.get(parentUid);
         float currentScore = scorer.score();
@@ -198,7 +198,11 @@ static class UidCollector extends NoopCollector {

     @Override
     public void collect(int doc) throws IOException {
-        collectedUids.add(typeCache.parentIdByDoc(doc));
+        // It can happen that a particular segment contains no documents of a specific type. This prevents an NPE.
+        if (typeCache != null) {
+            collectedUids.add(typeCache.parentIdByDoc(doc));
+        }
+
     }

     @Override
@@ -163,7 +163,10 @@ static class ParentUidsCollector extends NoopCollector {
     }

     public void collect(int doc) throws IOException {
-        collectedUids.add(typeCache.idByDoc(doc));
+        // It can happen that a particular segment contains no documents of a specific type. This prevents an NPE.
+        if (typeCache != null) {
+            collectedUids.add(typeCache.idByDoc(doc));
+        }
     }

     @Override
@@ -199,7 +202,12 @@ public DocIdSet getDocIdSet(AtomicReaderContext readerContext, Bits acceptDocs)
         throw new ElasticSearchIllegalStateException("has_parent filter hasn't executed properly");
     }

-    return new ChildrenDocSet(readerContext.reader(), acceptDocs, parentDocs, context, parentType);
+    IdReaderTypeCache currentTypeCache = context.idCache().reader(readerContext.reader()).type(parentType);
+    if (currentTypeCache == null) {
+        return null;
+    } else {
+        return new ChildrenDocSet(readerContext.reader(), currentTypeCache, acceptDocs, parentDocs, context, parentType);
+    }
 }

 public void clear() {
@@ -213,10 +221,10 @@ static class ChildrenDocSet extends MatchDocIdSet {
     final Tuple<AtomicReader, IdReaderTypeCache>[] readersToTypeCache;
     final Map<Object, FixedBitSet> parentDocs;

-    ChildrenDocSet(AtomicReader currentReader, @Nullable Bits acceptDocs, Map<Object, FixedBitSet> parentDocs,
-                   SearchContext context, String parentType) {
+    ChildrenDocSet(AtomicReader currentReader, IdReaderTypeCache currentTypeCache, @Nullable Bits acceptDocs,
+                   Map<Object, FixedBitSet> parentDocs, SearchContext context, String parentType) {
         super(currentReader.maxDoc(), acceptDocs);
-        this.currentTypeCache = context.idCache().reader(currentReader).type(parentType);
+        this.currentTypeCache = currentTypeCache;
         this.currentReader = currentReader;
         this.parentDocs = parentDocs;
         this.readersToTypeCache = new Tuple[context.searcher().getIndexReader().leaves().size()];
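
A note on the `return null` in `getDocIdSet` above: Lucene's `Filter` contract allows a null `DocIdSet` to mean "no documents match in this segment", so resolving the type cache once up front both avoids the NPE in the `ChildrenDocSet` constructor and lets the filter skip such segments entirely. A sketch of the consuming side of that contract (standard Lucene 4 usage, not code from this commit):

// How a null DocIdSet is handled downstream, per the Lucene 4 Filter contract.
DocIdSet docIdSet = filter.getDocIdSet(readerContext, acceptDocs);
if (docIdSet == null) {
    // No matches in this segment; skip to the next leaf reader.
} else {
    DocIdSetIterator iterator = docIdSet.iterator(); // may itself be null for an empty set
    // ... advance the iterator over the matching docs ...
}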
@@ -160,6 +160,10 @@ static class ParentUidCollector extends NoopCollector {

     @Override
     public void collect(int doc) throws IOException {
+        if (typeCache == null) {
+            return;
+        }
+
         HashedBytesArray parentUid = typeCache.idByDoc(doc);
         uidToScore.put(parentUid, scorer.score());
     }
@@ -1077,4 +1077,65 @@ public void testScoreForParentChildQueries() throws Exception {
         assertThat(response.hits().hits()[6].score(), equalTo(5f));
     }

+    @Test
+    // https://github.com/elasticsearch/elasticsearch/issues/2537
+    public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Exception {
+        client.admin().indices().prepareDelete().execute().actionGet();
+
+        client.admin().indices().prepareCreate("test")
+                .addMapping("parent", jsonBuilder()
+                        .startObject()
+                            .startObject("parent")
+                            .endObject()
+                        .endObject()
+                ).addMapping("child", jsonBuilder()
+                        .startObject()
+                            .startObject("child")
+                                .startObject("_parent")
+                                    .field("type", "parent")
+                                .endObject()
+                            .endObject()
+                        .endObject()
+                ).setSettings(
+                        ImmutableSettings.settingsBuilder()
+                                .put("index.number_of_shards", 1)
+                                .put("index.number_of_replicas", 0)
+                ).execute().actionGet();
+        client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
+
+        SearchResponse response = client.prepareSearch("test")
+                .setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")))
+                .execute().actionGet();
+        assertThat(response.failedShards(), equalTo(0));
+        assertThat(response.hits().totalHits(), equalTo(0L));
+
+        client.prepareIndex("test", "child1").setSource(jsonBuilder().startObject().field("text", "value").endObject())
+                .setRefresh(true)
+                .execute().actionGet();
+
+        response = client.prepareSearch("test")
+                .setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).executionType(getExecutionMethod()))
+                .execute().actionGet();
+        assertThat(response.failedShards(), equalTo(0));
+        assertThat(response.hits().totalHits(), equalTo(0L));
+
+        response = client.prepareSearch("test")
+                .setQuery(QueryBuilders.hasChildQuery("child", matchQuery("text", "value")).scoreType("max"))
+                .execute().actionGet();
+        assertThat(response.failedShards(), equalTo(0));
+        assertThat(response.hits().totalHits(), equalTo(0L));
+
+        response = client.prepareSearch("test")
+                .setQuery(QueryBuilders.hasParentQuery("parent", matchQuery("text", "value")).executionType(getExecutionMethod()))
+                .execute().actionGet();
+        assertThat(response.failedShards(), equalTo(0));
+        assertThat(response.hits().totalHits(), equalTo(0L));
+
+        response = client.prepareSearch("test")
+                .setQuery(QueryBuilders.hasParentQuery("parent", matchQuery("text", "value")).scoreType("score"))
+                .execute().actionGet();
+        assertThat(response.failedShards(), equalTo(0));
+        assertThat(response.hits().totalHits(), equalTo(0L));
+    }
+
 }
