Fold InternalSearchHits and friends into their interfaces (#23042)
We have a bunch of interfaces that have had only a single implementation
for 6 years now. These interfaces are pretty useless from a software
development perspective and only add unnecessary abstraction. They also
require lots of casting in many places where we expect that there is only
one concrete implementation. This change removes the interfaces, makes
all of the classes final, and removes the duplicate `foo` / `getFoo`
accessor pairs from these classes in favor of `getFoo`.

This is a backport of ecb01c1; instead of removing them, it deprecates
all methods that are removed in 6.0.
s1monw committed Feb 14, 2017
1 parent cf6249b commit 3005757
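
For callers of the Java API, the practical effect is the accessor rename visible in the hunks below (for example `field(...)`/`value()` becoming `getField(...)`/`getValue()`). A minimal sketch of the caller-side migration, assuming a `SearchHit` obtained from a search response and the 5.x `SearchHitField` type; the helper class and parameter names are illustrative, not part of this commit:

```java
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;

// Illustrative helper, not from this commit: reads a single field value
// from a hit using the surviving getFoo-style accessors.
class HitFieldAccess {
    static Object fieldValue(SearchHit hit, String fieldName) {
        // 5.x style, deprecated by this backport: hit.field(fieldName).value()
        SearchHitField field = hit.getField(fieldName);
        return field == null ? null : field.getValue();
    }
}
```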
Showing 34 changed files with 1,303 additions and 1,530 deletions.
@@ -28,8 +28,8 @@
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.profile.SearchProfileShardResults;
 import org.elasticsearch.search.suggest.Suggest;
@@ -49,8 +49,8 @@ public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, Trans
     @Override
     protected void doExecute(SearchRequest request, ActionListener<SearchResponse> listener) {
         listener.onResponse(new SearchResponse(new InternalSearchResponse(
-            new InternalSearchHits(
-                new InternalSearchHit[0], 0L, 0.0f),
+            new SearchHits(
+                new SearchHit[0], 0L, 0.0f),
             new InternalAggregations(Collections.emptyList()),
             new Suggest(Collections.emptyList()),
             new SearchProfileShardResults(Collections.emptyMap()), false, false), "", 1, 1, 0, new ShardSearchFailure[0]));
@@ -266,8 +266,8 @@ public Long getTTL() {
         }
 
         private <T> T fieldValue(String fieldName) {
-            SearchHitField field = delegate.field(fieldName);
-            return field == null ? null : field.value();
+            SearchHitField field = delegate.getField(fieldName);
+            return field == null ? null : field.getValue();
         }
     }
 }
@@ -45,8 +45,8 @@
 import org.elasticsearch.search.dfs.AggregatedDfs;
 import org.elasticsearch.search.dfs.DfsSearchResult;
 import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.internal.InternalSearchHit;
-import org.elasticsearch.search.internal.InternalSearchHits;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.profile.ProfileShardResult;
 import org.elasticsearch.search.profile.SearchProfileShardResults;
@@ -374,7 +374,7 @@ public InternalSearchResponse merge(boolean ignoreFrom, ScoreDoc[] sortedDocs,
             return InternalSearchResponse.empty();
         }
         List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> fetchResults = fetchResultsArr.asList();
-        InternalSearchHits hits = getHits(reducedQueryPhase, ignoreFrom, sortedDocs, fetchResultsArr);
+        SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, sortedDocs, fetchResultsArr);
         if (reducedQueryPhase.suggest != null) {
             if (!fetchResults.isEmpty()) {
                 int currentOffset = hits.getHits().length;
@@ -389,7 +389,7 @@ public InternalSearchResponse merge(boolean ignoreFrom, ScoreDoc[] sortedDocs,
                         FetchSearchResult fetchResult = searchResultProvider.fetchResult();
                         int fetchResultIndex = fetchResult.counterGetAndIncrement();
                         if (fetchResultIndex < fetchResult.hits().internalHits().length) {
-                            InternalSearchHit hit = fetchResult.hits().internalHits()[fetchResultIndex];
+                            SearchHit hit = fetchResult.hits().internalHits()[fetchResultIndex];
                             CompletionSuggestion.Entry.Option suggestOption =
                                 suggestionOptions.get(scoreDocIndex - currentOffset);
                             hit.score(shardDoc.score);
@@ -405,8 +405,8 @@ public InternalSearchResponse merge(boolean ignoreFrom, ScoreDoc[] sortedDocs,
         return reducedQueryPhase.buildResponse(hits);
     }
 
-    private InternalSearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom, ScoreDoc[] sortedDocs,
-                                       AtomicArray<? extends QuerySearchResultProvider> fetchResultsArr) {
+    private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom, ScoreDoc[] sortedDocs,
+                               AtomicArray<? extends QuerySearchResultProvider> fetchResultsArr) {
         List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> fetchResults = fetchResultsArr.asList();
         boolean sorted = false;
         int sortScoreIndex = -1;
@@ -433,7 +433,7 @@ private InternalSearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean
         // with collapsing we can have more fetch hits than sorted docs
         numSearchHits = Math.min(sortedDocs.length, numSearchHits);
         // merge hits
-        List<InternalSearchHit> hits = new ArrayList<>();
+        List<SearchHit> hits = new ArrayList<>();
         if (!fetchResults.isEmpty()) {
             for (int i = 0; i < numSearchHits; i++) {
                 ScoreDoc shardDoc = sortedDocs[i];
@@ -444,7 +444,7 @@ private InternalSearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean
                 FetchSearchResult fetchResult = fetchResultProvider.fetchResult();
                 int index = fetchResult.counterGetAndIncrement();
                 if (index < fetchResult.hits().internalHits().length) {
-                    InternalSearchHit searchHit = fetchResult.hits().internalHits()[index];
+                    SearchHit searchHit = fetchResult.hits().internalHits()[index];
                     searchHit.score(shardDoc.score);
                     searchHit.shard(fetchResult.shardTarget());
                     if (sorted) {
@@ -458,7 +458,7 @@ private InternalSearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean
                 }
             }
         }
-        return new InternalSearchHits(hits.toArray(new InternalSearchHit[hits.size()]), reducedQueryPhase.totalHits,
+        return new SearchHits(hits.toArray(new SearchHit[hits.size()]), reducedQueryPhase.totalHits,
             reducedQueryPhase.maxScore);
     }
 
@@ -584,7 +584,7 @@ public static final class ReducedQueryPhase {
          * Creates a new search response from the given merged hits.
          * @see #merge(boolean, ScoreDoc[], ReducedQueryPhase, AtomicArray)
          */
-        public InternalSearchResponse buildResponse(InternalSearchHits hits) {
+        public InternalSearchResponse buildResponse(SearchHits hits) {
             return new InternalSearchResponse(hits, aggregations, suggest, shardResults, timedOut, terminatedEarly);
         }
 
@@ -46,7 +46,6 @@
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.collapse.CollapseBuilder;
 import org.elasticsearch.search.internal.AliasFilter;
-import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.Transport;
@@ -331,17 +330,16 @@ void expandCollapsedHits(DiscoveryNode node,
             ActionListener.wrap(response -> {
                 Iterator<MultiSearchResponse.Item> it = response.iterator();
                 for (SearchHit hit : searchResponse.getHits()) {
-                    InternalSearchHit internalHit = (InternalSearchHit) hit;
                     MultiSearchResponse.Item item = it.next();
                     if (item.isFailure()) {
                         finalListener.onFailure(item.getFailure());
                         return;
                     }
                     SearchHits innerHits = item.getResponse().getHits();
-                    if (internalHit.getInnerHits() == null) {
-                        internalHit.setInnerHits(new HashMap<>(1));
+                    if (hit.getInnerHits() == null) {
+                        hit.setInnerHits(new HashMap<>(1));
                     }
-                    internalHit.getInnerHits().put(collapseBuilder.getInnerHit().getName(), innerHits);
+                    hit.getInnerHits().put(collapseBuilder.getInnerHit().getName(), innerHits);
                 }
                 finalListener.onResponse(searchResponse);
             }, finalListener::onFailure)
