Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add sort and collapse info to SearchHits transport serialization #36555

Merged
merged 9 commits into from
Dec 14, 2018
Prev Previous commit
Next Next commit
add serialization of SortFields, collapseField and collapseValues to SearchResponse
javanna committed Dec 13, 2018
commit b6ecd0cd23048c04dc759abccba0b73e99ee9c48
Original file line number Diff line number Diff line change
@@ -109,8 +109,9 @@ private void innerRun() throws IOException {
// query AND fetch optimization
finishPhase.run();
} else {
final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(numShards, reducedQueryPhase.scoreDocs);
if (reducedQueryPhase.scoreDocs.length == 0) { // no docs to fetch -- sidestep everything and return
ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs.scoreDocs;
final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(numShards, scoreDocs);
if (scoreDocs.length == 0) { // no docs to fetch -- sidestep everything and return
phaseResults.stream()
.map(SearchPhaseResult::queryResult)
.forEach(this::releaseIrrelevantSearchContext); // we have to release contexts here to free up resources
Original file line number Diff line number Diff line change
@@ -211,18 +211,23 @@ static SortedTopDocs sortDocs(boolean ignoreFrom, Collection<? extends SearchPha
}
}
}
final boolean isSortedByField;
final SortField[] sortFields;
boolean isSortedByField = false;
SortField[] sortFields = null;
String collapseField = null;
Object[] collapseValues = null;
if (mergedTopDocs instanceof TopFieldDocs) {
TopFieldDocs fieldDocs = (TopFieldDocs) mergedTopDocs;
isSortedByField = (fieldDocs instanceof CollapseTopFieldDocs &&
fieldDocs.fields.length == 1 && fieldDocs.fields[0].getType() == SortField.Type.SCORE) == false;
sortFields = fieldDocs.fields;
} else {
isSortedByField = false;
sortFields = null;
if (fieldDocs instanceof CollapseTopFieldDocs) {
isSortedByField = (fieldDocs.fields.length == 1 && fieldDocs.fields[0].getType() == SortField.Type.SCORE) == false;
CollapseTopFieldDocs collapseTopFieldDocs = (CollapseTopFieldDocs) fieldDocs;
collapseField = collapseTopFieldDocs.field;
collapseValues = collapseTopFieldDocs.collapseValues;
} else {
isSortedByField = true;
}
}
return new SortedTopDocs(scoreDocs, isSortedByField, sortFields);
return new SortedTopDocs(scoreDocs, isSortedByField, sortFields, collapseField, collapseValues);
} else {
// no relevant docs
return SortedTopDocs.EMPTY;
@@ -266,7 +271,7 @@ private static void setShardIndex(TopDocs topDocs, int shardIndex) {
public ScoreDoc[] getLastEmittedDocPerShard(ReducedQueryPhase reducedQueryPhase, int numShards) {
final ScoreDoc[] lastEmittedDocPerShard = new ScoreDoc[numShards];
if (reducedQueryPhase.isEmptyResult == false) {
final ScoreDoc[] sortedScoreDocs = reducedQueryPhase.scoreDocs;
final ScoreDoc[] sortedScoreDocs = reducedQueryPhase.sortedTopDocs.scoreDocs;
// from is always zero as when we use scroll, we ignore from
long size = Math.min(reducedQueryPhase.fetchHits, reducedQueryPhase.size);
// with collapsing we can have more hits than sorted docs
@@ -307,7 +312,7 @@ public InternalSearchResponse merge(boolean ignoreFrom, ReducedQueryPhase reduce
if (reducedQueryPhase.isEmptyResult) {
return InternalSearchResponse.empty();
}
ScoreDoc[] sortedDocs = reducedQueryPhase.scoreDocs;
ScoreDoc[] sortedDocs = reducedQueryPhase.sortedTopDocs.scoreDocs;
SearchHits hits = getHits(reducedQueryPhase, ignoreFrom, fetchResults, resultsLookup);
if (reducedQueryPhase.suggest != null) {
if (!fetchResults.isEmpty()) {
@@ -345,12 +350,12 @@ public InternalSearchResponse merge(boolean ignoreFrom, ReducedQueryPhase reduce

private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFrom,
Collection<? extends SearchPhaseResult> fetchResults, IntFunction<SearchPhaseResult> resultsLookup) {
final boolean sorted = reducedQueryPhase.isSortedByField;
ScoreDoc[] sortedDocs = reducedQueryPhase.scoreDocs;
SortedTopDocs sortedTopDocs = reducedQueryPhase.sortedTopDocs;
int sortScoreIndex = -1;
if (sorted) {
for (int i = 0; i < reducedQueryPhase.sortField.length; i++) {
if (reducedQueryPhase.sortField[i].getType() == SortField.Type.SCORE) {
if (sortedTopDocs.isSortedByField) {
SortField[] sortFields = sortedTopDocs.sortFields;
for (int i = 0; i < sortFields.length; i++) {
if (sortFields[i].getType() == SortField.Type.SCORE) {
sortScoreIndex = i;
}
}
@@ -362,12 +367,12 @@ private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFr
int from = ignoreFrom ? 0 : reducedQueryPhase.from;
int numSearchHits = (int) Math.min(reducedQueryPhase.fetchHits - from, reducedQueryPhase.size);
// with collapsing we can have more fetch hits than sorted docs
numSearchHits = Math.min(sortedDocs.length, numSearchHits);
numSearchHits = Math.min(sortedTopDocs.scoreDocs.length, numSearchHits);
// merge hits
List<SearchHit> hits = new ArrayList<>();
if (!fetchResults.isEmpty()) {
for (int i = 0; i < numSearchHits; i++) {
ScoreDoc shardDoc = sortedDocs[i];
ScoreDoc shardDoc = sortedTopDocs.scoreDocs[i];
SearchPhaseResult fetchResultProvider = resultsLookup.apply(shardDoc.shardIndex);
if (fetchResultProvider == null) {
// this can happen if we are hitting a shard failure during the fetch phase
@@ -381,21 +386,21 @@ private SearchHits getHits(ReducedQueryPhase reducedQueryPhase, boolean ignoreFr
assert index < fetchResult.hits().getHits().length : "not enough hits fetched. index [" + index + "] length: "
+ fetchResult.hits().getHits().length;
SearchHit searchHit = fetchResult.hits().getHits()[index];
if (sorted == false) {
searchHit.score(shardDoc.score);
}
searchHit.shard(fetchResult.getSearchShardTarget());
if (sorted) {
if (sortedTopDocs.isSortedByField) {
FieldDoc fieldDoc = (FieldDoc) shardDoc;
searchHit.sortValues(fieldDoc.fields, reducedQueryPhase.sortValueFormats);
if (sortScoreIndex != -1) {
searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue());
}
} else {
searchHit.score(shardDoc.score);
}
hits.add(searchHit);
}
}
return new SearchHits(hits.toArray(new SearchHit[0]), reducedQueryPhase.totalHits, reducedQueryPhase.maxScore);
return new SearchHits(hits.toArray(new SearchHit[0]), reducedQueryPhase.totalHits,
reducedQueryPhase.maxScore, sortedTopDocs.sortFields, sortedTopDocs.collapseField, sortedTopDocs.collapseValues);
}

/**
@@ -436,8 +441,7 @@ private ReducedQueryPhase reducedQueryPhase(Collection<? extends SearchPhaseResu
if (queryResults.isEmpty()) { // early terminate we have nothing to reduce
final TotalHits totalHits = topDocsStats.getTotalHits();
return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.maxScore,
timedOut, terminatedEarly, null, null, null, EMPTY_DOCS, null,
null, numReducePhases, false, 0, 0, true);
timedOut, terminatedEarly, null, null, null, SortedTopDocs.EMPTY, null, numReducePhases, 0, 0, true);
}
final QuerySearchResult firstResult = queryResults.stream().findFirst().get().queryResult();
final boolean hasSuggest = firstResult.suggest() != null;
@@ -499,11 +503,11 @@ private ReducedQueryPhase reducedQueryPhase(Collection<? extends SearchPhaseResu
final InternalAggregations aggregations = aggregationsList.isEmpty() ? null : reduceAggs(aggregationsList,
firstResult.pipelineAggregators(), reduceContext);
final SearchProfileShardResults shardResults = profileResults.isEmpty() ? null : new SearchProfileShardResults(profileResults);
final SortedTopDocs scoreDocs = sortDocs(isScrollRequest, queryResults, bufferedTopDocs, topDocsStats, from, size);
final SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, queryResults, bufferedTopDocs, topDocsStats, from, size);
final TotalHits totalHits = topDocsStats.getTotalHits();
return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.maxScore,
timedOut, terminatedEarly, suggest, aggregations, shardResults, scoreDocs.scoreDocs, scoreDocs.sortFields,
firstResult.sortValueFormats(), numReducePhases, scoreDocs.isSortedByField, size, from, false);
timedOut, terminatedEarly, suggest, aggregations, shardResults, sortedTopDocs,
firstResult.sortValueFormats(), numReducePhases, size, from, firstResult == null);
}

/**
@@ -551,12 +555,8 @@ public static final class ReducedQueryPhase {
final SearchProfileShardResults shardResults;
// the number of reduces phases
final int numReducePhases;
// the searches merged top docs
final ScoreDoc[] scoreDocs;
// the top docs sort fields used to sort the score docs, <code>null</code> if the results are not sorted
final SortField[] sortField;
// <code>true</code> iff the result score docs is sorted by a field (not score), this implies that <code>sortField</code> is set.
final boolean isSortedByField;
//encloses info about the merged top docs, the sort fields used to sort the score docs etc.
final SortedTopDocs sortedTopDocs;
// the size of the top hits to return
final int size;
// <code>true</code> iff the query phase had no results. Otherwise <code>false</code>
@@ -567,9 +567,8 @@ public static final class ReducedQueryPhase {
final DocValueFormat[] sortValueFormats;

ReducedQueryPhase(TotalHits totalHits, long fetchHits, float maxScore, boolean timedOut, Boolean terminatedEarly, Suggest suggest,
InternalAggregations aggregations, SearchProfileShardResults shardResults, ScoreDoc[] scoreDocs,
SortField[] sortFields, DocValueFormat[] sortValueFormats, int numReducePhases, boolean isSortedByField, int size,
int from, boolean isEmptyResult) {
InternalAggregations aggregations, SearchProfileShardResults shardResults, SortedTopDocs sortedTopDocs,
DocValueFormat[] sortValueFormats, int numReducePhases, int size, int from, boolean isEmptyResult) {
if (numReducePhases <= 0) {
throw new IllegalArgumentException("at least one reduce phase must have been applied but was: " + numReducePhases);
}
@@ -586,9 +585,7 @@ public static final class ReducedQueryPhase {
this.aggregations = aggregations;
this.shardResults = shardResults;
this.numReducePhases = numReducePhases;
this.scoreDocs = scoreDocs;
this.sortField = sortFields;
this.isSortedByField = isSortedByField;
this.sortedTopDocs = sortedTopDocs;
this.size = size;
this.from = from;
this.isEmptyResult = isEmptyResult;
@@ -728,7 +725,7 @@ InitialSearchPhase.ArraySearchPhaseResults<SearchPhaseResult> newSearchPhaseResu
}
return new InitialSearchPhase.ArraySearchPhaseResults<SearchPhaseResult>(numShards) {
@Override
public ReducedQueryPhase reduce() {
ReducedQueryPhase reduce() {
return reducedQueryPhase(results.asList(), isScrollRequest, trackTotalHits);
}
};
@@ -770,15 +767,23 @@ void add(TopDocsAndMaxScore topDocs) {
}

static final class SortedTopDocs {
static final SortedTopDocs EMPTY = new SortedTopDocs(EMPTY_DOCS, false, null);
static final SortedTopDocs EMPTY = new SortedTopDocs(EMPTY_DOCS, false, null, null, null);
// the searches merged top docs
final ScoreDoc[] scoreDocs;
// <code>true</code> iff the result score docs is sorted by a field (not score), this implies that <code>sortField</code> is set.
final boolean isSortedByField;
// the top docs sort fields used to sort the score docs, <code>null</code> if the results are not sorted
final SortField[] sortFields;
final String collapseField;
final Object[] collapseValues;

SortedTopDocs(ScoreDoc[] scoreDocs, boolean isSortedByField, SortField[] sortFields) {
SortedTopDocs(ScoreDoc[] scoreDocs, boolean isSortedByField, SortField[] sortFields,
String collapseField, Object[] collapseValues) {
this.scoreDocs = scoreDocs;
this.isSortedByField = isSortedByField;
this.sortFields = sortFields;
this.collapseField = collapseField;
this.collapseValues = collapseValues;
}
}
}
Original file line number Diff line number Diff line change
@@ -35,7 +35,6 @@
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import org.elasticsearch.transport.Transport;

import java.io.IOException;
import java.util.function.BiFunction;

final class SearchScrollQueryThenFetchAsyncAction extends SearchScrollAsyncAction<ScrollQuerySearchResult> {
@@ -68,16 +67,16 @@ protected void executeInitialPhase(Transport.Connection connection, InternalScro
protected SearchPhase moveToNextPhase(BiFunction<String, String, DiscoveryNode> clusterNodeLookup) {
return new SearchPhase("fetch") {
@Override
public void run() throws IOException {
public void run() {
final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = searchPhaseController.reducedScrollQueryPhase(
queryResults.asList());
if (reducedQueryPhase.scoreDocs.length == 0) {
ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs.scoreDocs;
if (scoreDocs.length == 0) {
sendResponse(reducedQueryPhase, fetchResults);
return;
}

final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(queryResults.length(),
reducedQueryPhase.scoreDocs);
final IntArrayList[] docIdsToLoad = searchPhaseController.fillDocIdsToLoad(queryResults.length(), scoreDocs);
final ScoreDoc[] lastEmittedDocPerShard = searchPhaseController.getLastEmittedDocPerShard(reducedQueryPhase,
queryResults.length());
final CountDown counter = new CountDown(docIdsToLoad.length);
72 changes: 66 additions & 6 deletions server/src/main/java/org/elasticsearch/search/SearchHits.java
Original file line number Diff line number Diff line change
@@ -19,8 +19,10 @@

package org.elasticsearch.search;

import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@@ -45,7 +47,7 @@ public final class SearchHits implements Streamable, ToXContentFragment, Iterabl

/**
 * Returns an empty {@code SearchHits} instance: no hits, a total of zero, no max score,
 * and no sort/collapse metadata.
 */
public static SearchHits empty() {
    // We shouldn't use static final instance, since that could directly be returned by native transport clients
    return new SearchHits(EMPTY, new TotalHits(0, Relation.EQUAL_TO), 0, null, null, null);
}

public static final SearchHit[] EMPTY = new SearchHit[0];
@@ -56,14 +58,33 @@ public static SearchHits empty() {

private float maxScore;

@Nullable
private SortField[] sortFields;
@Nullable
private String collapseField;
@Nullable
private Object[] collapseValues;

SearchHits() {

}

//TODO look for other users of the old constructor!!!

//TODO we may want to remove this constructor and replace it with the new one:
// such change causes a lot of noise, probably wise to make it as a followup
/**
 * Creates hits without sort or collapse metadata; delegates to the full constructor
 * passing {@code null} for sortFields, collapseField and collapseValues.
 */
public SearchHits(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore) {
this(hits, totalHits, maxScore, null, null, null);
}

/**
 * @param hits the individual search hits
 * @param totalHits the total number of hits, or {@code null} when total hits were not tracked
 * @param maxScore the maximum score across hits ({@code Float.NaN} when not available)
 * @param sortFields the fields documents were sorted by, or {@code null} when not sorted by field
 * @param collapseField the field used for field collapsing, or {@code null} when no collapsing was applied
 * @param collapseValues the collapse-field values, or {@code null} when no collapsing was applied
 */
public SearchHits(SearchHit[] hits, @Nullable TotalHits totalHits, float maxScore, @Nullable SortField[] sortFields,
@Nullable String collapseField, @Nullable Object[] collapseValues) {
this.hits = hits;
this.totalHits = totalHits == null ? null : new Total(totalHits);
this.maxScore = maxScore;
this.sortFields = sortFields;
this.collapseField = collapseField;
this.collapseValues = collapseValues;
}

/**
@@ -74,7 +95,6 @@ public TotalHits getTotalHits() {
return totalHits == null ? null : totalHits.in;
}


/**
* The maximum score of this query.
*/
@@ -96,6 +116,31 @@ public SearchHit getAt(int position) {
return hits[position];
}

/**
 * In case documents were sorted by field(s), returns information about such field(s), {@code null} otherwise.
 * @see SortField
 */
@Nullable
public SortField[] getSortFields() {
return sortFields;
}

/**
 * In case field collapsing was performed, returns the field used for field collapsing, {@code null} otherwise.
 */
@Nullable
public String getCollapseField() {
return collapseField;
}

/**
 * In case field collapsing was performed, returns the values of the field that field collapsing
 * was performed on, {@code null} otherwise.
 */
@Nullable
public Object[] getCollapseValues() {
return collapseValues;
}

@Override
public Iterator<SearchHit> iterator() {
return Arrays.stream(getHits()).iterator();
@@ -175,8 +220,7 @@ public static SearchHits fromXContent(XContentParser parser) throws IOException
}
}
}
SearchHits searchHits = new SearchHits(hits.toArray(new SearchHit[hits.size()]), totalHits, maxScore);
return searchHits;
return new SearchHits(hits.toArray(new SearchHit[0]), totalHits, maxScore);
}

public static SearchHits readSearchHits(StreamInput in) throws IOException {
@@ -203,6 +247,12 @@ public void readFrom(StreamInput in) throws IOException {
hits[i] = SearchHit.readSearchHit(in);
}
}
//TODO update version once backported
if (in.getVersion().onOrAfter(Version.V_7_0_0)) {
sortFields = in.readOptionalArray(Lucene::readSortField, SortField[]::new);
collapseField = in.readOptionalString();
collapseValues = in.readOptionalArray(Lucene::readSortValue, Object[]::new);
}
}

@Override
@@ -219,6 +269,12 @@ public void writeTo(StreamOutput out) throws IOException {
hit.writeTo(out);
}
}
//TODO update version once backported
if (out.getVersion().onOrAfter(Version.V_7_0_0)) {
out.writeOptionalArray(Lucene::writeSortField, sortFields);
out.writeOptionalString(collapseField);
out.writeOptionalArray(Lucene::writeSortValue, collapseValues);
}
}

@Override
@@ -229,12 +285,16 @@ public boolean equals(Object obj) {
SearchHits other = (SearchHits) obj;
return Objects.equals(totalHits, other.totalHits)
&& Objects.equals(maxScore, other.maxScore)
&& Arrays.equals(hits, other.hits);
&& Arrays.equals(hits, other.hits)
&& Arrays.equals(sortFields, other.sortFields)
&& Objects.equals(collapseField, other.collapseField)
&& Arrays.equals(collapseValues, other.collapseValues);
}

@Override
public int hashCode() {
    // Must stay consistent with equals(): include the sort and collapse metadata added
    // alongside hits, totalHits and maxScore.
    return Objects.hash(totalHits, maxScore, Arrays.hashCode(hits),
        Arrays.hashCode(sortFields), collapseField, Arrays.hashCode(collapseValues));
}

public static TotalHits parseTotalHitsFragment(XContentParser parser) throws IOException {
Original file line number Diff line number Diff line change
@@ -24,6 +24,7 @@
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.search.TotalHits.Relation;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.BigArrays;
@@ -47,7 +48,6 @@
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -138,7 +138,7 @@ private AtomicArray<SearchPhaseResult> generateSeededQueryResults(long seed, int
() -> generateQueryResults(nShards, suggestions, searchHitsSize, useConstantScore));
}

public void testMerge() throws IOException {
public void testMerge() {
List<CompletionSuggestion> suggestions = new ArrayList<>();
int maxSuggestSize = 0;
for (int i = 0; i < randomIntBetween(1, 5); i++) {
@@ -152,8 +152,8 @@ public void testMerge() throws IOException {
for (boolean trackTotalHits : new boolean[] {true, false}) {
SearchPhaseController.ReducedQueryPhase reducedQueryPhase =
searchPhaseController.reducedQueryPhase(queryResults.asList(), false, trackTotalHits);
AtomicArray<SearchPhaseResult> searchPhaseResultAtomicArray = generateFetchResults(nShards, reducedQueryPhase.scoreDocs,
reducedQueryPhase.suggest);
AtomicArray<SearchPhaseResult> searchPhaseResultAtomicArray = generateFetchResults(nShards,
reducedQueryPhase.sortedTopDocs.scoreDocs, reducedQueryPhase.suggest);
InternalSearchResponse mergedResponse = searchPhaseController.merge(false,
reducedQueryPhase,
searchPhaseResultAtomicArray.asList(), searchPhaseResultAtomicArray::get);
@@ -166,7 +166,7 @@ public void testMerge() throws IOException {
suggestSize += stream.collect(Collectors.summingInt(e -> e.getOptions().size()));
}
assertThat(suggestSize, lessThanOrEqualTo(maxSuggestSize));
assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.scoreDocs.length - suggestSize));
assertThat(mergedResponse.hits().getHits().length, equalTo(reducedQueryPhase.sortedTopDocs.scoreDocs.length - suggestSize));
Suggest suggestResult = mergedResponse.suggest();
for (Suggest.Suggestion<?> suggestion : reducedQueryPhase.suggest) {
assertThat(suggestion, instanceOf(CompletionSuggestion.class));
@@ -183,24 +183,26 @@ public void testMerge() throws IOException {
}
}

private AtomicArray<SearchPhaseResult> generateQueryResults(int nShards,
//TODO add new tests that check returned values as part of SortedTopDocs. Test also with TopFieldDocs and CollapseTopFieldDocs

private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards,
List<CompletionSuggestion> suggestions,
int searchHitsSize, boolean useConstantScore) {
AtomicArray<SearchPhaseResult> queryResults = new AtomicArray<>(nShards);
for (int shardIndex = 0; shardIndex < nShards; shardIndex++) {
QuerySearchResult querySearchResult = new QuerySearchResult(shardIndex,
new SearchShardTarget("", new Index("", ""), shardIndex, null));
TopDocs topDocs = new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]);
final TopDocs topDocs;
float maxScore = 0;
if (searchHitsSize > 0) {
if (searchHitsSize == 0) {
topDocs = Lucene.EMPTY_TOP_DOCS;
} else {
int nDocs = randomIntBetween(0, searchHitsSize);
ScoreDoc[] scoreDocs = new ScoreDoc[nDocs];
for (int i = 0; i < nDocs; i++) {
float score = useConstantScore ? 1.0F : Math.abs(randomFloat());
scoreDocs[i] = new ScoreDoc(i, score);
if (score > maxScore) {
maxScore = score;
}
maxScore = Math.max(score, maxScore);
}
topDocs = new TopDocs(new TotalHits(scoreDocs.length, TotalHits.Relation.EQUAL_TO), scoreDocs);
}
@@ -283,7 +285,7 @@ private AtomicArray<SearchPhaseResult> generateFetchResults(int nShards, ScoreDo
}
}
}
SearchHit[] hits = searchHits.toArray(new SearchHit[searchHits.size()]);
SearchHit[] hits = searchHits.toArray(new SearchHit[0]);
fetchSearchResult.hits(new SearchHits(hits, new TotalHits(hits.length, Relation.EQUAL_TO), maxScore));
fetchResults.set(shardIndex, fetchSearchResult);
}
@@ -336,6 +338,10 @@ public void testConsumer() {
assertEquals(numTotalReducePhases, reduce.numReducePhases);
InternalMax max = (InternalMax) reduce.aggregations.asList().get(0);
assertEquals(3.0D, max.getValue(), 0.0D);
assertFalse(reduce.sortedTopDocs.isSortedByField);
assertNull(reduce.sortedTopDocs.sortFields);
assertNull(reduce.sortedTopDocs.collapseField);
assertNull(reduce.sortedTopDocs.collapseValues);
}

public void testConsumerConcurrently() throws InterruptedException {
@@ -374,13 +380,17 @@ public void testConsumerConcurrently() throws InterruptedException {
SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
InternalMax internalMax = (InternalMax) reduce.aggregations.asList().get(0);
assertEquals(max.get(), internalMax.getValue(), 0.0D);
assertEquals(1, reduce.scoreDocs.length);
assertEquals(1, reduce.sortedTopDocs.scoreDocs.length);
assertEquals(max.get(), reduce.maxScore, 0.0f);
assertEquals(expectedNumResults, reduce.totalHits.value);
assertEquals(max.get(), reduce.scoreDocs[0].score, 0.0f);
assertEquals(max.get(), reduce.sortedTopDocs.scoreDocs[0].score, 0.0f);
assertFalse(reduce.sortedTopDocs.isSortedByField);
assertNull(reduce.sortedTopDocs.sortFields);
assertNull(reduce.sortedTopDocs.collapseField);
assertNull(reduce.sortedTopDocs.collapseValues);
}

public void testConsumerOnlyAggs() throws InterruptedException {
public void testConsumerOnlyAggs() {
int expectedNumResults = randomIntBetween(1, 100);
int bufferSize = randomIntBetween(2, 200);
SearchRequest request = new SearchRequest();
@@ -390,29 +400,31 @@ public void testConsumerOnlyAggs() throws InterruptedException {
searchPhaseController.newSearchPhaseResults(request, expectedNumResults);
AtomicInteger max = new AtomicInteger();
for (int i = 0; i < expectedNumResults; i++) {
int id = i;
int number = randomIntBetween(1, 1000);
max.updateAndGet(prev -> Math.max(prev, number));
QuerySearchResult result = new QuerySearchResult(id, new SearchShardTarget("node", new Index("a", "b"), id, null));
QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new Index("a", "b"), i, null));
result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number),
new DocValueFormat[0]);
InternalAggregations aggs = new InternalAggregations(Arrays.asList(new InternalMax("test", (double) number,
DocValueFormat.RAW, Collections.emptyList(), Collections.emptyMap())));
result.aggregations(aggs);
result.setShardIndex(id);
result.setShardIndex(i);
result.size(1);
consumer.consumeResult(result);
}
SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
InternalMax internalMax = (InternalMax) reduce.aggregations.asList().get(0);
assertEquals(max.get(), internalMax.getValue(), 0.0D);
assertEquals(0, reduce.scoreDocs.length);
assertEquals(0, reduce.sortedTopDocs.scoreDocs.length);
assertEquals(max.get(), reduce.maxScore, 0.0f);
assertEquals(expectedNumResults, reduce.totalHits.value);
assertFalse(reduce.sortedTopDocs.isSortedByField);
assertNull(reduce.sortedTopDocs.sortFields);
assertNull(reduce.sortedTopDocs.collapseField);
assertNull(reduce.sortedTopDocs.collapseValues);
}


public void testConsumerOnlyHits() throws InterruptedException {
public void testConsumerOnlyHits() {
int expectedNumResults = randomIntBetween(1, 100);
int bufferSize = randomIntBetween(2, 200);
SearchRequest request = new SearchRequest();
@@ -424,24 +436,26 @@ public void testConsumerOnlyHits() throws InterruptedException {
searchPhaseController.newSearchPhaseResults(request, expectedNumResults);
AtomicInteger max = new AtomicInteger();
for (int i = 0; i < expectedNumResults; i++) {
int id = i;
int number = randomIntBetween(1, 1000);
max.updateAndGet(prev -> Math.max(prev, number));
QuerySearchResult result = new QuerySearchResult(id, new SearchShardTarget("node", new Index("a", "b"), id, null));
QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new Index("a", "b"), i, null));
result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO),
new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]);
result.setShardIndex(id);
result.setShardIndex(i);
result.size(1);
consumer.consumeResult(result);
}
SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
assertEquals(1, reduce.scoreDocs.length);
assertEquals(1, reduce.sortedTopDocs.scoreDocs.length);
assertEquals(max.get(), reduce.maxScore, 0.0f);
assertEquals(expectedNumResults, reduce.totalHits.value);
assertEquals(max.get(), reduce.scoreDocs[0].score, 0.0f);
assertEquals(max.get(), reduce.sortedTopDocs.scoreDocs[0].score, 0.0f);
assertFalse(reduce.sortedTopDocs.isSortedByField);
assertNull(reduce.sortedTopDocs.sortFields);
assertNull(reduce.sortedTopDocs.collapseField);
assertNull(reduce.sortedTopDocs.collapseValues);
}


public void testNewSearchPhaseResults() {
for (int i = 0; i < 10; i++) {
int expectedNumResults = randomIntBetween(1, 10);
@@ -497,15 +511,15 @@ public void testReduceTopNWithFromOffset() {
consumer.consumeResult(result);
}
// 4*3 results = 12 we get result 5 to 10 here with from=5 and size=5

SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
assertEquals(5, reduce.scoreDocs.length);
ScoreDoc[] scoreDocs = reduce.sortedTopDocs.scoreDocs;
assertEquals(5, scoreDocs.length);
assertEquals(100.f, reduce.maxScore, 0.0f);
assertEquals(12, reduce.totalHits.value);
assertEquals(95.0f, reduce.scoreDocs[0].score, 0.0f);
assertEquals(94.0f, reduce.scoreDocs[1].score, 0.0f);
assertEquals(93.0f, reduce.scoreDocs[2].score, 0.0f);
assertEquals(92.0f, reduce.scoreDocs[3].score, 0.0f);
assertEquals(91.0f, reduce.scoreDocs[4].score, 0.0f);
assertEquals(95.0f, scoreDocs[0].score, 0.0f);
assertEquals(94.0f, scoreDocs[1].score, 0.0f);
assertEquals(93.0f, scoreDocs[2].score, 0.0f);
assertEquals(92.0f, scoreDocs[3].score, 0.0f);
assertEquals(91.0f, scoreDocs[4].score, 0.0f);
}
}
24 changes: 12 additions & 12 deletions server/src/test/java/org/elasticsearch/search/SearchHitTests.java
Original file line number Diff line number Diff line change
@@ -19,15 +19,6 @@

package org.elasticsearch.search;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.OriginalIndices;
@@ -52,6 +43,15 @@
import org.elasticsearch.test.AbstractStreamableTestCase;
import org.elasticsearch.test.RandomObjects;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;

import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
@@ -65,7 +65,7 @@ public static SearchHit createTestItem(boolean withOptionalInnerHits, boolean wi
return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget);
}

public static SearchHit createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean withShardTarget) {
public static SearchHit createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization) {
int internalId = randomInt();
String uid = randomAlphaOfLength(10);
Text type = new Text(randomAlphaOfLengthBetween(5, 10));
@@ -120,12 +120,12 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp
Map<String, SearchHits> innerHits = new HashMap<>(innerHitsSize);
for (int i = 0; i < innerHitsSize; i++) {
innerHits.put(randomAlphaOfLength(5),
SearchHitsTests.createTestItem(xContentType, false, withShardTarget));
SearchHitsTests.createTestItem(xContentType, false, transportSerialization));
}
hit.setInnerHits(innerHits);
}
}
if (withShardTarget && randomBoolean()) {
if (transportSerialization && randomBoolean()) {
String index = randomAlphaOfLengthBetween(5, 10);
String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10),
121 changes: 111 additions & 10 deletions server/src/test/java/org/elasticsearch/search/SearchHitsTests.java
Original file line number Diff line number Diff line change
@@ -19,11 +19,15 @@

package org.elasticsearch.search;

import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TotalHits;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.Version;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lucene.LuceneTests;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -34,49 +38,83 @@
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.AbstractStreamableXContentTestCase;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.Base64;
import java.util.Collections;
import java.util.function.Predicate;

public class SearchHitsTests extends AbstractStreamableXContentTestCase<SearchHits> {

public static SearchHits createTestItem(boolean withOptionalInnerHits, boolean withShardTarget) {
return createTestItem(randomFrom(XContentType.values()), withOptionalInnerHits, withShardTarget);
}

private static SearchHit[] createSearchHitArray(int size, XContentType xContentType, boolean withOptionalInnerHits,
boolean withShardTarget) {
boolean transportSerialization) {
SearchHit[] hits = new SearchHit[size];
for (int i = 0; i < hits.length; i++) {
hits[i] = SearchHitTests.createTestItem(xContentType, withOptionalInnerHits, withShardTarget);
hits[i] = SearchHitTests.createTestItem(xContentType, withOptionalInnerHits, transportSerialization);
}
return hits;
}

private static TotalHits randomTotalHits() {
private static TotalHits randomTotalHits(TotalHits.Relation relation) {
long totalHits = TestUtil.nextLong(random(), 0, Long.MAX_VALUE);
TotalHits.Relation relation = randomFrom(TotalHits.Relation.values());
return new TotalHits(totalHits, relation);
}

public static SearchHits createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean withShardTarget) {
public static SearchHits createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization) {
return createTestItem(xContentType, withOptionalInnerHits, transportSerialization, randomFrom(TotalHits.Relation.values()));
}

private static SearchHits createTestItem(XContentType xContentType, boolean withOptionalInnerHits, boolean transportSerialization,
TotalHits.Relation totalHitsRelation) {
int searchHits = randomIntBetween(0, 5);
SearchHit[] hits = createSearchHitArray(searchHits, xContentType, withOptionalInnerHits, withShardTarget);
SearchHit[] hits = createSearchHitArray(searchHits, xContentType, withOptionalInnerHits, transportSerialization);
TotalHits totalHits = frequently() ? randomTotalHits(totalHitsRelation) : null;
float maxScore = frequently() ? randomFloat() : Float.NaN;
return new SearchHits(hits, frequently() ? randomTotalHits() : null, maxScore);
SortField[] sortFields = null;
String collapseField = null;
Object[] collapseValues = null;
if (transportSerialization) {
sortFields = randomBoolean() ? createSortFields(randomIntBetween(1, 5)) : null;
collapseField = randomAlphaOfLengthBetween(5, 10);
collapseValues = randomBoolean() ? createCollapseValues(randomIntBetween(1, 10)) : null;
}
return new SearchHits(hits, totalHits, maxScore, sortFields, collapseField, collapseValues);
}

/**
 * Builds an array of {@code size} randomly generated {@link SortField} instances, used to
 * populate the sort fields that {@code SearchHits} carries over transport serialization.
 *
 * @param size number of sort fields to generate (callers pass a small random count)
 * @return a freshly allocated array of random, already-simplified sort fields
 */
private static SortField[] createSortFields(int size) {
SortField[] sortFields = new SortField[size];
for (int i = 0; i < sortFields.length; i++) {
//sort fields are simplified before serialization, we write directly the simplified version
//otherwise equality comparisons become complicated
// (randomSortField() returns a tuple; v2() is the simplified form of v1())
sortFields[i] = LuceneTests.randomSortField().v2();
}
return sortFields;
}

/**
 * Builds an array of {@code size} random collapse values via
 * {@code LuceneTests.randomSortValue()}, used to populate the collapse values that
 * {@code SearchHits} carries over transport serialization.
 *
 * @param size number of collapse values to generate
 * @return a freshly allocated array of random sort-value objects
 */
private static Object[] createCollapseValues(int size) {
Object[] collapseValues = new Object[size];
for (int i = 0; i < collapseValues.length; i++) {
collapseValues[i] = LuceneTests.randomSortValue();
}
return collapseValues;
}

@Override
protected SearchHits mutateInstance(SearchHits instance) {
switch (randomIntBetween(0, 2)) {
switch (randomIntBetween(0, 5)) {
case 0:
return new SearchHits(createSearchHitArray(instance.getHits().length + 1,
randomFrom(XContentType.values()), false, randomBoolean()),
instance.getTotalHits(), instance.getMaxScore());
case 1:
final TotalHits totalHits;
if (instance.getTotalHits() == null) {
totalHits = randomTotalHits();
totalHits = randomTotalHits(randomFrom(TotalHits.Relation.values()));
} else {
totalHits = null;
}
@@ -89,6 +127,33 @@ protected SearchHits mutateInstance(SearchHits instance) {
maxScore = Float.NaN;
}
return new SearchHits(instance.getHits(), instance.getTotalHits(), maxScore);
case 3:
SortField[] sortFields;
if (instance.getSortFields() == null) {
sortFields = createSortFields(randomIntBetween(1, 5));
} else {
sortFields = randomBoolean() ? createSortFields(instance.getSortFields().length + 1) : null;
}
return new SearchHits(instance.getHits(), instance.getTotalHits(), instance.getMaxScore(),
sortFields, instance.getCollapseField(), instance.getCollapseValues());
case 4:
String collapseField;
if (instance.getCollapseField() == null) {
collapseField = randomAlphaOfLengthBetween(5, 10);
} else {
collapseField = randomBoolean() ? instance.getCollapseField() + randomAlphaOfLengthBetween(2, 5) : null;
}
return new SearchHits(instance.getHits(), instance.getTotalHits(), instance.getMaxScore(),
instance.getSortFields(), collapseField, instance.getCollapseValues());
case 5:
Object[] collapseValues;
if (instance.getCollapseValues() == null) {
collapseValues = createCollapseValues(randomIntBetween(1, 5));
} else {
collapseValues = randomBoolean() ? createCollapseValues(instance.getCollapseValues().length) : null;
}
return new SearchHits(instance.getHits(), instance.getTotalHits(), instance.getMaxScore(),
instance.getSortFields(), instance.getCollapseField(), collapseValues);
default:
throw new UnsupportedOperationException();
}
@@ -125,7 +190,7 @@ protected SearchHits createXContextTestInstance(XContentType xContentType) {
// deserialized hit cannot be equal to the original instance.
// There is another test (#testFromXContentWithShards) that checks the
// rest serialization with shard targets.
return createTestItem(xContentType,true, false);
return createTestItem(xContentType, true, false);
}

@Override
@@ -205,4 +270,40 @@ public void testFromXContentWithShards() throws IOException {

}
}

//TODO rename method and adapt versions after backport
/**
 * Reads a fixed base64-encoded payload using a random pre-7.0 wire version and checks it
 * decodes to an empty {@code SearchHits}: zero hits, a total of 0 with relation
 * {@code EQUAL_TO}, and a max score of -1.
 * Verifies that the fields added in the newer wire format (sort fields, collapse field,
 * collapse values) stay {@code null} when reading from a stream that predates them.
 * NOTE(review): the payload was presumably captured from a node running the older
 * format — regenerate it if the pre-7.0 read path ever changes.
 */
public void testReadFromPre70() throws IOException {
try (StreamInput in = StreamInput.wrap(Base64.getDecoder().decode("AQC/gAAAAAA="))) {
// Any version in [6.0.0, 7.0.0) exercises the legacy deserialization branch.
in.setVersion(VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0)));
SearchHits searchHits = new SearchHits();
searchHits.readFrom(in);
assertEquals(0, searchHits.getHits().length);
assertNotNull(searchHits.getTotalHits());
assertEquals(0L, searchHits.getTotalHits().value);
assertEquals(TotalHits.Relation.EQUAL_TO, searchHits.getTotalHits().relation);
assertEquals(-1F, searchHits.getMaxScore(), 0F);
// Pre-7.0 streams do not carry these fields, so they must stay null after reading.
assertNull(searchHits.getSortFields());
assertNull(searchHits.getCollapseField());
assertNull(searchHits.getCollapseValues());
}
}

//TODO rename method and adapt versions after backport
/**
 * Round-trips a randomly generated {@code SearchHits} through a random pre-7.0 wire
 * version and checks which parts survive: hits, max score and total hits are preserved,
 * while sort fields, collapse field and collapse values — which the old format cannot
 * represent — come back as {@code null}.
 * The instance is created with {@code TotalHits.Relation.EQUAL_TO} because that is the
 * only relation the pre-7.0 format can express.
 */
public void testSerializationPre70() throws IOException {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0));
// transportSerialization=true so sort/collapse info is populated on the original,
// letting us assert below that the old wire format drops it.
SearchHits original = createTestItem(randomFrom(XContentType.values()), false, true, TotalHits.Relation.EQUAL_TO);
SearchHits deserialized = copyInstance(original, version);
assertArrayEquals(original.getHits(), deserialized.getHits());
assertEquals(original.getMaxScore(), deserialized.getMaxScore(), 0F);
if (original.getTotalHits() == null) {
assertNull(deserialized.getTotalHits());
} else {
assertNotNull(deserialized.getTotalHits());
assertEquals(original.getTotalHits().value, deserialized.getTotalHits().value);
assertEquals(original.getTotalHits().relation, deserialized.getTotalHits().relation);
}
// Fields introduced after 7.0 are not written to pre-7.0 streams, hence null here.
assertNull(deserialized.getSortFields());
assertNull(deserialized.getCollapseField());
assertNull(deserialized.getCollapseValues());
}
}