Remove hppc from FetchSearchPhase #85188

Merged

FetchSearchPhase.java
@@ -7,8 +7,6 @@
*/
package org.elasticsearch.action.search;

-import com.carrotsearch.hppc.IntArrayList;
-
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.ScoreDoc;
@@ -118,7 +116,7 @@ private void innerRun() throws Exception {
finishPhase.run();
} else {
ScoreDoc[] scoreDocs = reducedQueryPhase.sortedTopDocs().scoreDocs();
-final IntArrayList[] docIdsToLoad = SearchPhaseController.fillDocIdsToLoad(numShards, scoreDocs);
+final List<Integer>[] docIdsToLoad = SearchPhaseController.fillDocIdsToLoad(numShards, scoreDocs);
// no docs to fetch -- sidestep everything and return
if (scoreDocs.length == 0) {
// we have to release contexts here to free up resources
@@ -135,7 +133,7 @@
context
);
for (int i = 0; i < docIdsToLoad.length; i++) {
-IntArrayList entry = docIdsToLoad[i];
+List<Integer> entry = docIdsToLoad[i];
SearchPhaseResult queryResult = queryResults.get(i);
if (entry == null) { // no results for this shard ID
if (queryResult != null) {
@@ -176,7 +174,7 @@ private void innerRun() throws Exception {
protected ShardFetchSearchRequest createFetchRequest(
ShardSearchContextId contextId,
int index,
-IntArrayList entry,
+List<Integer> entry,
ScoreDoc[] lastEmittedDocPerShard,
OriginalIndices originalIndices,
ShardSearchRequest shardSearchRequest,

SearchPhaseController.java
@@ -8,8 +8,6 @@

package org.elasticsearch.action.search;

-import com.carrotsearch.hppc.IntArrayList;
-
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.FieldDoc;
@@ -235,12 +233,13 @@ public static ScoreDoc[] getLastEmittedDocPerShard(ReducedQueryPhase reducedQuer
/**
* Builds an array, with potential null elements, with docs to load.
*/
-public static IntArrayList[] fillDocIdsToLoad(int numShards, ScoreDoc[] shardDocs) {
-IntArrayList[] docIdsToLoad = new IntArrayList[numShards];
+public static List<Integer>[] fillDocIdsToLoad(int numShards, ScoreDoc[] shardDocs) {
+@SuppressWarnings("unchecked")
+List<Integer>[] docIdsToLoad = (List<Integer>[]) new ArrayList<?>[numShards];
for (ScoreDoc shardDoc : shardDocs) {
-IntArrayList shardDocIdsToLoad = docIdsToLoad[shardDoc.shardIndex];
+List<Integer> shardDocIdsToLoad = docIdsToLoad[shardDoc.shardIndex];
if (shardDocIdsToLoad == null) {
-shardDocIdsToLoad = docIdsToLoad[shardDoc.shardIndex] = new IntArrayList();
+shardDocIdsToLoad = docIdsToLoad[shardDoc.shardIndex] = new ArrayList<>();
}
shardDocIdsToLoad.add(shardDoc.doc);
}
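
For context, the replacement above relies on Java's usual workaround for generic array creation: `new List<Integer>[numShards]` does not compile, so the code allocates an `ArrayList<?>[]` and performs an unchecked cast. Below is a minimal, self-contained sketch of the same pattern; the class, method, and variable names are illustrative and not part of the PR.

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class DocIdGroupingSketch {

    /** Groups doc ids by shard index; slots stay null for shards with no docs, as in fillDocIdsToLoad. */
    @SuppressWarnings("unchecked")
    static List<Integer>[] groupByShard(int numShards, int[][] shardDocPairs) {
        // "new List<Integer>[numShards]" is rejected by the compiler (generic array creation),
        // hence the ArrayList<?>[] allocation followed by an unchecked cast.
        List<Integer>[] docIdsToLoad = (List<Integer>[]) new ArrayList<?>[numShards];
        for (int[] pair : shardDocPairs) {
            int shardIndex = pair[0];
            int doc = pair[1];
            if (docIdsToLoad[shardIndex] == null) {
                docIdsToLoad[shardIndex] = new ArrayList<>();
            }
            docIdsToLoad[shardIndex].add(doc);
        }
        return docIdsToLoad;
    }

    public static void main(String[] args) {
        List<Integer>[] grouped = groupByShard(3, new int[][] { { 0, 7 }, { 2, 1 }, { 0, 3 } });
        System.out.println(Arrays.toString(grouped)); // [[7, 3], null, [1]]
    }
}
```

Compared to hppc's IntArrayList this boxes each doc id, which is presumably an acceptable trade-off for dropping the dependency, since the lists are per-shard and short-lived.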

SearchScrollQueryThenFetchAsyncAction.java
@@ -8,8 +8,6 @@

package org.elasticsearch.action.search;

-import com.carrotsearch.hppc.IntArrayList;
-
import org.apache.logging.log4j.Logger;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
@@ -25,6 +23,7 @@
import org.elasticsearch.search.query.ScrollQuerySearchResult;
import org.elasticsearch.transport.Transport;

+import java.util.List;
import java.util.function.BiFunction;

final class SearchScrollQueryThenFetchAsyncAction extends SearchScrollAsyncAction<ScrollQuerySearchResult> {
@@ -75,15 +74,15 @@ public void run() {
return;
}

-final IntArrayList[] docIdsToLoad = SearchPhaseController.fillDocIdsToLoad(queryResults.length(), scoreDocs);
+final List<Integer>[] docIdsToLoad = SearchPhaseController.fillDocIdsToLoad(queryResults.length(), scoreDocs);
final ScoreDoc[] lastEmittedDocPerShard = SearchPhaseController.getLastEmittedDocPerShard(
reducedQueryPhase,
queryResults.length()
);
final CountDown counter = new CountDown(docIdsToLoad.length);
for (int i = 0; i < docIdsToLoad.length; i++) {
final int index = i;
-final IntArrayList docIds = docIdsToLoad[index];
+final List<Integer> docIds = docIdsToLoad[index];
if (docIds != null) {
final QuerySearchResult querySearchResult = queryResults.get(index);
ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[index];
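
For orientation, here is a simplified sketch of the per-shard dispatch pattern visible in this hunk: one array slot per shard, null meaning nothing to fetch, and a counter that fires a completion callback once every shard has been accounted for. Elasticsearch's CountDown helper and the async fetch request are replaced with an AtomicInteger and a print statement, so the names and structure are illustrative only.

```java
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

final class PerShardFetchSketch {

    static void dispatchFetches(List<Integer>[] docIdsToLoad, Runnable onAllShardsDone) {
        AtomicInteger remaining = new AtomicInteger(docIdsToLoad.length);
        for (int shardIndex = 0; shardIndex < docIdsToLoad.length; shardIndex++) {
            List<Integer> docIds = docIdsToLoad[shardIndex];
            if (docIds == null) {
                // No hits from this shard: it still counts toward overall completion.
                if (remaining.decrementAndGet() == 0) {
                    onAllShardsDone.run();
                }
                continue;
            }
            // In the real code this sends an async shard fetch request and counts down
            // in the response listener; here we "fetch" synchronously for illustration.
            System.out.println("fetching docs " + docIds + " from shard " + shardIndex);
            if (remaining.decrementAndGet() == 0) {
                onAllShardsDone.run();
            }
        }
    }

    public static void main(String[] args) {
        @SuppressWarnings("unchecked")
        List<Integer>[] docIdsToLoad = new List[] { List.of(3, 9), null, List.of(1) };
        dispatchFetches(docIdsToLoad, () -> System.out.println("all shards done"));
    }
}
```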

ShardFetchRequest.java
@@ -8,8 +8,6 @@

package org.elasticsearch.search.fetch;

-import com.carrotsearch.hppc.IntArrayList;
-
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.search.SearchShardTask;
@@ -26,6 +24,7 @@
import org.elasticsearch.transport.TransportRequest;

import java.io.IOException;
+import java.util.List;
import java.util.Map;

/**
@@ -34,25 +33,22 @@
*/
public class ShardFetchRequest extends TransportRequest {

-private ShardSearchContextId contextId;
-
-private int[] docIds;
+private final ShardSearchContextId contextId;

-private int size;
+private final int[] docIds;

private ScoreDoc lastEmittedDoc;

-public ShardFetchRequest(ShardSearchContextId contextId, IntArrayList list, ScoreDoc lastEmittedDoc) {
+public ShardFetchRequest(ShardSearchContextId contextId, List<Integer> docIds, ScoreDoc lastEmittedDoc) {
this.contextId = contextId;
-this.docIds = list.buffer;
-this.size = list.size();
+this.docIds = docIds.stream().mapToInt(Integer::intValue).toArray();
this.lastEmittedDoc = lastEmittedDoc;
}

public ShardFetchRequest(StreamInput in) throws IOException {
super(in);
contextId = new ShardSearchContextId(in);
-size = in.readVInt();
+int size = in.readVInt();
docIds = new int[size];
for (int i = 0; i < size; i++) {
docIds[i] = in.readVInt();
@@ -71,9 +67,9 @@ public ShardFetchRequest(StreamInput in) throws IOException {
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
contextId.writeTo(out);
-out.writeVInt(size);
-for (int i = 0; i < size; i++) {
-out.writeVInt(docIds[i]);
+out.writeVInt(docIds.length);
+for (int docId : docIds) {
+out.writeVInt(docId);
}
if (lastEmittedDoc == null) {
out.writeByte((byte) 0);
@@ -95,7 +91,7 @@ public int[] docIds() {
}

public int docIdsSize() {
-return size;
+return docIds.length;
}

public ScoreDoc lastEmittedDoc() {
@@ -109,7 +105,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId,

@Override
public String getDescription() {
return "id[" + contextId + "], size[" + size + "], lastEmittedDoc[" + lastEmittedDoc + "]";
return "id[" + contextId + "], size[" + docIds.length + "], lastEmittedDoc[" + lastEmittedDoc + "]";
}

@Nullable
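
The constructor change also shows why the separate `size` field could be dropped: the old code kept `list.buffer`, the hppc backing array that may be longer than the logical element count, so the length had to be carried separately, while the new code materializes an exact-length `int[]`, making `docIds.length` authoritative. A small sketch of that unboxing step follows; the class and method names are illustrative, not from the PR.

```java
import java.util.Arrays;
import java.util.List;

final class DocIdUnboxingSketch {

    /** Mirrors the new constructor logic: copy a List<Integer> into an exact-length primitive array. */
    static int[] toIntArray(List<Integer> docIds) {
        return docIds.stream().mapToInt(Integer::intValue).toArray();
    }

    public static void main(String[] args) {
        int[] ids = toIntArray(List.of(12, 7, 42));
        // writeTo(...) now serializes ids.length followed by each id as a VInt,
        // and the StreamInput constructor reads them back in the same order.
        System.out.println(ids.length + " " + Arrays.toString(ids)); // 3 [12, 7, 42]
    }
}
```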

ShardFetchSearchRequest.java
@@ -8,8 +8,6 @@

package org.elasticsearch.search.fetch;

-import com.carrotsearch.hppc.IntArrayList;
-
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.Version;
import org.elasticsearch.action.IndicesRequest;
@@ -23,6 +21,7 @@
import org.elasticsearch.search.internal.ShardSearchRequest;

import java.io.IOException;
+import java.util.List;

/**
* Shard level fetch request used with search. Holds indices taken from the original search request
@@ -39,12 +38,12 @@ public ShardFetchSearchRequest(
OriginalIndices originalIndices,
ShardSearchContextId id,
ShardSearchRequest shardSearchRequest,
-IntArrayList list,
+List<Integer> docIds,
ScoreDoc lastEmittedDoc,
RescoreDocIds rescoreDocIds,
AggregatedDfs aggregatedDfs
) {
-super(id, list, lastEmittedDoc);
+super(id, docIds, lastEmittedDoc);
this.originalIndices = originalIndices;
this.shardSearchRequest = shardSearchRequest;
this.rescoreDocIds = rescoreDocIds;

Test class in package org.elasticsearch.search
@@ -7,8 +7,6 @@
*/
package org.elasticsearch.search;

-import com.carrotsearch.hppc.IntArrayList;
-
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.LeafReader;
@@ -358,7 +356,7 @@ public void onFailure(Exception e) {
result
);
SearchPhaseResult searchPhaseResult = result.get();
-IntArrayList intCursors = new IntArrayList(1);
+List<Integer> intCursors = new ArrayList<>(1);
intCursors.add(0);
ShardFetchRequest req = new ShardFetchRequest(searchPhaseResult.getContextId(), intCursors, null/* not a scroll */);
PlainActionFuture<FetchSearchResult> listener = new PlainActionFuture<>();