
Commit

Optimize segcore API GetResultData
Signed-off-by: yudong.cai <yudong.cai@zilliz.com>
cydrain committed Oct 25, 2021
1 parent f680258 commit f6e89ea
Showing 1 changed file with 3 additions and 3 deletions.
internal/core/src/segcore/reduce_c.cpp: 6 changes (3 additions, 3 deletions)
@@ -94,7 +94,7 @@ GetResultData(std::vector<std::vector<int64_t>>& search_records,
             // remove duplicates
             if (curr_pk == -1 || std::abs(curr_dis - prev_dis) > 0.00001) {
                 result_pair.search_result_->result_offsets_.push_back(loc_offset++);
-                search_records[index].push_back(result_pair.offset_++);
+                search_records[index].push_back(result_pair.offset_);
                 prev_dis = curr_dis;
                 prev_pk_set.clear();
                 prev_pk_set.insert(curr_pk);
@@ -105,15 +105,15 @@ GetResultData(std::vector<std::vector<int64_t>>& search_records,
                 // e3: [100, 0.99] ==> duplicated, should remove
                 if (prev_pk_set.count(curr_pk) == 0) {
                     result_pair.search_result_->result_offsets_.push_back(loc_offset++);
-                    search_records[index].push_back(result_pair.offset_++);
+                    search_records[index].push_back(result_pair.offset_);
                     // prev_pk_set keeps all primary keys with same distance
                     prev_pk_set.insert(curr_pk);
                 } else {
                     // the entity with same distance and same primary key must be duplicated
-                    result_pair.offset_++;
                     LOG_SEGCORE_DEBUG_ << "skip duplicated search result, primary key " << curr_pk;
                 }
             }
+            result_pair.offset_++;
         }
 #endif
 }
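To illustrate the change: the offset_ bookkeeping is now separated from the keep/skip decision, so the offset advances exactly once per candidate instead of being incremented on each of the three branches. Below is a minimal, self-contained sketch of the de-duplication loop after this commit; the types and names (Candidate, dedupe) are illustrative stand-ins rather than the actual segcore structures, and the 1e-5 distance tolerance mirrors the constant in the diff.

// Sketch only: simplified types, not the real segcore API.
#include <cmath>
#include <cstdint>
#include <iostream>
#include <limits>
#include <unordered_set>
#include <vector>

struct Candidate {
    int64_t pk;   // primary key; -1 means "no key", always kept
    float dis;    // distance / score
};

// Returns the offsets of candidates that survive de-duplication. Two results
// are treated as duplicates when their distances differ by at most 1e-5
// AND they share the same primary key.
std::vector<int64_t> dedupe(const std::vector<Candidate>& candidates) {
    std::vector<int64_t> kept;
    std::unordered_set<int64_t> prev_pk_set;
    float prev_dis = std::numeric_limits<float>::max();
    int64_t offset = 0;
    for (const auto& c : candidates) {
        if (c.pk == -1 || std::abs(c.dis - prev_dis) > 0.00001) {
            // distance changed: start a new "same distance" group
            kept.push_back(offset);
            prev_dis = c.dis;
            prev_pk_set.clear();
            prev_pk_set.insert(c.pk);
        } else if (prev_pk_set.count(c.pk) == 0) {
            // same distance, new primary key: keep it, remember the key
            kept.push_back(offset);
            prev_pk_set.insert(c.pk);
        }
        // else: same distance and same primary key, a duplicate, so skip it

        // The commit's point: the offset advances exactly once per candidate,
        // instead of being incremented separately on each branch above.
        ++offset;
    }
    return kept;
}

int main() {
    std::vector<Candidate> results = {
        {100, 0.98f}, {100, 0.99f}, {200, 0.99f}, {100, 0.99f}};
    for (int64_t off : dedupe(results)) {
        std::cout << off << ' ';   // prints "0 1 2": the last result is a duplicate
    }
    std::cout << '\n';
}

A plausible reading of the design choice: with the single increment at the end of the loop body, any branch later added to the keep/skip logic cannot forget to advance the offset.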
