Add a new merge policy that interleaves old and new segments on force merge #48533

Merged 2 commits on Oct 29, 2019
New file: ShuffleForcedMergePolicy.java
@@ -0,0 +1,119 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.lucene.index;

import org.elasticsearch.common.lucene.Lucene;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * A {@link FilterMergePolicy} that interleaves the oldest and newest segments picked by {@link MergePolicy#findForcedMerges}
 * and {@link MergePolicy#findForcedDeletesMerges}. This allows time-based indices, which usually have the oldest documents
 * first, to be efficient at finding the most recent documents too.
 */
public class ShuffleForcedMergePolicy extends FilterMergePolicy {
    private static final String SHUFFLE_MERGE_KEY = "es.shuffle_merge";

    public ShuffleForcedMergePolicy(MergePolicy in) {
        super(in);
    }

    /**
     * Return <code>true</code> if the provided reader was merged with interleaved segments.
     */
    public static boolean isInterleavedSegment(LeafReader reader) {
        SegmentReader segReader = Lucene.segmentReader(reader);
        return segReader.getSegmentInfo().info.getDiagnostics().containsKey(SHUFFLE_MERGE_KEY);
    }


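    // Note: regular background merges (findMerges) are not overridden, so they keep the
    // wrapped policy's segment ordering; only the two forced-merge entry points below interleave.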
    @Override
    public MergeSpecification findForcedDeletesMerges(SegmentInfos segmentInfos, MergeContext mergeContext) throws IOException {
        return wrap(in.findForcedDeletesMerges(segmentInfos, mergeContext));
    }

    @Override
    public MergeSpecification findForcedMerges(SegmentInfos segmentInfos, int maxSegmentCount,
                                               Map<SegmentCommitInfo, Boolean> segmentsToMerge,
                                               MergeContext mergeContext) throws IOException {
        return wrap(in.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, mergeContext));
    }

    private MergeSpecification wrap(MergeSpecification mergeSpec) throws IOException {
        if (mergeSpec == null) {
            return null;
        }
        MergeSpecification newMergeSpec = new MergeSpecification();
        for (OneMerge toWrap : mergeSpec.merges) {
            List<SegmentCommitInfo> newInfos = interleaveList(new ArrayList<>(toWrap.segments));
            newMergeSpec.add(new OneMerge(newInfos) {
                @Override
                public CodecReader wrapForMerge(CodecReader reader) throws IOException {
                    return toWrap.wrapForMerge(reader);
                }

                @Override
                public void setMergeInfo(SegmentCommitInfo info) {
                    // Record in the diagnostics that this segment was produced by an interleaved (shuffled) merge:
                    Map<String, String> copy = new HashMap<>(info.info.getDiagnostics());
                    copy.put(SHUFFLE_MERGE_KEY, "");
                    info.info.setDiagnostics(copy);
                    super.setMergeInfo(info);
                }
            });
        }

        return newMergeSpec;
    }

    // Returns a new list that sorts the segments of the original one by name (oldest first)
    // and then walks it from both ends, pairing the oldest remaining segment with the newest
    // so that old and recent documents are colocated in the merged segment.
    private List<SegmentCommitInfo> interleaveList(List<SegmentCommitInfo> infos) throws IOException {
        List<SegmentCommitInfo> newInfos = new ArrayList<>(infos.size());
        Collections.sort(infos, Comparator.comparing(a -> a.info.name));
        int left = 0;
        int right = infos.size() - 1;
        while (left <= right) {
            SegmentCommitInfo leftInfo = infos.get(left);
            if (left == right) {
                newInfos.add(leftInfo);
            } else {
                SegmentCommitInfo rightInfo = infos.get(right);
                // smaller segment of the pair first
                if (leftInfo.sizeInBytes() < rightInfo.sizeInBytes()) {
                    newInfos.add(leftInfo);
                    newInfos.add(rightInfo);
                } else {
                    newInfos.add(rightInfo);
                    newInfos.add(leftInfo);
                }
            }
            left++;
            right--;
        }
        return newInfos;
    }
}
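
To make the reordering concrete, here is a minimal standalone sketch (not part of this change) of the same two-pointer interleave that interleaveList applies after sorting by segment name; the real policy additionally orders each pair smaller-segment-first by size:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class InterleaveSketch {
    public static void main(String[] args) {
        List<String> sorted = Arrays.asList("_0", "_1", "_2", "_3", "_4"); // oldest segment first
        List<String> interleaved = new ArrayList<>();
        int left = 0;
        int right = sorted.size() - 1;
        while (left <= right) {
            if (left == right) {
                interleaved.add(sorted.get(left)); // odd count: the middle segment goes in alone
            } else {
                interleaved.add(sorted.get(left));  // oldest remaining
                interleaved.add(sorted.get(right)); // newest remaining
            }
            left++;
            right--;
        }
        System.out.println(interleaved); // prints [_0, _4, _1, _3, _2]
    }
}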
Changed file: InternalEngine.java
@@ -36,6 +36,7 @@
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
@@ -56,6 +57,7 @@
import org.elasticsearch.Assertions;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.lease.Releasable;
@@ -2224,6 +2226,13 @@ private IndexWriterConfig getIndexWriterConfig() {
                new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD, softDeletesPolicy::getRetentionQuery,
                    new PrunePostingsMergePolicy(mergePolicy, IdFieldMapper.NAME)));
        }
        boolean shuffleForcedMerge = Booleans.parseBoolean(System.getProperty("es.shuffle_forced_merge", Boolean.TRUE.toString()));
        if (shuffleForcedMerge) {
            // We wrap the merge policy for all indices even though it is mostly useful for time-based indices.
            // There should be no overhead for other types of indices, so this is simpler than adding a setting
            // to enable it.
            mergePolicy = new ShuffleForcedMergePolicy(mergePolicy);
        }
        iwc.setMergePolicy(new ElasticsearchMergePolicy(mergePolicy));
        iwc.setSimilarity(engineConfig.getSimilarity());
        iwc.setRAMBufferSizeMB(engineConfig.getIndexingBufferSize().getMbFrac());
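Since the flag above is read from a JVM system property that defaults to true, the wrapping can presumably be disabled by starting the node with -Des.shuffle_forced_merge=false. A minimal sketch of the same read-with-default pattern in plain Java (Booleans is Elasticsearch's stricter helper; java.lang.Boolean agrees with it for the literal values "true" and "false"):

    // Reads the opt-out flag from a JVM system property; enabled unless explicitly set to false.
    boolean shuffleForcedMerge =
        Boolean.parseBoolean(System.getProperty("es.shuffle_forced_merge", "true"));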
Changed file: ElasticsearchMergePolicy.java
@@ -61,6 +61,11 @@ public ElasticsearchMergePolicy(MergePolicy delegate) {
        super(delegate);
    }

    /** Return the wrapped merge policy. */
    public MergePolicy getDelegate() {
        return in;
    }

    private boolean shouldUpgrade(SegmentCommitInfo info) {
        org.apache.lucene.util.Version old = info.info.getVersion();
        org.apache.lucene.util.Version cur = Version.CURRENT.luceneVersion;
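The new getDelegate() accessor makes the wrapped policy observable from outside, which is handy for asserting on the chain built in getIndexWriterConfig(). A hypothetical test-style sketch (variable names assumed, not taken from this diff; assumes org.hamcrest.Matchers.instanceOf is statically imported):

    // Unwrap ElasticsearchMergePolicy and check that the shuffle policy sits underneath.
    MergePolicy outer = indexWriterConfig.getMergePolicy();
    assertThat(outer, instanceOf(ElasticsearchMergePolicy.class));
    MergePolicy delegate = ((ElasticsearchMergePolicy) outer).getDelegate();
    assertThat(delegate, instanceOf(ShuffleForcedMergePolicy.class));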
New file: ShuffleForcedMergePolicyTests.java
@@ -0,0 +1,91 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.lucene.index;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.store.Directory;

import java.io.IOException;
import java.util.function.Consumer;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;

public class ShuffleForcedMergePolicyTests extends BaseMergePolicyTestCase {
    public void testDiagnostics() throws IOException {
        try (Directory dir = newDirectory()) {
            IndexWriterConfig iwc = newIndexWriterConfig();
            MergePolicy mp = new ShuffleForcedMergePolicy(newLogMergePolicy());
            iwc.setMergePolicy(mp);
            boolean sorted = random().nextBoolean();
            if (sorted) {
                iwc.setIndexSort(new Sort(new SortField("sort", SortField.Type.INT)));
            }
            int numDocs = atLeast(100);

            try (IndexWriter writer = new IndexWriter(dir, iwc)) {
                for (int i = 0; i < numDocs; i++) {
                    if (i % 10 == 0) {
                        writer.flush();
                    }
                    Document doc = new Document();
                    doc.add(new StringField("id", "" + i, Field.Store.NO));
                    doc.add(new NumericDocValuesField("sort", random().nextInt()));
                    writer.addDocument(doc);
                }
                try (DirectoryReader reader = DirectoryReader.open(writer)) {
                    assertThat(reader.leaves().size(), greaterThan(2));
                    assertSegmentReaders(reader, leaf -> {
                        assertFalse(ShuffleForcedMergePolicy.isInterleavedSegment(leaf));
                    });
                }
                writer.forceMerge(1);
                try (DirectoryReader reader = DirectoryReader.open(writer)) {
                    assertThat(reader.leaves().size(), equalTo(1));
                    assertSegmentReaders(reader, leaf -> {
                        assertTrue(ShuffleForcedMergePolicy.isInterleavedSegment(leaf));
                    });
                }
            }
        }
    }

    private void assertSegmentReaders(DirectoryReader reader, Consumer<LeafReader> checkLeaf) {
        for (LeafReaderContext leaf : reader.leaves()) {
            checkLeaf.accept(leaf.reader());
        }
    }

    @Override
    protected MergePolicy mergePolicy() {
        return new ShuffleForcedMergePolicy(newLogMergePolicy());
    }

    @Override
    protected void assertSegmentInfos(MergePolicy policy, SegmentInfos infos) throws IOException {}

    @Override
    protected void assertMerge(MergePolicy policy, MergePolicy.MergeSpecification merge) throws IOException {}
}
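
For reference, the marker that isInterleavedSegment checks lives in the segment's diagnostics map, so it can also be inspected directly. A sketch assuming the leaf reader is an unwrapped SegmentReader (Map is java.util.Map):

    // Read the diagnostics of a segment to see whether it came from a shuffled merge.
    SegmentReader segmentReader = (SegmentReader) leafReader; // assumption: not a wrapped/filtered reader
    Map<String, String> diagnostics = segmentReader.getSegmentInfo().info.getDiagnostics();
    boolean interleaved = diagnostics.containsKey("es.shuffle_merge");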
Changed file: PrunePostingsMergePolicyTests.java
@@ -29,6 +29,7 @@
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
@@ -51,7 +52,7 @@ public void testPrune() throws IOException {
        iwc.setSoftDeletesField("_soft_deletes");
        MergePolicy mp = new SoftDeletesRetentionMergePolicy("_soft_deletes", MatchAllDocsQuery::new,
            new PrunePostingsMergePolicy(newLogMergePolicy(), "id"));
-       iwc.setMergePolicy(mp);
+       iwc.setMergePolicy(new ShuffleForcedMergePolicy(mp));
        boolean sorted = randomBoolean();
        if (sorted) {
            iwc.setIndexSort(new Sort(new SortField("sort", SortField.Type.INT)));
        }
Changed file: RecoverySourcePruneMergePolicyTests.java
@@ -34,6 +34,7 @@
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.ShuffleForcedMergePolicy;
import org.apache.lucene.index.StandardDirectoryReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocIdSetIterator;
@@ -57,7 +58,7 @@ public void testPruneAll() throws IOException {
        IndexWriterConfig iwc = newIndexWriterConfig();
        RecoverySourcePruneMergePolicy mp = new RecoverySourcePruneMergePolicy("extra_source", MatchNoDocsQuery::new,
            newLogMergePolicy());
-       iwc.setMergePolicy(mp);
+       iwc.setMergePolicy(new ShuffleForcedMergePolicy(mp));
        try (IndexWriter writer = new IndexWriter(dir, iwc)) {
            for (int i = 0; i < 20; i++) {
                if (i > 0 && randomBoolean()) {