Skip to content

Commit

Permalink
[Remove] types from Uid and remaining types/Uid from translog (#2450)
Browse files Browse the repository at this point in the history
Removes types from UID class along with cleaning up all obsolete MapperService
dependencies. This includes removing UID from the Translog Delete operation
which is no longer needed due to type dependency removal.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
  • Loading branch information
nknize authored Mar 13, 2022
1 parent 95d4750 commit bdcaec5
Show file tree
Hide file tree
Showing 18 changed files with 108 additions and 224 deletions.
10 changes: 2 additions & 8 deletions server/src/main/java/org/opensearch/index/engine/Engine.java
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,6 @@
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.util.concurrent.ReleasableLock;
import org.opensearch.index.VersionType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.Mapping;
import org.opensearch.index.mapper.ParseContext.Document;
import org.opensearch.index.mapper.ParsedDocument;
Expand Down Expand Up @@ -736,13 +735,8 @@ public enum SearcherScope {
* Creates a new history snapshot from Lucene for reading operations whose seqno in the requesting seqno range (both inclusive).
* This feature requires soft-deletes enabled. If soft-deletes are disabled, this method will throw an {@link IllegalStateException}.
*/
public abstract Translog.Snapshot newChangesSnapshot(
String source,
MapperService mapperService,
long fromSeqNo,
long toSeqNo,
boolean requiredFullRange
) throws IOException;
public abstract Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange)
throws IOException;

public abstract boolean hasCompleteOperationHistory(String reason, long startingSeqNo);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,6 @@
import org.opensearch.index.VersionType;
import org.opensearch.index.fieldvisitor.IdOnlyFieldVisitor;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.ParseContext;
import org.opensearch.index.mapper.ParsedDocument;
import org.opensearch.index.mapper.SeqNoFieldMapper;
Expand Down Expand Up @@ -2773,20 +2772,13 @@ long getNumDocUpdates() {
}

@Override
public Translog.Snapshot newChangesSnapshot(
String source,
MapperService mapperService,
long fromSeqNo,
long toSeqNo,
boolean requiredFullRange
) throws IOException {
public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException {
ensureOpen();
refreshIfNeeded(source, toSeqNo);
Searcher searcher = acquireSearcher(source, SearcherScope.INTERNAL);
try {
LuceneChangesSnapshot snapshot = new LuceneChangesSnapshot(
searcher,
mapperService,
LuceneChangesSnapshot.DEFAULT_BATCH_SIZE,
fromSeqNo,
toSeqNo,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
Expand All @@ -51,11 +50,8 @@
import org.opensearch.common.lucene.Lucene;
import org.opensearch.core.internal.io.IOUtils;
import org.opensearch.index.fieldvisitor.FieldsVisitor;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.SeqNoFieldMapper;
import org.opensearch.index.mapper.SourceFieldMapper;
import org.opensearch.index.mapper.Uid;
import org.opensearch.index.translog.Translog;

import java.io.Closeable;
Expand All @@ -77,7 +73,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot {
private final boolean requiredFullRange;

private final IndexSearcher indexSearcher;
private final MapperService mapperService;
private int docIndex = 0;
private final int totalHits;
private ScoreDoc[] scoreDocs;
Expand All @@ -88,20 +83,13 @@ final class LuceneChangesSnapshot implements Translog.Snapshot {
* Creates a new "translog" snapshot from Lucene for reading operations whose seq# in the specified range.
*
* @param engineSearcher the internal engine searcher which will be taken over if the snapshot is opened successfully
* @param mapperService the mapper service which will be mainly used to resolve the document's type and uid
* @param searchBatchSize the number of documents should be returned by each search
* @param fromSeqNo the min requesting seq# - inclusive
* @param toSeqNo the maximum requesting seq# - inclusive
* @param requiredFullRange if true, the snapshot will strictly check for the existence of operations between fromSeqNo and toSeqNo
*/
LuceneChangesSnapshot(
Engine.Searcher engineSearcher,
MapperService mapperService,
int searchBatchSize,
long fromSeqNo,
long toSeqNo,
boolean requiredFullRange
) throws IOException {
LuceneChangesSnapshot(Engine.Searcher engineSearcher, int searchBatchSize, long fromSeqNo, long toSeqNo, boolean requiredFullRange)
throws IOException {
if (fromSeqNo < 0 || toSeqNo < 0 || fromSeqNo > toSeqNo) {
throw new IllegalArgumentException("Invalid range; from_seqno [" + fromSeqNo + "], to_seqno [" + toSeqNo + "]");
}
Expand All @@ -114,7 +102,6 @@ final class LuceneChangesSnapshot implements Translog.Snapshot {
IOUtils.close(engineSearcher);
}
};
this.mapperService = mapperService;
final long requestingSize = (toSeqNo - fromSeqNo) == Long.MAX_VALUE ? Long.MAX_VALUE : (toSeqNo - fromSeqNo + 1L);
this.searchBatchSize = requestingSize < searchBatchSize ? Math.toIntExact(requestingSize) : searchBatchSize;
this.fromSeqNo = fromSeqNo;
Expand Down Expand Up @@ -278,19 +265,17 @@ private Translog.Operation readDocAsOp(int docIndex) throws IOException {
: SourceFieldMapper.NAME;
final FieldsVisitor fields = new FieldsVisitor(true, sourceField);
leaf.reader().document(segmentDocID, fields);
fields.postProcess(mapperService);

final Translog.Operation op;
final boolean isTombstone = parallelArray.isTombStone[docIndex];
if (isTombstone && fields.uid() == null) {
if (isTombstone && fields.id() == null) {
op = new Translog.NoOp(seqNo, primaryTerm, fields.source().utf8ToString());
assert version == 1L : "Noop tombstone should have version 1L; actual version [" + version + "]";
assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Noop but soft_deletes field is not set [" + op + "]";
} else {
final String id = fields.uid().id();
final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
final String id = fields.id();
if (isTombstone) {
op = new Translog.Delete(id, uid, seqNo, primaryTerm, version);
op = new Translog.Delete(id, seqNo, primaryTerm, version);
assert assertDocSoftDeleted(leaf.reader(), segmentDocID) : "Delete op but soft_deletes field is not set [" + op + "]";
} else {
final BytesReference source = fields.source();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@
import org.opensearch.common.lucene.index.OpenSearchDirectoryReader;
import org.opensearch.common.util.concurrent.ReleasableLock;
import org.opensearch.core.internal.io.IOUtils;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.seqno.SeqNoStats;
import org.opensearch.index.seqno.SequenceNumbers;
import org.opensearch.index.store.Store;
Expand Down Expand Up @@ -326,13 +325,7 @@ public Closeable acquireHistoryRetentionLock() {
}

@Override
public Translog.Snapshot newChangesSnapshot(
String source,
MapperService mapperService,
long fromSeqNo,
long toSeqNo,
boolean requiredFullRange
) {
public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) {
return newEmptySnapshot();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
import org.apache.lucene.util.BytesRef;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.IgnoredFieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
Expand Down Expand Up @@ -67,7 +66,7 @@ public class FieldsVisitor extends StoredFieldVisitor {
private final String sourceFieldName;
private final Set<String> requiredFields;
protected BytesReference source;
protected String type, id;
protected String id;
protected Map<String, List<Object>> fieldsValues;

public FieldsVisitor(boolean loadSource) {
Expand Down Expand Up @@ -98,10 +97,6 @@ public Status needsField(FieldInfo fieldInfo) {
}

public void postProcess(MapperService mapperService) {
final DocumentMapper mapper = mapperService.documentMapper();
if (mapper != null) {
type = mapper.type();
}
for (Map.Entry<String, List<Object>> entry : fields().entrySet()) {
MappedFieldType fieldType = mapperService.fieldType(entry.getKey());
if (fieldType == null) {
Expand Down Expand Up @@ -167,13 +162,8 @@ public BytesReference source() {
return source;
}

public Uid uid() {
if (id == null) {
return null;
} else if (type == null) {
throw new IllegalStateException("Call postProcess before getting the uid");
}
return new Uid(type, id);
public String id() {
return id;
}

public String routing() {
Expand All @@ -195,7 +185,6 @@ public Map<String, List<Object>> fields() {
public void reset() {
if (fieldsValues != null) fieldsValues.clear();
source = null;
type = null;
id = null;

requiredFields.addAll(BASE_REQUIRED_FIELDS);
Expand Down
45 changes: 3 additions & 42 deletions server/src/main/java/org/opensearch/index/mapper/Uid.java
Original file line number Diff line number Diff line change
Expand Up @@ -43,52 +43,13 @@ public final class Uid {
public static final char DELIMITER = '#';
public static final byte DELIMITER_BYTE = 0x23;

private final String type;

private final String id;

public Uid(String type, String id) {
this.type = type;
this.id = id;
}

public String type() {
return type;
}

public String id() {
return id;
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;

Uid uid = (Uid) o;

if (id != null ? !id.equals(uid.id) : uid.id != null) return false;
if (type != null ? !type.equals(uid.type) : uid.type != null) return false;

return true;
}

@Override
public int hashCode() {
int result = type != null ? type.hashCode() : 0;
result = 31 * result + (id != null ? id.hashCode() : 0);
return result;
}

@Override
public String toString() {
return type + "#" + id;
}

private static final int UTF8 = 0xff;
private static final int NUMERIC = 0xfe;
private static final int BASE64_ESCAPE = 0xfd;

// non-instantiable
private Uid() {}

static boolean isURLBase64WithoutPadding(String id) {
// We are not lenient about padding chars ('=') otherwise
// 'xxx=' and 'xxx' could be considered the same id
Expand Down
11 changes: 5 additions & 6 deletions server/src/main/java/org/opensearch/index/shard/IndexShard.java
Original file line number Diff line number Diff line change
Expand Up @@ -1069,14 +1069,12 @@ private Engine.DeleteResult applyDeleteOperation(
+ getOperationPrimaryTerm()
+ "]";
ensureWriteAllowed(origin);
final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
final Engine.Delete delete = prepareDelete(id, uid, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm);
final Engine.Delete delete = prepareDelete(id, seqNo, opPrimaryTerm, version, versionType, origin, ifSeqNo, ifPrimaryTerm);
return delete(engine, delete);
}

private Engine.Delete prepareDelete(
public static Engine.Delete prepareDelete(
String id,
Term uid,
long seqNo,
long primaryTerm,
long version,
Expand All @@ -1086,6 +1084,7 @@ private Engine.Delete prepareDelete(
long ifPrimaryTerm
) {
long startTime = System.nanoTime();
final Term uid = new Term(IdFieldMapper.NAME, Uid.encodeId(id));
return new Engine.Delete(id, uid, seqNo, primaryTerm, version, versionType, origin, startTime, ifSeqNo, ifPrimaryTerm);
}

Expand Down Expand Up @@ -2238,7 +2237,7 @@ public Closeable acquireHistoryRetentionLock() {
* The returned snapshot can be retrieved from either Lucene index or translog files.
*/
public Translog.Snapshot getHistoryOperations(String reason, long startingSeqNo, long endSeqNo) throws IOException {
return getEngine().newChangesSnapshot(reason, mapperService, startingSeqNo, endSeqNo, true);
return getEngine().newChangesSnapshot(reason, startingSeqNo, endSeqNo, true);
}

/**
Expand Down Expand Up @@ -2270,7 +2269,7 @@ public long getMinRetainedSeqNo() {
* This parameter should be only enabled when the entire requesting range is below the global checkpoint.
*/
public Translog.Snapshot newChangesSnapshot(String source, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException {
return getEngine().newChangesSnapshot(source, mapperService, fromSeqNo, toSeqNo, requiredFullRange);
return getEngine().newChangesSnapshot(source, fromSeqNo, toSeqNo, requiredFullRange);
}

public List<Segment> segments(boolean verbose) {
Expand Down
Loading

0 comments on commit bdcaec5

Please sign in to comment.