
Commit

Cleanup unused code in org.opensearch.common.lucene (opensearch-project#5306)


Signed-off-by: Rabi Panda <adnapibar@gmail.com>
adnapibar authored Nov 18, 2022
1 parent a0f022b commit 6c84cbd
Showing 5 changed files with 0 additions and 205 deletions.
143 changes: 0 additions & 143 deletions server/src/main/java/org/opensearch/common/lucene/Lucene.java
@@ -32,20 +32,14 @@

package org.opensearch.common.lucene;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterCodecReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.FilterLeafReader;
@@ -55,21 +49,12 @@
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafMetaData;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SegmentReader;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.StoredFieldVisitor;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.VectorValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
@@ -142,18 +127,6 @@ public class Lucene {

private Lucene() {}

public static Version parseVersion(@Nullable String version, Version defaultVersion, Logger logger) {
if (version == null) {
return defaultVersion;
}
try {
return Version.parse(version);
} catch (ParseException e) {
logger.warn(() -> new ParameterizedMessage("no version match {}, default to {}", version, defaultVersion), e);
return defaultVersion;
}
}
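
For reference, callers that still need this lenient parsing can inline it against Lucene's Version.parse. A minimal sketch with a hypothetical helper class, dropping the warning log and simply returning the default on a parse failure:

import org.apache.lucene.util.Version;

import java.text.ParseException;

final class VersionParsing {
    // Parse a Lucene version string, falling back to the default when the value
    // is absent or malformed (the removed helper additionally logged a warning).
    static Version parseOrDefault(String version, Version defaultVersion) {
        if (version == null) {
            return defaultVersion;
        }
        try {
            return Version.parse(version);
        } catch (ParseException e) {
            return defaultVersion;
        }
    }
}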

/**
* Reads the segments infos, failing if it fails to load
*/
@@ -697,34 +670,6 @@ public static boolean indexExists(final Directory directory) throws IOException
return DirectoryReader.indexExists(directory);
}

/**
* Wait for an index to exist for up to {@code timeLimitMillis}. Returns
* true if the index eventually exists, false if not.
*
* Will retry the directory every second for at least {@code timeLimitMillis}
*/
public static boolean waitForIndex(final Directory directory, final long timeLimitMillis) throws IOException {
final long DELAY = 1000;
long waited = 0;
try {
while (true) {
if (waited >= timeLimitMillis) {
break;
}
if (indexExists(directory)) {
return true;
}
Thread.sleep(DELAY);
waited += DELAY;
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return false;
}
// one more try after all retries
return indexExists(directory);
}
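
Callers that still need to poll for an index can rebuild this behaviour directly on DirectoryReader.indexExists. A rough sketch with a hypothetical helper class, retrying about once per second up to the time limit:

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.store.Directory;

import java.io.IOException;

final class IndexPolling {
    static boolean waitForIndex(Directory directory, long timeLimitMillis) throws IOException {
        final long delayMillis = 1000;
        long waited = 0;
        try {
            while (waited < timeLimitMillis) {
                if (DirectoryReader.indexExists(directory)) {
                    return true;
                }
                Thread.sleep(delayMillis);
                waited += delayMillis;
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return false;
        }
        // One last check after the time limit, matching the removed behaviour.
        return DirectoryReader.indexExists(directory);
    }
}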

/**
* Returns {@code true} iff the given exception or
* one of it's causes is an instance of {@link CorruptIndexException},
@@ -1024,92 +969,4 @@ public static NumericDocValuesField newSoftDeletesField() {
return new NumericDocValuesField(SOFT_DELETES_FIELD, 1);
}

/**
* Returns an empty leaf reader with the given max docs. The reader will be fully deleted.
*/
public static LeafReader emptyReader(final int maxDoc) {
return new LeafReader() {
final Bits liveDocs = new Bits.MatchNoBits(maxDoc);

public Terms terms(String field) {
return null;
}

public NumericDocValues getNumericDocValues(String field) {
return null;
}

public BinaryDocValues getBinaryDocValues(String field) {
return null;
}

public SortedDocValues getSortedDocValues(String field) {
return null;
}

public SortedNumericDocValues getSortedNumericDocValues(String field) {
return null;
}

public SortedSetDocValues getSortedSetDocValues(String field) {
return null;
}

public NumericDocValues getNormValues(String field) {
return null;
}

public FieldInfos getFieldInfos() {
return new FieldInfos(new FieldInfo[0]);
}

public Bits getLiveDocs() {
return this.liveDocs;
}

public PointValues getPointValues(String fieldName) {
return null;
}

public void checkIntegrity() {}

public Fields getTermVectors(int docID) {
return null;
}

public int numDocs() {
return 0;
}

public int maxDoc() {
return maxDoc;
}

public void document(int docID, StoredFieldVisitor visitor) {}

protected void doClose() {}

public LeafMetaData getMetaData() {
return new LeafMetaData(Version.LATEST.major, Version.LATEST, null);
}

public CacheHelper getCoreCacheHelper() {
return null;
}

public CacheHelper getReaderCacheHelper() {
return null;
}

@Override
public VectorValues getVectorValues(String field) throws IOException {
return null;
}

@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException {
return null;
}
};
}
}
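
The removed emptyReader(maxDoc) built a fully deleted LeafReader with a fixed maxDoc. Where only an empty reader is needed and the exact maxDoc does not matter, Lucene's MultiReader over zero sub-readers is a possible stand-in; a sketch, not a drop-in replacement, since it reports maxDoc == 0 rather than the requested value:

import org.apache.lucene.index.MultiReader;

import java.io.IOException;

final class EmptyReaderSketch {
    static void demo() throws IOException {
        MultiReader empty = new MultiReader(); // composite reader over zero segments
        assert empty.maxDoc() == 0;
        assert empty.numDocs() == 0;
        empty.close();
    }
}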
4 changes: 0 additions & 4 deletions server/src/main/java/org/opensearch/common/lucene/search/MultiPhrasePrefixQuery.java
@@ -323,10 +323,6 @@ private boolean termArraysEquals(List<Term[]> termArrays1, List<Term[]> termArra
return true;
}

public String getField() {
return field;
}

@Override
public void visit(QueryVisitor visitor) {
visitor.visitLeaf(this);
4 changes: 0 additions & 4 deletions server/src/main/java/org/opensearch/common/lucene/search/Queries.java
@@ -87,10 +87,6 @@ public static Query newLenientFieldQuery(String field, RuntimeException e) {
return Queries.newMatchNoDocsQuery("failed [" + field + "] query, caused by " + message);
}

public static Query newNestedFilter() {
return not(newNonNestedFilter());
}

/**
* Creates a new non-nested docs query
*/
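
The removed newNestedFilter() was a thin composition of two helpers that remain in Queries. Assuming not(...) and newNonNestedFilter() stay public, callers can build the same query inline:

import org.apache.lucene.search.Query;
import org.opensearch.common.lucene.search.Queries;

final class NestedFilterSketch {
    // Matches nested (non-root) documents: the negation of the non-nested filter.
    static Query newNestedFilter() {
        return Queries.not(Queries.newNonNestedFilter());
    }
}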
4 changes: 0 additions & 4 deletions server/src/main/java/org/opensearch/common/lucene/search/function/WeightFactorFunction.java
@@ -73,10 +73,6 @@ public WeightFactorFunction(float weight) {
this(weight, null, null);
}

public WeightFactorFunction(float weight, @Nullable String functionName) {
this(weight, null, functionName);
}

@Override
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) throws IOException {
final LeafScoreFunction leafFunction = scoreFunction.getLeafScoreFunction(ctx);
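
Call sites of the removed convenience constructor can pass the missing score function explicitly. A sketch assuming the remaining three-argument constructor (float weight, ScoreFunction scoreFunction, String functionName) is public:

import org.opensearch.common.lucene.search.function.WeightFactorFunction;

final class WeightFunctionSketch {
    // The removed two-argument constructor delegated to the three-argument one
    // with a null score function; callers can do the same directly.
    static WeightFactorFunction withName(float weight, String functionName) {
        return new WeightFactorFunction(weight, null, functionName);
    }
}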
50 changes: 0 additions & 50 deletions server/src/test/java/org/opensearch/common/lucene/LuceneTests.java
@@ -92,62 +92,12 @@
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.hamcrest.Matchers.equalTo;

public class LuceneTests extends OpenSearchTestCase {
private static final NamedWriteableRegistry EMPTY_REGISTRY = new NamedWriteableRegistry(Collections.emptyList());

public void testWaitForIndex() throws Exception {
final MockDirectoryWrapper dir = newMockDirectory();

final AtomicBoolean succeeded = new AtomicBoolean(false);
final CountDownLatch latch = new CountDownLatch(1);

// Create a shadow Engine, which will freak out because there is no
// index yet
Thread t = new Thread(new Runnable() {
@Override
public void run() {
try {
latch.await();
if (Lucene.waitForIndex(dir, 5000)) {
succeeded.set(true);
} else {
fail("index should have eventually existed!");
}
} catch (InterruptedException e) {
// ignore interruptions
} catch (Exception e) {
fail("should have been able to create the engine! " + e.getMessage());
}
}
});
t.start();

// count down latch
// now shadow engine should try to be created
latch.countDown();

IndexWriterConfig iwc = newIndexWriterConfig();
iwc.setIndexDeletionPolicy(NoDeletionPolicy.INSTANCE);
iwc.setMergePolicy(NoMergePolicy.INSTANCE);
iwc.setMaxBufferedDocs(2);
IndexWriter writer = new IndexWriter(dir, iwc);
Document doc = new Document();
doc.add(new TextField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO));
writer.addDocument(doc);
writer.commit();

t.join();

writer.close();
dir.close();
assertTrue("index should have eventually existed", succeeded.get());
}

public void testCleanIndex() throws IOException {
MockDirectoryWrapper dir = newMockDirectory();
IndexWriterConfig iwc = newIndexWriterConfig();
