Commit 6bb74d3

Revert "Fixing few of the Flaky tests related to derived source tests (opensearch-project#18493)"

This reverts commit 674de10.

Signed-off-by: Andrew Ross <andrross@amazon.com>

1 parent: 8b95b19

4 files changed (+42, -77 lines)

server/src/internalClusterTest/java/org/opensearch/recovery/FullRollingRestartIT.java

Lines changed: 36 additions & 49 deletions
@@ -453,7 +453,7 @@ public void testDerivedSourceRollingRestart() throws Exception {
         rollingRestartWithVerification(docCount);
     }
 
-    public void testDerivedSourceWithMultiFieldsRollingRestart() throws Exception {
+    public void testDerivedSourceWithMixedVersionRollingRestart() throws Exception {
         String mapping = """
             {
                 "properties": {
@@ -503,12 +503,12 @@ public void testDerivedSourceWithMultiFieldsRollingRestart() throws Exception {
 
         // Add replicas before starting new nodes
         assertAcked(
-            client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.number_of_replicas", 1))
+            client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.number_of_replicas", 2))
         );
 
         // Add nodes and additional documents
         int totalDocs = docCount;
-        for (int i = 0; i < 1; i++) {
+        for (int i = 0; i < 2; i++) {
             internalCluster().startNode();
 
             // Add more documents
@@ -564,7 +564,7 @@ public void testDerivedSourceWithConcurrentUpdatesRollingRestart() throws Except
         prepareCreate(
             "test",
             Settings.builder()
-                .put("index.number_of_shards", 2)
+                .put("index.number_of_shards", 3)
                 .put("index.number_of_replicas", 0)
                 .put("index.derived_source.enabled", true)
                 .put("index.refresh_interval", "1s")
@@ -601,86 +601,74 @@ public void testDerivedSourceWithConcurrentUpdatesRollingRestart() throws Except
 
         // Start concurrent updates during rolling restart
         logger.info("--> starting rolling restart with concurrent updates");
+        concurrentUpdatesRollingRestartWithVerification(docCount);
+    }
 
-        final AtomicBoolean stop = new AtomicBoolean(false);
-        final AtomicInteger successfulUpdates = new AtomicInteger(0);
-        final CountDownLatch updateLatch = new CountDownLatch(1);
-        final Thread updateThread = new Thread(() -> {
+    private void concurrentUpdatesRollingRestartWithVerification(int initialDocCount) throws Exception {
+        AtomicBoolean stop = new AtomicBoolean(false);
+        AtomicInteger totalUpdates = new AtomicInteger(0);
+        CountDownLatch updateLatch = new CountDownLatch(1);
+
+        // Start concurrent update thread
+        Thread updateThread = new Thread(() -> {
             try {
-                updateLatch.await();
+                updateLatch.await(); // Wait for cluster to be ready
                 while (stop.get() == false) {
                     try {
-                        // Update documents sequentially to avoid conflicts
-                        for (int i = 0; i < docCount && !stop.get(); i++) {
-                            client().prepareUpdate("test", String.valueOf(i))
-                                .setRetryOnConflict(3)
-                                .setDoc("counter", successfulUpdates.get() + 1, "last_updated", System.currentTimeMillis(), "version", 1)
-                                .execute()
-                                .actionGet(TimeValue.timeValueSeconds(5));
-                            successfulUpdates.incrementAndGet();
-                            Thread.sleep(50); // Larger delay between updates
-                        }
+                        int docId = randomIntBetween(0, initialDocCount - 1);
+                        client().prepareUpdate("test", String.valueOf(docId))
+                            .setRetryOnConflict(3)
+                            .setDoc("counter", randomIntBetween(0, 1000), "last_updated", System.currentTimeMillis(), "version", 1)
+                            .execute()
+                            .actionGet();
+                        totalUpdates.incrementAndGet();
+                        Thread.sleep(10);
                    } catch (Exception e) {
-                        if (stop.get() == false) {
-                            logger.warn("Error in update thread", e);
+                        if (e instanceof InterruptedException) {
+                            break;
                         }
+                        logger.warn("Error in update thread", e);
                     }
                 }
             } catch (InterruptedException e) {
                 Thread.currentThread().interrupt();
             }
         });
+        updateThread.start();
 
         try {
             // Add replicas
             assertAcked(
-                client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.number_of_replicas", 1))
+                client().admin().indices().prepareUpdateSettings("test").setSettings(Settings.builder().put("index.number_of_replicas", 2))
             );
 
             // Start additional nodes
-            for (int i = 0; i < 1; i++) {
+            for (int i = 0; i < 2; i++) {
                 internalCluster().startNode();
             }
             ensureGreen("test");
 
-            // Start updates after cluster is stable
-            updateThread.start();
+            // Start updates
             updateLatch.countDown();
 
             // Wait for some updates to occur
-            assertBusy(() -> { assertTrue("No successful updates occurred", successfulUpdates.get() > 0); }, 30, TimeUnit.SECONDS);
+            Thread.sleep(2000);
 
             // Rolling restart of all nodes
             for (String node : internalCluster().getNodeNames()) {
-                // Stop updates temporarily during node restart
-                stop.set(true);
-                Thread.sleep(1000); // Wait for in-flight operations to complete
-
                 internalCluster().restartNode(node);
-                ensureGreen(TimeValue.timeValueSeconds(60));
-
-                // Verify data consistency
-                refresh("test");
-                verifyDerivedSourceWithUpdates(docCount);
-
-                // Resume updates
-                stop.set(false);
+                ensureGreen(TimeValue.timeValueMinutes(2));
+                verifyDerivedSourceWithUpdates(initialDocCount);
             }
-
         } finally {
-            // Clean shutdown
             stop.set(true);
-            updateThread.join(TimeValue.timeValueSeconds(30).millis());
-            if (updateThread.isAlive()) {
-                updateThread.interrupt();
-                updateThread.join(TimeValue.timeValueSeconds(5).millis());
-            }
+            updateThread.join();
        }
 
-        logger.info("--> performed {} successful updates during rolling restart", successfulUpdates.get());
+        logger.info("--> performed {} concurrent updates during rolling restart", totalUpdates.get());
         refresh("test");
         flush("test");
-        verifyDerivedSourceWithUpdates(docCount);
+        verifyDerivedSourceWithUpdates(initialDocCount);
     }
 
     private void verifyDerivedSourceWithUpdates(int expectedDocs) throws Exception {
@@ -699,8 +687,7 @@ private void verifyDerivedSourceWithUpdates(int expectedDocs) throws Exception {
             assertEquals("text value " + id, source.get("text_field"));
             assertNotNull("counter missing for doc " + id, source.get("counter"));
             assertFalse(((String) source.get("last_updated")).isEmpty());
-            Integer counter = (Integer) source.get("counter");
-            assertEquals(counter == 0 ? 0 : 1, source.get("version"));
+            assertEquals(1, source.get("version"));
 
             // Verify text_field maintains original value
             assertEquals("text value " + id, source.get("text_field"));

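The restored concurrentUpdatesRollingRestartWithVerification helper gates a background update thread on a CountDownLatch and shuts it down with an AtomicBoolean flag followed by join(). Below is a minimal, self-contained sketch of that coordination pattern using only JDK types; updateOneDocument is a hypothetical stand-in for the test's client().prepareUpdate(...) call, not an OpenSearch API.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

public class ConcurrentUpdatePatternSketch {

    // Hypothetical stand-in for client().prepareUpdate(...).execute().actionGet().
    static void updateOneDocument(int docId) {
        // no-op in this sketch
    }

    public static void main(String[] args) throws InterruptedException {
        final AtomicBoolean stop = new AtomicBoolean(false);
        final AtomicInteger totalUpdates = new AtomicInteger(0);
        final CountDownLatch readyLatch = new CountDownLatch(1);
        final int docCount = 100;

        Thread updateThread = new Thread(() -> {
            try {
                readyLatch.await();                 // wait until the "cluster" is ready
                while (stop.get() == false) {
                    updateOneDocument(ThreadLocalRandom.current().nextInt(docCount));
                    totalUpdates.incrementAndGet();
                    Thread.sleep(10);               // throttle updates slightly
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // exit quietly if interrupted
            }
        });
        updateThread.start();

        readyLatch.countDown();                     // release the updater
        Thread.sleep(500);                          // stand-in for the rolling-restart phase

        stop.set(true);                             // signal shutdown, then wait for the thread
        updateThread.join();
        System.out.println("performed " + totalUpdates.get() + " updates");
    }
}

Setting the flag before join() mirrors the finally block in the restored test and keeps shutdown deterministic even when the updater is mid-sleep.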
server/src/internalClusterTest/java/org/opensearch/recovery/RecoveryWhileUnderLoadIT.java

Lines changed: 1 addition & 6 deletions
@@ -749,7 +749,6 @@ protected XContentBuilder generateSource(long id, Random random) throws IOExcept
                 .endObject();
             }
         }) {
-            indexer.setUseAutoGeneratedIDs(true);
             logger.info("--> waiting for {} docs to be indexed ...", waitFor);
             waitForDocs(waitFor, indexer);
             indexer.assertNoFailures();
@@ -770,10 +769,7 @@ protected XContentBuilder generateSource(long id, Random random) throws IOExcept
             allowNodes("test", 2);
 
             logger.info("--> waiting for GREEN health status ...");
-            // make sure the cluster state is green, and all has been recovered
-            assertNoTimeout(
-                client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setTimeout("5m").setWaitForGreenStatus()
-            );
+            ensureGreen(TimeValue.timeValueMinutes(2));
 
             logger.info("--> waiting for {} docs to be indexed ...", totalNumDocs);
             waitForDocs(totalNumDocs, indexer);
@@ -862,7 +858,6 @@ protected XContentBuilder generateSource(long id, Random random) throws IOExcept
             }
         }) {
 
-            indexer.setUseAutoGeneratedIDs(true);
             for (int i = 0; i < numDocs; i += scaledRandomIntBetween(500, Math.min(1000, numDocs))) {
                 indexer.assertNoFailures();
                 logger.info("--> waiting for {} docs to be indexed ...", i);

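Both health checks touched in this file block until the cluster reaches green or a timeout expires: assertNoTimeout(...prepareHealth()...) on the removed side and ensureGreen(TimeValue.timeValueMinutes(2)) on the restored side. The helper below is a generic, hypothetical poll-with-deadline sketch in plain Java, shown only to illustrate that shape; it is not the OpenSearch test API.

import java.util.function.BooleanSupplier;

public final class WaitUntilSketch {

    // Polls the condition until it holds or the deadline passes; returns whether it ever held.
    static boolean waitUntil(BooleanSupplier condition, long timeoutMillis, long pollMillis)
            throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return true;
            }
            Thread.sleep(pollMillis);
        }
        return condition.getAsBoolean();
    }

    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        // Trivial condition that turns true after ~200 ms, checked every 50 ms for up to 2 minutes.
        boolean ok = waitUntil(() -> System.currentTimeMillis() - start > 200, 120_000, 50);
        System.out.println("condition met: " + ok);
    }
}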
server/src/test/java/org/opensearch/common/lucene/index/DerivedSourceLeafReaderTests.java

Lines changed: 1 addition & 9 deletions
@@ -18,7 +18,6 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
 import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.StoredFieldVisitor;
 import org.apache.lucene.index.StoredFields;
 import org.apache.lucene.store.Directory;
@@ -146,9 +145,7 @@ public void testGetCoreAndReaderCacheHelper() {
 
     public void testWithRandomDocuments() throws IOException {
         Directory randomDir = newDirectory();
-        IndexWriterConfig config = newIndexWriterConfig(random(), null).setCodec(new RandomCodec(random()))
-            .setMergePolicy(NoMergePolicy.INSTANCE); // Prevent automatic merges
-
+        IndexWriterConfig config = newIndexWriterConfig(random(), null).setCodec(new RandomCodec(random()));
         IndexWriter randomWriter = new IndexWriter(randomDir, config);
 
         int numDocs = randomIntBetween(1, 10);
@@ -161,14 +158,9 @@ public void testWithRandomDocuments() throws IOException {
             doc.add(new StoredField("_source", source));
             randomWriter.addDocument(doc);
         }
-
-        // Force merge into a single segment
-        randomWriter.forceMerge(1);
         randomWriter.commit();
 
         DirectoryReader randomDirectoryReader = DirectoryReader.open(randomWriter);
-        assertEquals("Should have exactly one segment", 1, randomDirectoryReader.leaves().size());
-
         LeafReader randomLeafReader = randomDirectoryReader.leaves().get(0).reader();
         DerivedSourceLeafReader randomDerivedReader = new DerivedSourceLeafReader(
             randomLeafReader,

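The lines removed from testWithRandomDocuments force-merged the index and asserted that the reader had exactly one segment. Below is a standalone sketch of that force-merge-to-one-segment idea against plain Lucene; it is an illustration assuming a recent Lucene 9.x core dependency and the default merge policy, not the test's randomized writer config.

import org.apache.lucene.document.Document;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;

public class SingleSegmentSketch {
    public static void main(String[] args) throws Exception {
        try (Directory dir = new ByteBuffersDirectory();
             IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
            for (int i = 0; i < 5; i++) {
                Document doc = new Document();
                doc.add(new StoredField("_source", "{\"field\":" + i + "}"));
                writer.addDocument(doc);
                writer.commit(); // each commit typically flushes a new segment
            }
            writer.forceMerge(1); // merge the small segments down to one
            writer.commit();
            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                System.out.println("leaves: " + reader.leaves().size()); // expected: 1
            }
        }
    }
}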
server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java

Lines changed: 4 additions & 13 deletions
@@ -8233,16 +8233,14 @@ public void testGetMaxSeqNoFromSegmentInfosConcurrentWrites() throws IOException
     }
 
     public void testNewChangesSnapshotWithDerivedSource() throws IOException {
-        IOUtils.close(engine, store);
+
         // Create test documents
         List<Engine.Operation> operations = new ArrayList<>();
         final int numDocs = randomIntBetween(1, 100);
 
         try (Store store = createStore()) {
             EngineConfig engineConfig = createEngineConfigWithMapperSupplierForDerivedSource(store);
-            InternalEngine engine = null;
-            try {
-                engine = createEngine(engineConfig);
+            try (InternalEngine engine = createEngine(engineConfig)) {
                 // Index documents
                 for (int i = 0; i < numDocs; i++) {
                     ParsedDocument doc = testParsedDocument(
@@ -8296,24 +8294,19 @@ public void testNewChangesSnapshotWithDerivedSource() throws IOException {
                     // Verify we got all documents
                     assertThat(count, equalTo(numDocs));
                 }
-            } finally {
-                IOUtils.close(engine, store);
             }
         }
     }
 
-    public void testNewChangesSnapshotWithDeleteAndUpdateWithDerivedSource() throws IOException {
-        IOUtils.close(engine, store);
+    public void testNewChangesSnapshotWithDeleteAndUpdate() throws IOException {
         final List<Engine.Operation> operations = new ArrayList<>();
         int numDocs = randomIntBetween(10, 100);
         int numDocsToDelete = randomIntBetween(1, numDocs / 2);
         Set<String> deletedDocs = new HashSet<>();
 
         try (Store store = createStore()) {
             EngineConfig engineConfig = createEngineConfigWithMapperSupplierForDerivedSource(store);
-            InternalEngine engine = null;
-            try {
-                engine = createEngine(engineConfig);
+            try (InternalEngine engine = createEngine(engineConfig)) {
                 // First index documents
                 for (int i = 0; i < numDocs; i++) {
                     ParsedDocument doc = testParsedDocument(
@@ -8458,8 +8451,6 @@ public void testNewChangesSnapshotWithDeleteAndUpdateWithDerivedSource() throws
                     }
                     assertEquals("Expected number of operations in range", to - from + 1, count);
                 }
-            } finally {
-                IOUtils.close(engine, store);
             }
         }
     }

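The InternalEngineTests hunks replace the explicit InternalEngine engine = null; try { ... } finally { IOUtils.close(engine, store); } shape (the removed side) with try-with-resources (the restored side). Below is a minimal sketch of the two cleanup styles against a toy Closeable; the Resource class is hypothetical and merely stands in for the engine/store pair.

import java.io.Closeable;
import java.io.IOException;

public class CleanupStylesSketch {

    // Toy stand-in for an engine or store that must be released.
    static final class Resource implements Closeable {
        private final String name;
        Resource(String name) { this.name = name; }
        void use() { System.out.println("using " + name); }
        @Override public void close() { System.out.println("closed " + name); }
    }

    // Removed side of the hunks: declare first, close explicitly in finally.
    static void explicitFinally() throws IOException {
        Resource store = new Resource("store");
        Resource engine = null;
        try {
            engine = new Resource("engine");
            engine.use();
        } finally {
            // Close both even if engine creation failed (engine may still be null).
            if (engine != null) {
                engine.close();
            }
            store.close();
        }
    }

    // Restored side: scope-bound cleanup; resources close automatically in reverse order.
    static void tryWithResources() throws IOException {
        try (Resource store = new Resource("store");
             Resource engine = new Resource("engine")) {
            engine.use();
        } // engine closed first, then store
    }

    public static void main(String[] args) throws IOException {
        explicitFinally();
        tryWithResources();
    }
}

IOUtils.close in the removed version tolerates the engine still being null; try-with-resources gets the same effect by closing only the resources that were actually initialized.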