
Commit 3d03234

HyukjinKwon authored and dongjoon-hyun committed
[SPARK-33810][TESTS] Reenable test cases disabled in SPARK-31732
### What changes were proposed in this pull request?

The test failures were due to the machines being slow in Jenkins. We switched to Ubuntu 20, if I am not wrong. All machines now appear to be functioning properly, unlike in the past, and the tests pass without a problem. This PR proposes to enable these tests back.

### Why are the changes needed?

To restore test coverage.

### Does this PR introduce _any_ user-facing change?

No, dev-only.

### How was this patch tested?

Jenkins jobs in this PR show the flakiness.

Closes #30798 from HyukjinKwon/do-not-merge-test.

Authored-by: HyukjinKwon <gurwls223@apache.org>
Signed-off-by: Dongjoon Hyun <dongjoon@apache.org>
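For context, these suites disable a flaky test with ScalaTest's `ignore(...)` registration and re-enable it by switching back to `test(...)`, which is exactly the one-line change repeated in each file below. The following is a minimal, illustrative sketch of that pattern (the suite name, SPARK-XXXXX ticket, and assertions are placeholders, not code from this commit):

```scala
import org.scalatest.funsuite.AnyFunSuite

// Illustrative suite, not part of this PR: shows how a flaky test is
// parked with `ignore` and brought back by renaming it to `test`.
class ExampleSuite extends AnyFunSuite {

  // Disabled: the test is still registered but skipped, and shows up
  // as "ignored" in the test report instead of running.
  // TODO (SPARK-XXXXX): re-enable it
  ignore("subscribing topic by pattern with topic deletions") {
    assert(1 + 1 == 2)
  }

  // Re-enabled: the test runs on every build again.
  test("offset recovery") {
    assert("recovery".nonEmpty)
  }
}
```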
1 parent 8666d1c commit 3d03234

File tree

4 files changed: +5 −10 lines changed


external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaMicroBatchSourceSuite.scala

Lines changed: 1 addition & 2 deletions
@@ -349,8 +349,7 @@ abstract class KafkaMicroBatchSourceSuiteBase extends KafkaSourceSuiteBase {
     )
   }

-  // TODO (SPARK-31731): re-enable it
-  ignore("subscribing topic by pattern with topic deletions") {
+  test("subscribing topic by pattern with topic deletions") {
     val topicPrefix = newTopic()
     val topic = topicPrefix + "-seems"
     val topic2 = topicPrefix + "-bad"

external/kafka-0-10-sql/src/test/scala/org/apache/spark/sql/kafka010/KafkaRelationSuite.scala

Lines changed: 1 addition & 2 deletions
@@ -174,8 +174,7 @@ abstract class KafkaRelationSuiteBase extends QueryTest with SharedSparkSession
       ("3", Seq(("e", "f".getBytes(UTF_8)), ("e", "g".getBytes(UTF_8))))).toDF)
   }

-  // TODO (SPARK-31729): re-enable it
-  ignore("timestamp provided for starting and ending") {
+  test("timestamp provided for starting and ending") {
     val (topic, timestamps) = prepareTimestampRelatedUnitTest

     // timestamp both presented: starting "first" ending "finalized"

external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/DirectKafkaStreamSuite.scala

Lines changed: 2 additions & 4 deletions
@@ -332,8 +332,7 @@ class DirectKafkaStreamSuite
   }

   // Test to verify the offset ranges can be recovered from the checkpoints
-  // TODO (SPARK-31722): re-enable it
-  ignore("offset recovery") {
+  test("offset recovery") {
     val topic = "recovery"
     kafkaTestUtils.createTopic(topic)
     testDir = Utils.createTempDir()
@@ -420,8 +419,7 @@ class DirectKafkaStreamSuite
   }

   // Test to verify the offsets can be recovered from Kafka
-  // TODO (SPARK-31722): re-enable it
-  ignore("offset recovery from kafka") {
+  test("offset recovery from kafka") {
     val topic = "recoveryfromkafka"
     kafkaTestUtils.createTopic(topic)

streaming/src/test/scala/org/apache/spark/streaming/StreamingContextSuite.scala

Lines changed: 1 addition & 2 deletions
@@ -293,8 +293,7 @@ class StreamingContextSuite
     }
   }

-  // TODO (SPARK-31728): re-enable it
-  ignore("stop gracefully") {
+  test("stop gracefully") {
     val conf = new SparkConf().setMaster(master).setAppName(appName)
     conf.set("spark.dummyTimeConfig", "3600s")
     val sc = new SparkContext(conf)
