
Commit 2234667

cloud-fan authored and HyukjinKwon committed
[SPARK-27563][SQL][TEST] automatically get the latest Spark versions in HiveExternalCatalogVersionsSuite
## What changes were proposed in this pull request?

We can get the latest downloadable Spark versions from https://dist.apache.org/repos/dist/release/spark/

## How was this patch tested?

manually.

Closes #24454 from cloud-fan/test.

Authored-by: Wenchen Fan <wenchen@databricks.com>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
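For reference, a minimal standalone sketch of this approach, assuming the dist index lists each release as an `<li><a href="spark-x.y.z/">` entry (the same assumption the patch's regex makes). The `LatestSparkVersions` object, the `fetch` helper, and the explicit `currentVersion` parameter are illustrative and not part of the patch:

```scala
import scala.io.Source
import scala.util.control.NonFatal

// Illustrative helper mirroring the patch: scrape the Apache dist index for
// released Spark versions and keep only those older than the version under test.
object LatestSparkVersions {
  def fetch(currentVersion: String): Seq[String] = {
    try {
      Source.fromURL("https://dist.apache.org/repos/dist/release/spark/").mkString
        .split("\n")
        .filter(_.contains("""<li><a href="spark-"""))   // keep only release list entries
        .map("""<a href="spark-(\d.\d.\d)/">""".r.findFirstMatchIn(_).get.group(1)) // extract "x.y.z"
        .filter(_ < currentVersion)                       // keep only versions older than the one under test
        .toSeq
    } catch {
      // A network or parse failure yields an empty list instead of an exception.
      case NonFatal(_) => Nil
    }
  }

  def main(args: Array[String]): Unit = {
    // Example: list released versions older than a hypothetical 3.0.0 under test.
    fetch("3.0.0").foreach(println)
  }
}
```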
1 parent d2656aa commit 2234667

File tree

1 file changed: +18 -1 lines changed


sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala

Lines changed: 18 additions & 1 deletion
@@ -22,6 +22,7 @@ import java.nio.charset.StandardCharsets
 import java.nio.file.{Files, Paths}
 
 import scala.sys.process._
+import scala.util.control.NonFatal
 
 import org.apache.hadoop.conf.Configuration
 
@@ -169,6 +170,10 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
       """.stripMargin.getBytes("utf8"))
     // scalastyle:on line.size.limit
 
+    if (PROCESS_TABLES.testingVersions.isEmpty) {
+      fail("Fail to get the latest Spark versions to test.")
+    }
+
     PROCESS_TABLES.testingVersions.zipWithIndex.foreach { case (version, index) =>
       val sparkHome = new File(sparkTestingDir, s"spark-$version")
       if (!sparkHome.exists()) {
@@ -206,7 +211,19 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
 
 object PROCESS_TABLES extends QueryTest with SQLTestUtils {
   // Tests the latest version of every release line.
-  val testingVersions = Seq("2.3.3", "2.4.2")
+  val testingVersions: Seq[String] = {
+    import scala.io.Source
+    try {
+      Source.fromURL("https://dist.apache.org/repos/dist/release/spark/").mkString
+        .split("\n")
+        .filter(_.contains("""<li><a href="spark-"""))
+        .map("""<a href="spark-(\d.\d.\d)/">""".r.findFirstMatchIn(_).get.group(1))
+        .filter(_ < org.apache.spark.SPARK_VERSION)
+    } catch {
+      // do not throw exception during object initialization.
+      case NonFatal(_) => Nil
+    }
+  }
 
   protected var spark: SparkSession = _
 
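Design note from the diff above: the initializer deliberately swallows failures and returns `Nil`, so loading the `PROCESS_TABLES` object can never throw; the test then turns an empty version list into an explicit failure via `fail(...)`, surfacing network or parsing problems as a clear test error rather than a class-initialization crash. As a quick, hypothetical spot check of the extraction regex, the sample HTML line below is illustrative, not copied from the live index page:

```scala
// Hypothetical spot check of the version-extraction regex used in the patch.
object RegexSpotCheck {
  def main(args: Array[String]): Unit = {
    // A sample line in the shape the patch's filter expects (illustrative, not fetched).
    val sample = """<li><a href="spark-2.4.2/">spark-2.4.2/</a></li>"""
    val version = """<a href="spark-(\d.\d.\d)/">""".r.findFirstMatchIn(sample).map(_.group(1))
    println(version) // prints Some(2.4.2)
  }
}
```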