Skip to content

Commit 74b32b2

Browse files
committed
[SPARK-44914][BUILD] Upgrade Apache Ivy to 2.5.2
1 parent 0973929 commit 74b32b2

File tree

8 files changed

+29
-16
lines changed

8 files changed

+29
-16
lines changed

common/utils/src/main/scala/org/apache/spark/util/MavenUtils.scala

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -324,6 +324,13 @@ private[spark] object MavenUtils extends Logging {
324324
val ivySettings: IvySettings = new IvySettings
325325
try {
326326
ivySettings.load(file)
327+
if (ivySettings.getDefaultIvyUserDir == null && ivySettings.getDefaultCache == null) {
328+
// To protect old Ivy-based systems like old Spark from Apache Ivy 2.5.2's incompatibility.
329+
// `processIvyPathArg` can overwrite these later.
330+
val alternateIvyDir = System.getProperty("user.home") + File.separator + ".ivy2.5.2"
331+
ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
332+
ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
333+
}
327334
} catch {
328335
case e @ (_: IOException | _: ParseException) =>
329336
throw new SparkException(s"Failed when loading Ivy settings from $settingsFile", e)
@@ -335,10 +342,12 @@ private[spark] object MavenUtils extends Logging {
335342

336343
/* Set ivy settings for location of cache, if option is supplied */
337344
private def processIvyPathArg(ivySettings: IvySettings, ivyPath: Option[String]): Unit = {
338-
ivyPath.filterNot(_.trim.isEmpty).foreach { alternateIvyDir =>
339-
ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
340-
ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
345+
val alternateIvyDir = ivyPath.filterNot(_.trim.isEmpty).getOrElse {
346+
// To protect old Ivy-based systems like old Spark from Apache Ivy 2.5.2's incompatibility.
347+
System.getProperty("user.home") + File.separator + ".ivy2.5.2"
341348
}
349+
ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
350+
ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
342351
}
343352

344353
/* Add any optional additional remote repositories */

common/utils/src/test/scala/org/apache/spark/util/IvyTestUtils.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -374,7 +374,8 @@ private[spark] object IvyTestUtils {
374374
f(repo.toURI.toString)
375375
} finally {
376376
// Clean up
377-
if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2")) {
377+
if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2") ||
378+
repo.toString.contains(".ivy2.5.2")) {
378379
val groupDir = getBaseGroupDirectory(artifact, useIvyLayout)
379380
FileUtils.deleteDirectory(new File(repo, groupDir + File.separator + artifact.artifactId))
380381
deps.foreach { _.foreach { dep =>

core/src/main/scala/org/apache/spark/internal/config/package.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2491,10 +2491,10 @@ package object config {
24912491
.doc("Path to specify the Ivy user directory, used for the local Ivy cache and " +
24922492
"package files from spark.jars.packages. " +
24932493
"This will override the Ivy property ivy.default.ivy.user.dir " +
2494-
"which defaults to ~/.ivy2.")
2494+
"which defaults to ~/.ivy2.5.2")
24952495
.version("1.3.0")
24962496
.stringConf
2497-
.createOptional
2497+
.createWithDefault("~/.ivy2.5.2")
24982498

24992499
private[spark] val JAR_IVY_SETTING_PATH =
25002500
ConfigBuilder(MavenUtils.JAR_IVY_SETTING_PATH_KEY)

dev/deps/spark-deps-hadoop-3-hive-2.3

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,7 @@ httpcore/4.4.16//httpcore-4.4.16.jar
102102
icu4j/72.1//icu4j-72.1.jar
103103
ini4j/0.5.4//ini4j-0.5.4.jar
104104
istack-commons-runtime/3.0.8//istack-commons-runtime-3.0.8.jar
105-
ivy/2.5.1//ivy-2.5.1.jar
105+
ivy/2.5.2//ivy-2.5.2.jar
106106
jackson-annotations/2.16.1//jackson-annotations-2.16.1.jar
107107
jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
108108
jackson-core/2.16.1//jackson-core-2.16.1.jar

dev/run-tests.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -478,6 +478,8 @@ def main():
478478
rm_r(os.path.join(SPARK_HOME, "work"))
479479
rm_r(os.path.join(USER_HOME, ".ivy2", "local", "org.apache.spark"))
480480
rm_r(os.path.join(USER_HOME, ".ivy2", "cache", "org.apache.spark"))
481+
rm_r(os.path.join(USER_HOME, ".ivy2.5.2", "local", "org.apache.spark"))
482+
rm_r(os.path.join(USER_HOME, ".ivy2.5.2", "cache", "org.apache.spark"))
481483

482484
os.environ["CURRENT_BLOCK"] = str(ERROR_CODES["BLOCK_GENERAL"])
483485

docs/core-migration-guide.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,8 @@ license: |
3636

3737
- Since Spark 4.0, Spark uses `ReadWriteOncePod` instead of `ReadWriteOnce` access mode in persistence volume claims. To restore the legacy behavior, you can set `spark.kubernetes.legacy.useReadWriteOnceAccessMode` to `true`.
3838

39+
- Since Spark 4.0, Spark uses `~/.ivy2.5.2` as its Ivy user directory by default to isolate existing systems from Apache Ivy 2.5.2's incompatibility. To restore the legacy behavior, you can set `spark.jars.ivy` to `~/.ivy2`.
40+
3941
## Upgrading from Core 3.4 to 3.5
4042

4143
- Since Spark 3.5, `spark.yarn.executor.failuresValidityInterval` is deprecated. Use `spark.executor.failuresValidityInterval` instead.

pom.xml

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -146,11 +146,7 @@
146146
<jetty.version>10.0.19</jetty.version>
147147
<jakartaservlet.version>4.0.3</jakartaservlet.version>
148148
<chill.version>0.10.0</chill.version>
149-
<!--
150-
SPARK-44968: don't upgrade Ivy to version 2.5.2 until the test aborted of
151-
`HiveExternalCatalogVersionsSuite` in Java 11/17 daily tests is resolved.
152-
-->
153-
<ivy.version>2.5.1</ivy.version>
149+
<ivy.version>2.5.2</ivy.version>
154150
<oro.version>2.0.8</oro.version>
155151
<!--
156152
If you change codahale.metrics.version, you also need to change

sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,7 @@ import org.apache.spark.sql.types._
5858
import org.apache.spark.tags.ExtendedSQLTest
5959
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
6060
import org.apache.spark.util.ResetSystemProperties
61+
import org.apache.spark.util.Utils
6162

6263
@ExtendedSQLTest
6364
class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper
@@ -3873,12 +3874,12 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
38733874
test("SPARK-33084: Add jar support Ivy URI in SQL -- jar contains udf class") {
38743875
val sumFuncClass = "org.apache.spark.examples.sql.Spark33084"
38753876
val functionName = "test_udf"
3876-
withTempDir { dir =>
3877-
System.setProperty("ivy.home", dir.getAbsolutePath)
3877+
val targetCacheJarDir = new File(
3878+
System.getProperty("user.home") + File.separator + ".ivy2.5.2",
3879+
"/local/org.apache.spark/SPARK-33084/1.0/jars/")
3880+
try {
38783881
val sourceJar = new File(Thread.currentThread().getContextClassLoader
38793882
.getResource("SPARK-33084.jar").getFile)
3880-
val targetCacheJarDir = new File(dir.getAbsolutePath +
3881-
"/local/org.apache.spark/SPARK-33084/1.0/jars/")
38823883
targetCacheJarDir.mkdir()
38833884
// copy jar to local cache
38843885
FileUtils.copyFileToDirectory(sourceJar, targetCacheJarDir)
@@ -3905,6 +3906,8 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
39053906
checkAnswer(sql("SELECT * FROM v1"), Seq(Row(2.0)))
39063907
}
39073908
}
3909+
} finally {
3910+
Utils.deleteRecursively(targetCacheJarDir)
39083911
}
39093912
}
39103913

0 commit comments

Comments
 (0)