Skip to content

Commit a21f95a

Browse files
committed
remove previous ivy resolution when using spark-submit
1 parent 715f084 commit a21f95a

File tree

2 files changed

+26
-17
lines changed

2 files changed

+26
-17
lines changed

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 23 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -756,6 +756,20 @@ private[spark] object SparkSubmitUtils {
756756
val cr = new ChainResolver
757757
cr.setName("list")
758758

759+
val repositoryList = remoteRepos.getOrElse("")
760+
// add any remote repositories other than Maven Central
761+
if (repositoryList.trim.nonEmpty) {
762+
repositoryList.split(",").zipWithIndex.foreach { case (repo, i) =>
763+
val brr: IBiblioResolver = new IBiblioResolver
764+
brr.setM2compatible(true)
765+
brr.setUsepoms(true)
766+
brr.setRoot(repo)
767+
brr.setName(s"repo-${i + 1}")
768+
cr.add(brr)
769+
printStream.println(s"$repo added as a remote repository with the name: ${brr.getName}")
770+
}
771+
}
772+
759773
val localM2 = new IBiblioResolver
760774
localM2.setM2compatible(true)
761775
localM2.setRoot(m2Path.toURI.toString)
@@ -786,20 +800,6 @@ private[spark] object SparkSubmitUtils {
786800
sp.setRoot("http://dl.bintray.com/spark-packages/maven")
787801
sp.setName("spark-packages")
788802
cr.add(sp)
789-
790-
val repositoryList = remoteRepos.getOrElse("")
791-
// add any other remote repositories other than maven central
792-
if (repositoryList.trim.nonEmpty) {
793-
repositoryList.split(",").zipWithIndex.foreach { case (repo, i) =>
794-
val brr: IBiblioResolver = new IBiblioResolver
795-
brr.setM2compatible(true)
796-
brr.setUsepoms(true)
797-
brr.setRoot(repo)
798-
brr.setName(s"repo-${i + 1}")
799-
cr.add(brr)
800-
printStream.println(s"$repo added as a remote repository with the name: ${brr.getName}")
801-
}
802-
}
803803
cr
804804
}
805805

@@ -922,6 +922,15 @@ private[spark] object SparkSubmitUtils {
922922

923923
// A Module descriptor must be specified. Entries are dummy strings
924924
val md = getModuleDescriptor
925+
// clear ivy resolution from previous launches. The resolution file is usually at
926+
// ~/.ivy2/org.apache.spark-spark-submit-parent-default.xml. In between runs, this file
927+
// leads to confusion with Ivy when the files can no longer be found at the repository
928+
// declared in that file.
929+
val mdId = md.getModuleRevisionId
930+
val previousResolution = new File(ivySettings.getDefaultCache,
931+
s"${mdId.getOrganisation}-${mdId.getName}-$ivyConfName.xml")
932+
if (previousResolution.exists) previousResolution.delete
933+
925934
md.setDefaultConf(ivyConfName)
926935

927936
// Add exclusion rules for Spark and Scala Library

core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -77,9 +77,9 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
7777
assert(resolver2.getResolvers.size() === 7)
7878
val expected = repos.split(",").map(r => s"$r/")
7979
resolver2.getResolvers.toArray.zipWithIndex.foreach { case (resolver: AbstractResolver, i) =>
80-
if (i > 3) {
81-
assert(resolver.getName === s"repo-${i - 3}")
82-
assert(resolver.asInstanceOf[IBiblioResolver].getRoot === expected(i - 4))
80+
if (i < 3) {
81+
assert(resolver.getName === s"repo-${i + 1}")
82+
assert(resolver.asInstanceOf[IBiblioResolver].getRoot === expected(i))
8383
}
8484
}
8585
}

0 commit comments

Comments
 (0)