Commit 0b15e2c

fix ut
1 parent b9fa638 commit 0b15e2c

2 files changed: +38 -33 lines changed
Binary file not shown (-190 Bytes).

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 38 additions & 33 deletions
@@ -1847,39 +1847,44 @@ class SparkSubmitSuite
 
   test("SPARK-52334: Update all files, jars, and pyFiles to" +
     "reference the working directory after they are downloaded") {
-    val testFile = "test_metrics_config.properties"
-    val testPyFile = "test_metrics_system.properties"
-    val testJar = "TestUDTF.jar"
-    val testArchives = "archive1.zip#test_archives"
-    val clArgs = Seq(
-      "--deploy-mode", "client",
-      "--proxy-user", "test.user",
-      "--master", "k8s://host:port",
-      "--executor-memory", "5g",
-      "--class", "org.SomeClass",
-      "--driver-memory", "4g",
-      "--conf", "spark.kubernetes.namespace=spark",
-      "--conf", "spark.kubernetes.driver.container.image=bar",
-      "--conf", "spark.kubernetes.submitInDriver=true",
-      "--files", s"src/test/resources/$testFile",
-      "--py-files", s"src/test/resources/$testPyFile",
-      "--jars", s"src/test/resources/$testJar",
-      "--archives", s"src/test/resources/$testArchives",
-      "/home/thejar.jar",
-      "arg1")
-    val appArgs = new SparkSubmitArguments(clArgs)
-    val _ = submit.prepareSubmitEnvironment(appArgs)
-
-    appArgs.files should be (Utils.resolveURIs(s"$testFile,$testPyFile"))
-    appArgs.pyFiles should be (Utils.resolveURIs(testPyFile))
-    appArgs.jars should be (Utils.resolveURIs(testJar))
-    appArgs.archives should be (Utils.resolveURIs(s"src/test/resources/$testArchives"))
-
-    Files.isDirectory(Paths.get("test_archives")) should be(true)
-    Files.delete(Paths.get(testFile))
-    Files.delete(Paths.get(testPyFile))
-    Files.delete(Paths.get(testJar))
-    Files.delete(Paths.get("test_archives/archive1.text"))
+    withTempDir { dir =>
+      val text1 = File.createTempFile("test1_", ".txt", dir)
+      val zipFile1 = File.createTempFile("test1_", ".zip", dir)
+      TestUtils.createJar(Seq(text1), zipFile1)
+      val testFile = "test_metrics_config.properties"
+      val testPyFile = "test_metrics_system.properties"
+      val testJar = "TestUDTF.jar"
+      val clArgs = Seq(
+        "--deploy-mode", "client",
+        "--proxy-user", "test.user",
+        "--master", "k8s://host:port",
+        "--executor-memory", "5g",
+        "--class", "org.SomeClass",
+        "--driver-memory", "4g",
+        "--conf", "spark.kubernetes.namespace=spark",
+        "--conf", "spark.kubernetes.driver.container.image=bar",
+        "--conf", "spark.kubernetes.submitInDriver=true",
+        "--files", s"src/test/resources/$testFile",
+        "--py-files", s"src/test/resources/$testPyFile",
+        "--jars", s"src/test/resources/$testJar",
+        "--archives", s"${zipFile1.getAbsolutePath}#test_archives",
+        "/home/thejar.jar",
+        "arg1")
+      val appArgs = new SparkSubmitArguments(clArgs)
+      val _ = submit.prepareSubmitEnvironment(appArgs)
+
+      appArgs.files should be (Utils.resolveURIs(s"$testFile,$testPyFile"))
+      appArgs.pyFiles should be (Utils.resolveURIs(testPyFile))
+      appArgs.jars should be (Utils.resolveURIs(testJar))
+      appArgs.archives should be (Utils.resolveURIs(s"${zipFile1.getAbsolutePath}#test_archives"))
+
+      Files.isDirectory(Paths.get("test_archives")) should be(true)
+      Files.delete(Paths.get(testFile))
+      Files.delete(Paths.get(testPyFile))
+      Files.delete(Paths.get(testJar))
+      Files.delete(Paths.get(s"test_archives/${text1.getName}"))
+      Files.delete(Paths.get("test_archives/META-INF/MANIFEST.MF"))
+    }
   }
 
   // Requires Python dependencies for Spark Connect. Should be enabled by default.
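
The change appears to replace the checked-in binary archive fixture (the -190 Bytes binary file above) with an archive generated at test time. A minimal, standalone sketch of that pattern (not the test itself), assuming Spark's TestUtils.createJar and Utils.createTempDir test helpers are on the classpath; the payload/archive names and the demo_archives alias are illustrative only:

import java.io.File
import org.apache.spark.TestUtils
import org.apache.spark.util.Utils

// Generate a throwaway zip in a scratch directory instead of shipping a binary fixture.
val dir = Utils.createTempDir()                              // temp dir, removed by Spark's shutdown hook
val payload = File.createTempFile("payload_", ".txt", dir)   // file packed into the archive
val archive = File.createTempFile("demo_", ".zip", dir)      // archive handed to --archives
TestUtils.createJar(Seq(payload), archive)                   // jar and zip share the same container format

// spark-submit style "--archives" value: local path plus a "#" alias for the unpacked directory.
val archivesArg = s"${archive.getAbsolutePath}#demo_archives"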
