
Commit 149bed4

Remove erroring test case for pyspark shell
1 parent c626bd1 commit 149bed4

File tree

1 file changed: +1 −90 lines

  • resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest


resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PythonTestsSuite.scala

Lines changed: 1 addition & 90 deletions
@@ -16,11 +16,6 @@
  */
 package org.apache.spark.deploy.k8s.integrationtest

-import scala.collection.JavaConverters._
-import org.scalatest.concurrent.Eventually
-
-import org.apache.spark.deploy.k8s.integrationtest.KubernetesSuite.{k8sTestTag, INTERVAL, TIMEOUT}
-
 private[spark] trait PythonTestsSuite { k8sSuite: KubernetesSuite =>

   import PythonTestsSuite._
@@ -94,90 +89,6 @@ private[spark] trait PythonTestsSuite { k8sSuite: KubernetesSuite =>
       isJVM = false,
       pyFiles = Some(PYSPARK_CONTAINER_TESTS))
   }
-
-  test("Run PySpark shell", k8sTestTag) {
-    val labels = Map("spark-app-selector" -> driverPodName)
-    val driverPort = 7077
-    val blockManagerPort = 10000
-    val driverService = testBackend
-      .getKubernetesClient
-      .services()
-      .inNamespace(kubernetesTestComponents.namespace)
-      .createNew()
-        .withNewMetadata()
-          .withName(s"$driverPodName-svc")
-        .endMetadata()
-        .withNewSpec()
-          .withClusterIP("None")
-          .withSelector(labels.asJava)
-          .addNewPort()
-            .withName("driver-port")
-            .withPort(driverPort)
-            .withNewTargetPort(driverPort)
-          .endPort()
-          .addNewPort()
-            .withName("block-manager")
-            .withPort(blockManagerPort)
-            .withNewTargetPort(blockManagerPort)
-          .endPort()
-        .endSpec()
-      .done()
-    try {
-      val driverPod = testBackend
-        .getKubernetesClient
-        .pods()
-        .inNamespace(kubernetesTestComponents.namespace)
-        .createNew()
-          .withNewMetadata()
-            .withName(driverPodName)
-            .withLabels(labels.asJava)
-          .endMetadata()
-          .withNewSpec()
-            .withServiceAccountName(kubernetesTestComponents.serviceAccountName)
-            .addNewContainer()
-              .withName("pyspark-shell")
-              .withImage(pyImage)
-              .withImagePullPolicy("IfNotPresent")
-              .withCommand("/opt/spark/bin/pyspark")
-              .addToArgs("--master", s"k8s://https://kubernetes.default.svc")
-              .addToArgs("--deploy-mode", "client")
-              .addToArgs("--conf", s"spark.kubernetes.container.image=" + pyImage)
-              .addToArgs(
-                "--conf",
-                s"spark.kubernetes.namespace=${kubernetesTestComponents.namespace}")
-              .addToArgs("--conf", "spark.kubernetes.authenticate.oauthTokenFile=" +
-                "/var/run/secrets/kubernetes.io/serviceaccount/token")
-              .addToArgs("--conf", "spark.kubernetes.authenticate.caCertFile=" +
-                "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt")
-              .addToArgs("--conf", s"spark.kubernetes.driver.pod.name=$driverPodName")
-              .addToArgs("--conf", "spark.executor.memory=500m")
-              .addToArgs("--conf", "spark.executor.cores=1")
-              .addToArgs("--conf", "spark.executor.instances=1")
-              .addToArgs("--conf",
-                s"spark.driver.host=" +
-                  s"${driverService.getMetadata.getName}.${kubernetesTestComponents.namespace}.svc")
-              .addToArgs("--conf", s"spark.driver.port=$driverPort")
-              .addToArgs("--conf", s"spark.driver.blockManager.port=$blockManagerPort")
-            .endContainer()
-          .endSpec()
-        .done()
-      Eventually.eventually(TIMEOUT, INTERVAL) {
-        assert(kubernetesTestComponents.kubernetesClient
-          .pods()
-          .withName(driverPodName)
-          .getLog
-          .contains("SparkSession available"), "The application did not complete.")
-      }
-    } finally {
-      // Have to delete the service manually since it doesn't have an owner reference
-      kubernetesTestComponents
-        .kubernetesClient
-        .services()
-        .inNamespace(kubernetesTestComponents.namespace)
-        .delete(driverService)
-    }
-  }
-
 }

 private[spark] object PythonTestsSuite {
@@ -187,4 +98,4 @@ private[spark] object PythonTestsSuite {
   val PYSPARK_FILES: String = TEST_LOCAL_PYSPARK + "pyfiles.py"
   val PYSPARK_CONTAINER_TESTS: String = TEST_LOCAL_PYSPARK + "py_container_checks.py"
   val PYSPARK_MEMORY_CHECK: String = TEST_LOCAL_PYSPARK + "worker_memory_check.py"
-}
+}
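For context, the deleted test exercised the PySpark shell in client mode: since the driver runs inside a pod, the test first created a headless Service (clusterIP: None) selecting that pod, then set spark.driver.host to the service's cluster DNS name ("<service>.<namespace>.svc") so executors could dial back to the driver. The in-diff comment about deleting the service manually follows from the service having no owner reference, so Kubernetes garbage collection never reclaims it along with the pod. Below is a minimal sketch of that headless-service wiring with the fabric8 client; the client construction, namespace, and names are illustrative stand-ins, not part of this commit:

    import scala.collection.JavaConverters._

    import io.fabric8.kubernetes.api.model.{Service, ServiceBuilder}
    import io.fabric8.kubernetes.client.{DefaultKubernetesClient, KubernetesClient}

    val client: KubernetesClient = new DefaultKubernetesClient() // assumes kubeconfig/in-cluster defaults
    val namespace = "spark-test"                                 // illustrative
    val driverPodName = "spark-test-app"                         // illustrative
    val labels = Map("spark-app-selector" -> driverPodName)

    // Headless service: no cluster IP; DNS resolves directly to the selected pod.
    val driverService: Service = new ServiceBuilder()
      .withNewMetadata()
        .withName(s"$driverPodName-svc")
      .endMetadata()
      .withNewSpec()
        .withClusterIP("None")
        .withSelector(labels.asJava)
        .addNewPort().withName("driver-port").withPort(7077).endPort()
      .endSpec()
      .build()
    client.services().inNamespace(namespace).create(driverService)

    // Executors reach the driver through the service's stable DNS name.
    val driverHost = s"${driverService.getMetadata.getName}.$namespace.svc"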

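The removed assertion used ScalaTest's Eventually to poll the driver pod's log until the shell's startup banner ("SparkSession available") appeared, rather than checking once and racing the container start. A standalone sketch of that polling idiom follows; TIMEOUT, INTERVAL, and fetchDriverLog are stand-ins here, with the suite's real constants defined in KubernetesSuite:

    import org.scalatest.concurrent.{Eventually, PatienceConfiguration}
    import org.scalatest.time.{Minutes, Seconds, Span}

    // Stand-ins for the TIMEOUT and INTERVAL constants the test imported
    // from KubernetesSuite; the actual values there may differ.
    val TIMEOUT = PatienceConfiguration.Timeout(Span(2, Minutes))
    val INTERVAL = PatienceConfiguration.Interval(Span(2, Seconds))

    // Hypothetical helper standing in for the fabric8 call
    // pods().withName(driverPodName).getLog used by the removed test.
    def fetchDriverLog(): String = ???

    // eventually() re-runs the block on failure, sleeping INTERVAL between
    // attempts, and only fails the test once TIMEOUT elapses.
    Eventually.eventually(TIMEOUT, INTERVAL) {
      assert(fetchDriverLog().contains("SparkSession available"),
        "The application did not complete.")
    }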
Comments (0)