
Commit e6f35d2

lins05 authored and ash211 committed
Fix k8s integration tests (apache#44)
* Fixed k8s integration test
  - Enable spark ui explicitly for in-process submit
  - Fixed some broken assertions in integration tests
  - Fixed a scalastyle error in SparkDockerImageBuilder.scala
  - Log into target/integration-tests.log like other modules
* Fixed line length.
* CR
1 parent 86bd589 commit e6f35d2

3 files changed, +49 -4 lines changed
resource-managers/kubernetes/integration-tests/src/test/resources/log4j.properties

Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set everything to be logged to the file target/integration-tests.log
+log4j.rootCategory=INFO, file
+log4j.appender.file=org.apache.log4j.FileAppender
+log4j.appender.file.append=true
+log4j.appender.file.file=target/integration-tests.log
+log4j.appender.file.layout=org.apache.log4j.PatternLayout
+log4j.appender.file.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n
+
+# Ignore messages below warning level from a few verbose libraries.
+log4j.logger.com.sun.jersey=WARN
+log4j.logger.org.apache.hadoop=WARN
+log4j.logger.org.eclipse.jetty=WARN
+log4j.logger.org.mortbay=WARN
+log4j.logger.org.spark_project.jetty=WARN
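This mirrors the log4j setup other modules use for their integration tests: anything logged through log4j 1.x during a run is appended to target/integration-tests.log instead of the console. A minimal smoke check, assuming only that log4j 1.x and this properties file are on the test classpath (the object name is hypothetical):

    import org.apache.log4j.Logger

    object LogWiringCheck {
      def main(args: Array[String]): Unit = {
        // With the properties file above on the classpath, log4j routes this
        // INFO message to target/integration-tests.log via the file appender.
        Logger.getLogger(getClass).info("integration-test logging is wired up")
      }
    }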

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/KubernetesSuite.scala

Lines changed: 17 additions & 2 deletions
@@ -36,6 +36,7 @@ import org.apache.spark.deploy.kubernetes.Client
 import org.apache.spark.deploy.kubernetes.integrationtest.docker.SparkDockerImageBuilder
 import org.apache.spark.deploy.kubernetes.integrationtest.minikube.Minikube
 import org.apache.spark.deploy.kubernetes.integrationtest.restapis.SparkRestApiV1
+import org.apache.spark.internal.Logging
 import org.apache.spark.status.api.v1.{ApplicationStatus, StageStatus}
 import org.apache.spark.util.Utils

@@ -82,8 +83,15 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
 
   before {
     Eventually.eventually(TIMEOUT, INTERVAL) {
-      assert(minikubeKubernetesClient.pods().list().getItems.isEmpty)
-      assert(minikubeKubernetesClient.services().list().getItems.isEmpty)
+      val podsList = minikubeKubernetesClient.pods().list()
+      assert(podsList == null
+        || podsList.getItems == null
+        || podsList.getItems.isEmpty
+      )
+      val servicesList = minikubeKubernetesClient.services().list()
+      assert(servicesList == null
+        || servicesList.getItems == null
+        || servicesList.getItems.isEmpty)
     }
   }
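The rewritten assertions above tolerate a null list and a null getItems as well as an empty item collection, which the old one-liners did not. A hypothetical helper (not part of this commit, e.g. as a private method on the suite) that captures the same predicate once:

    // Treats a null list, a null item collection, or an empty collection
    // uniformly as "no leftover resources". `extract` pulls the items out,
    // e.g. _.getItems on a fabric8 PodList.
    def isEffectivelyEmpty[L, T](list: L)(extract: L => java.util.List[T]): Boolean =
      Option(list).map(extract).forall(items => items == null || items.isEmpty)

    // Usage against the suite's client:
    //   assert(isEffectivelyEmpty(minikubeKubernetesClient.pods().list())(_.getItems))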

@@ -139,6 +147,9 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
   }
 
   test("Run a simple example") {
+    // We'll make assertions based on spark rest api, so we need to turn on
+    // spark.ui.enabled explicitly since the scalatest-maven-plugin would set it
+    // to false by default.
     val sparkConf = new SparkConf(true)
       .setMaster(s"k8s://https://${Minikube.getMinikubeIp}:8443")
       .set("spark.kubernetes.submit.caCertFile", clientConfig.getCaCertFile)

@@ -152,6 +163,8 @@
       .set("spark.executor.cores", "1")
       .set("spark.executors.instances", "1")
       .set("spark.app.id", "spark-pi")
+      .set("spark.ui.enabled", "true")
+      .set("spark.testing", "false")
     val mainAppResource = s"file://$EXAMPLES_JAR"
 
     new Client(

@@ -174,6 +187,8 @@
       "--num-executors", "1",
       "--upload-jars", HELPER_JAR,
       "--class", MAIN_CLASS,
+      "--conf", "spark.ui.enabled=true",
+      "--conf", "spark.testing=false",
       "--conf", s"spark.kubernetes.submit.caCertFile=${clientConfig.getCaCertFile}",
       "--conf", s"spark.kubernetes.submit.clientKeyFile=${clientConfig.getClientKeyFile}",
       "--conf", s"spark.kubernetes.submit.clientCertFile=${clientConfig.getClientCertFile}",

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/kubernetes/integrationtest/docker/SparkDockerImageBuilder.scala

Lines changed: 1 addition & 2 deletions
@@ -55,5 +55,4 @@ private[spark] class SparkDockerImageBuilder(private val dockerEnv: Map[String,
     dockerClient.build(Paths.get("target", "docker", "driver"), "spark-driver")
     dockerClient.build(Paths.get("target", "docker", "executor"), "spark-executor")
   }
-
-}
+}
