Skip to content

Commit 3c1a16a

Browse files
liyinan926 authored and foxish committed
Removing deprecated configuration (apache#503)
Replaced apache#501 by @foxish.
1 parent 3eb04bb commit 3c1a16a

File tree

10 files changed

+7
-121
lines changed

10 files changed

+7
-121
lines changed

docs/running-on-kubernetes.md

Lines changed: 0 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -606,48 +606,6 @@ from the other deployment modes. See the [configuration page](configuration.html
606606
<code>myIdentifier</code>. Multiple annotations can be added by setting multiple configurations with this prefix.
607607
</td>
608608
</tr>
609-
<tr>
610-
<td><code>spark.kubernetes.driver.labels</code></td>
611-
<td>(none)</td>
612-
<td>
613-
<i>Deprecated.</i> Use <code>spark.kubernetes.driver.label.<labelKey></code> instead which supports <code>=</code>
614-
and <code>,</code> characters in label values.
615-
Custom labels that will be added to the driver pod. This should be a comma-separated list of label key-value pairs,
616-
where each label is in the format <code>key=value</code>. Note that Spark also adds its own labels to the driver pod
617-
for bookkeeping purposes.
618-
</td>
619-
</tr>
620-
<tr>
621-
<td><code>spark.kubernetes.driver.annotations</code></td>
622-
<td>(none)</td>
623-
<td>
624-
<i>Deprecated.</i> Use <code>spark.kubernetes.driver.annotation.<annotationKey></code> instead which supports
625-
<code>=</code> and <code>,</code> characters in annotation values.
626-
Custom annotations that will be added to the driver pod. This should be a comma-separated list of label key-value
627-
pairs, where each annotation is in the format <code>key=value</code>.
628-
</td>
629-
</tr>
630-
<tr>
631-
<td><code>spark.kubernetes.executor.labels</code></td>
632-
<td>(none)</td>
633-
<td>
634-
<i>Deprecated.</i> Use <code>spark.kubernetes.executor.label.<labelKey></code> instead which supports
635-
<code>=</code> and <code>,</code> characters in label values.
636-
Custom labels that will be added to the executor pods. This should be a comma-separated list of label key-value
637-
pairs, where each label is in the format <code>key=value</code>. Note that Spark also adds its own labels to the
638-
executor pods for bookkeeping purposes.
639-
</td>
640-
</tr>
641-
<tr>
642-
<td><code>spark.kubernetes.executor.annotations</code></td>
643-
<td>(none)</td>
644-
<td>
645-
<i>Deprecated.</i> Use <code>spark.kubernetes.executor.annotation.<annotationKey></code> instead which supports
646-
<code>=</code> and <code>,</code> characters in annotation values.
647-
Custom annotations that will be added to the executor pods. This should be a comma-separated list of annotation
648-
key-value pairs, where each annotation is in the format <code>key=value</code>.
649-
</td>
650-
</tr>
651609
<tr>
652610
<td><code>spark.kubernetes.driver.pod.name</code></td>
653611
<td>(none)</td>

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/ConfigurationUtils.scala

Lines changed: 0 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.deploy.k8s
1919

2020
import org.apache.spark.{SparkConf, SparkException}
2121
import org.apache.spark.internal.Logging
22-
import org.apache.spark.internal.config.OptionalConfigEntry
2322

2423
object ConfigurationUtils extends Logging {
2524
def parseKeyValuePairs(
@@ -41,31 +40,6 @@ object ConfigurationUtils extends Logging {
4140
}).getOrElse(Map.empty[String, String])
4241
}
4342

44-
def combinePrefixedKeyValuePairsWithDeprecatedConf(
45-
sparkConf: SparkConf,
46-
prefix: String,
47-
deprecatedConf: OptionalConfigEntry[String],
48-
configType: String): Map[String, String] = {
49-
val deprecatedKeyValuePairsString = sparkConf.get(deprecatedConf)
50-
deprecatedKeyValuePairsString.foreach { _ =>
51-
logWarning(s"Configuration with key ${deprecatedConf.key} is deprecated. Use" +
52-
s" configurations with prefix $prefix<key> instead.")
53-
}
54-
val fromDeprecated = parseKeyValuePairs(
55-
deprecatedKeyValuePairsString,
56-
deprecatedConf.key,
57-
configType)
58-
val fromPrefix = sparkConf.getAllWithPrefix(prefix)
59-
val combined = fromDeprecated.toSeq ++ fromPrefix
60-
combined.groupBy(_._1).foreach {
61-
case (key, values) =>
62-
require(values.size == 1,
63-
s"Cannot have multiple values for a given $configType key, got key $key with" +
64-
s" values $values")
65-
}
66-
combined.toMap
67-
}
68-
6943
def parsePrefixedKeyValuePairs(
7044
sparkConf: SparkConf,
7145
prefix: String,

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/config.scala

Lines changed: 1 addition & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -115,43 +115,12 @@ package object config extends Logging {
115115

116116
private[spark] val KUBERNETES_DRIVER_LABEL_PREFIX = "spark.kubernetes.driver.label."
117117
private[spark] val KUBERNETES_DRIVER_ANNOTATION_PREFIX = "spark.kubernetes.driver.annotation."
118+
118119
private[spark] val KUBERNETES_EXECUTOR_LABEL_PREFIX = "spark.kubernetes.executor.label."
119120
private[spark] val KUBERNETES_EXECUTOR_ANNOTATION_PREFIX = "spark.kubernetes.executor.annotation."
120121

121-
private[spark] val KUBERNETES_DRIVER_LABELS =
122-
ConfigBuilder("spark.kubernetes.driver.labels")
123-
.doc("Custom labels that will be added to the driver pod. This should be a comma-separated" +
124-
" list of label key-value pairs, where each label is in the format key=value. Note that" +
125-
" Spark also adds its own labels to the driver pod for bookkeeping purposes.")
126-
.stringConf
127-
.createOptional
128-
129122
private[spark] val KUBERNETES_DRIVER_ENV_KEY = "spark.kubernetes.driverEnv."
130123

131-
private[spark] val KUBERNETES_DRIVER_ANNOTATIONS =
132-
ConfigBuilder("spark.kubernetes.driver.annotations")
133-
.doc("Custom annotations that will be added to the driver pod. This should be a" +
134-
" comma-separated list of annotation key-value pairs, where each annotation is in the" +
135-
" format key=value.")
136-
.stringConf
137-
.createOptional
138-
139-
private[spark] val KUBERNETES_EXECUTOR_LABELS =
140-
ConfigBuilder("spark.kubernetes.executor.labels")
141-
.doc("Custom labels that will be added to the executor pods. This should be a" +
142-
" comma-separated list of label key-value pairs, where each label is in the format" +
143-
" key=value.")
144-
.stringConf
145-
.createOptional
146-
147-
private[spark] val KUBERNETES_EXECUTOR_ANNOTATIONS =
148-
ConfigBuilder("spark.kubernetes.executor.annotations")
149-
.doc("Custom annotations that will be added to the executor pods. This should be a" +
150-
" comma-separated list of annotation key-value pairs, where each annotation is in the" +
151-
" format key=value.")
152-
.stringConf
153-
.createOptional
154-
155124
private[spark] val KUBERNETES_DRIVER_SECRETS_PREFIX = "spark.kubernetes.driver.secrets."
156125
private[spark] val KUBERNETES_EXECUTOR_SECRETS_PREFIX = "spark.kubernetes.executor.secrets."
157126

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/DriverConfigurationStepsOrchestrator.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -72,10 +72,9 @@ private[spark] class DriverConfigurationStepsOrchestrator(
7272
.getOrElse(Array.empty[String]) ++
7373
additionalMainAppPythonFile.toSeq ++
7474
additionalPythonFiles
75-
val driverCustomLabels = ConfigurationUtils.combinePrefixedKeyValuePairsWithDeprecatedConf(
75+
val driverCustomLabels = ConfigurationUtils.parsePrefixedKeyValuePairs(
7676
submissionSparkConf,
7777
KUBERNETES_DRIVER_LABEL_PREFIX,
78-
KUBERNETES_DRIVER_LABELS,
7978
"label")
8079
require(!driverCustomLabels.contains(SPARK_APP_ID_LABEL), s"Label with key " +
8180
s" $SPARK_APP_ID_LABEL is not allowed as it is reserved for Spark bookkeeping" +
@@ -124,7 +123,7 @@ private[spark] class DriverConfigurationStepsOrchestrator(
124123
// Then, indicate to the outer block that the init-container should not handle
125124
// those local files simply by filtering them out.
126125
val sparkFilesWithoutLocal = KubernetesFileUtils.getNonSubmitterLocalFiles(sparkFiles)
127-
val smallFilesSecretName = s"${kubernetesAppId}-submitted-files"
126+
val smallFilesSecretName = s"$kubernetesAppId-submitted-files"
128127
val mountSmallFilesBootstrap = new MountSmallFilesBootstrapImpl(
129128
smallFilesSecretName, MOUNTED_SMALL_FILES_SECRET_MOUNT_PATH)
130129
val mountSmallLocalFilesStep = new MountSmallLocalFilesStep(

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/submit/submitsteps/BaseDriverConfigurationStep.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,10 +67,9 @@ private[spark] class BaseDriverConfigurationStep(
6767
.build()
6868
}
6969
val driverCustomAnnotations = ConfigurationUtils
70-
.combinePrefixedKeyValuePairsWithDeprecatedConf(
70+
.parsePrefixedKeyValuePairs(
7171
submissionSparkConf,
7272
KUBERNETES_DRIVER_ANNOTATION_PREFIX,
73-
KUBERNETES_DRIVER_ANNOTATIONS,
7473
"annotation")
7574
require(!driverCustomAnnotations.contains(SPARK_APP_NAME_ANNOTATION),
7675
s"Annotation with key $SPARK_APP_NAME_ANNOTATION is not allowed as it is reserved for" +

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorPodFactory.scala

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.scheduler.cluster.k8s
1919
import scala.collection.JavaConverters._
2020

2121
import io.fabric8.kubernetes.api.model.{ContainerBuilder, ContainerPortBuilder, EnvVar, EnvVarBuilder, EnvVarSourceBuilder, Pod, PodBuilder, QuantityBuilder}
22-
import org.apache.commons.io.FilenameUtils
2322

2423
import org.apache.spark.{SparkConf, SparkException}
2524
import org.apache.spark.deploy.k8s.{ConfigurationUtils, InitContainerResourceStagingServerSecretPlugin, PodWithDetachedInitContainer, SparkPodInitContainerBootstrap}
@@ -56,10 +55,9 @@ private[spark] class ExecutorPodFactoryImpl(
5655
org.apache.spark.internal.config.EXECUTOR_CLASS_PATH)
5756
private val executorJarsDownloadDir = sparkConf.get(INIT_CONTAINER_JARS_DOWNLOAD_LOCATION)
5857

59-
private val executorLabels = ConfigurationUtils.combinePrefixedKeyValuePairsWithDeprecatedConf(
58+
private val executorLabels = ConfigurationUtils.parsePrefixedKeyValuePairs(
6059
sparkConf,
6160
KUBERNETES_EXECUTOR_LABEL_PREFIX,
62-
KUBERNETES_EXECUTOR_LABELS,
6361
"executor label")
6462
require(
6563
!executorLabels.contains(SPARK_APP_ID_LABEL),
@@ -70,10 +68,9 @@ private[spark] class ExecutorPodFactoryImpl(
7068
s" Spark.")
7169

7270
private val executorAnnotations =
73-
ConfigurationUtils.combinePrefixedKeyValuePairsWithDeprecatedConf(
71+
ConfigurationUtils.parsePrefixedKeyValuePairs(
7472
sparkConf,
7573
KUBERNETES_EXECUTOR_ANNOTATION_PREFIX,
76-
KUBERNETES_EXECUTOR_ANNOTATIONS,
7774
"executor annotation")
7875
private val nodeSelector =
7976
ConfigurationUtils.parsePrefixedKeyValuePairs(

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/submitsteps/BaseDriverConfigurationStepSuite.scala

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,6 @@ private[spark] class BaseDriverConfigurationStepSuite extends SparkFunSuite {
3434
private val APP_ARGS = Array("arg1", "arg2")
3535
private val CUSTOM_ANNOTATION_KEY = "customAnnotation"
3636
private val CUSTOM_ANNOTATION_VALUE = "customAnnotationValue"
37-
private val DEPRECATED_CUSTOM_ANNOTATION_KEY = "customAnnotationDeprecated"
38-
private val DEPRECATED_CUSTOM_ANNOTATION_VALUE = "customAnnotationDeprecatedValue"
3937
private val DRIVER_CUSTOM_ENV_KEY1 = "customDriverEnv1"
4038
private val DRIVER_CUSTOM_ENV_KEY2 = "customDriverEnv2"
4139

@@ -49,8 +47,6 @@ private[spark] class BaseDriverConfigurationStepSuite extends SparkFunSuite {
4947
.set(KUBERNETES_DRIVER_MEMORY_OVERHEAD, 200L)
5048
.set(DRIVER_DOCKER_IMAGE, "spark-driver:latest")
5149
.set(s"spark.kubernetes.driver.annotation.$CUSTOM_ANNOTATION_KEY", CUSTOM_ANNOTATION_VALUE)
52-
.set("spark.kubernetes.driver.annotations",
53-
s"$DEPRECATED_CUSTOM_ANNOTATION_KEY=$DEPRECATED_CUSTOM_ANNOTATION_VALUE")
5450
.set(s"$KUBERNETES_DRIVER_ENV_KEY$DRIVER_CUSTOM_ENV_KEY1", "customDriverEnv1")
5551
.set(s"$KUBERNETES_DRIVER_ENV_KEY$DRIVER_CUSTOM_ENV_KEY2", "customDriverEnv2")
5652

@@ -98,7 +94,6 @@ private[spark] class BaseDriverConfigurationStepSuite extends SparkFunSuite {
9894
assert(driverPodMetadata.getLabels.asScala === DRIVER_LABELS)
9995
val expectedAnnotations = Map(
10096
CUSTOM_ANNOTATION_KEY -> CUSTOM_ANNOTATION_VALUE,
101-
DEPRECATED_CUSTOM_ANNOTATION_KEY -> DEPRECATED_CUSTOM_ANNOTATION_VALUE,
10297
SPARK_APP_NAME_ANNOTATION -> APP_NAME)
10398
assert(driverPodMetadata.getAnnotations.asScala === expectedAnnotations)
10499
assert(preparedDriverSpec.driverPod.getSpec.getRestartPolicy === "Never")

resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/submit/submitsteps/initcontainer/InitContainerConfigurationStepsOrchestratorSuite.scala

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,6 @@ class InitContainerConfigurationStepsOrchestratorSuite extends SparkFunSuite {
4646

4747
test ("error thrown if local jars provided without resource staging server") {
4848
val sparkConf = new SparkConf(true)
49-
.set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
5049
.set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)
5150

5251
assert(sparkConf.get(RESOURCE_STAGING_SERVER_URI).isEmpty)
@@ -72,7 +71,6 @@ class InitContainerConfigurationStepsOrchestratorSuite extends SparkFunSuite {
7271

7372
test ("error not thrown with non-local jars and resource staging server provided") {
7473
val sparkConf = new SparkConf(true)
75-
.set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
7674
.set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)
7775
.set(RESOURCE_STAGING_SERVER_URI, STAGING_SERVER_URI)
7876

@@ -97,7 +95,6 @@ class InitContainerConfigurationStepsOrchestratorSuite extends SparkFunSuite {
9795

9896
test ("error not thrown with non-local jars and no resource staging server provided") {
9997
val sparkConf = new SparkConf(true)
100-
.set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
10198
.set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)
10299

103100
val orchestrator = new InitContainerConfigurationStepsOrchestrator(
@@ -120,7 +117,6 @@ class InitContainerConfigurationStepsOrchestratorSuite extends SparkFunSuite {
120117

121118
test ("including step to contact resource staging server") {
122119
val sparkConf = new SparkConf(true)
123-
.set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
124120
.set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)
125121
.set(RESOURCE_STAGING_SERVER_URI, STAGING_SERVER_URI)
126122

@@ -145,7 +141,6 @@ class InitContainerConfigurationStepsOrchestratorSuite extends SparkFunSuite {
145141

146142
test ("not including steps because no contact to resource staging server") {
147143
val sparkConf = new SparkConf(true)
148-
.set(KUBERNETES_DRIVER_LABELS, s"$DEPRECATED_CUSTOM_LABEL_KEY=$DEPRECATED_CUSTOM_LABEL_VALUE")
149144
.set(s"$KUBERNETES_DRIVER_LABEL_PREFIX$CUSTOM_LABEL_KEY", CUSTOM_LABEL_VALUE)
150145

151146
val orchestrator = new InitContainerConfigurationStepsOrchestrator(

resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ private[spark] class KubernetesSuite extends SparkFunSuite with BeforeAndAfter {
6464
sparkConf = kubernetesTestComponents.newSparkConf()
6565
.set(INIT_CONTAINER_DOCKER_IMAGE, s"spark-init:latest")
6666
.set(DRIVER_DOCKER_IMAGE, s"spark-driver:latest")
67-
.set(KUBERNETES_DRIVER_LABELS, s"spark-app-locator=$APP_LOCATOR_LABEL")
67+
.set(s"${KUBERNETES_DRIVER_LABEL_PREFIX}spark-app-locator", APP_LOCATOR_LABEL)
6868
kubernetesTestComponents.createNamespace()
6969
}
7070

0 commit comments

Comments (0)