
Commit 188c968

Move spark.executor.ExecutorLogUrlHandler to spark.LogUrlHandler
1 parent 6fc236c

4 files changed: 13 additions, 15 deletions


core/src/main/scala/org/apache/spark/executor/ExecutorLogUrlHandler.scala renamed to core/src/main/scala/org/apache/spark/LogUrlHandler.scala

Lines changed: 7 additions & 7 deletions
@@ -15,17 +15,17 @@
  * limitations under the License.
  */
 
-package org.apache.spark.executor
+package org.apache.spark
 
 import java.util.concurrent.atomic.AtomicBoolean
 
 import scala.util.matching.Regex
 
-import org.apache.spark.internal.{Logging, MDC}
-import org.apache.spark.internal.LogKeys
+import org.apache.spark.internal.{Logging, LogKeys, MDC}
 
-private[spark] class ExecutorLogUrlHandler(logUrlPattern: Option[String]) extends Logging {
-  import ExecutorLogUrlHandler._
+
+private[spark] class LogUrlHandler(logUrlPattern: Option[String]) extends Logging {
+  import LogUrlHandler._
 
   private val informedForMissingAttributes = new AtomicBoolean(false)
 
@@ -83,14 +83,14 @@ private[spark] class ExecutorLogUrlHandler(logUrlPattern: Option[String]) extend
       allPatterns: Set[String],
       allAttributes: Set[String]): Unit = {
     if (informedForMissingAttributes.compareAndSet(false, true)) {
-      logInfo(log"Fail to renew executor log urls: ${MDC(LogKeys.REASON, reason)}." +
+      logInfo(log"Fail to renew log urls: ${MDC(LogKeys.REASON, reason)}." +
         log" Required: ${MDC(LogKeys.REGEX, allPatterns)} / " +
         log"available: ${MDC(LogKeys.ATTRIBUTE_MAP, allAttributes)}." +
         log" Falling back to show app's original log urls.")
     }
   }
 }
 
-private[spark] object ExecutorLogUrlHandler {
+private[spark] object LogUrlHandler {
   val CUSTOM_URL_PATTERN_REGEX: Regex = "\\{\\{([A-Za-z0-9_\\-]+)\\}\\}".r
 }
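Note: CUSTOM_URL_PATTERN_REGEX captures {{NAME}} placeholders in a configured URL pattern. The following is a minimal standalone sketch of the substitution that regex enables, not the class's actual applyPattern implementation; the proxy URL and attribute names are made up for illustration.

import scala.util.matching.Regex

object LogUrlPatternDemo {
  // Same pattern as LogUrlHandler.CUSTOM_URL_PATTERN_REGEX above:
  // matches {{NAME}} and captures NAME.
  val CUSTOM_URL_PATTERN_REGEX: Regex = "\\{\\{([A-Za-z0-9_\\-]+)\\}\\}".r

  // Substitute every {{NAME}} with attributes(NAME); return None when any
  // referenced attribute is missing (the real class logs once and falls back).
  def render(pattern: String, attributes: Map[String, String]): Option[String] = {
    val required = CUSTOM_URL_PATTERN_REGEX.findAllMatchIn(pattern).map(_.group(1)).toSet
    if (required.subsetOf(attributes.keySet)) {
      Some(CUSTOM_URL_PATTERN_REGEX.replaceAllIn(
        pattern, m => Regex.quoteReplacement(attributes(m.group(1)))))
    } else {
      None
    }
  }

  def main(args: Array[String]): Unit = {
    val attrs = Map("APP_ID" -> "app-42", "EXECUTOR_ID" -> "3", "FILE_NAME" -> "stdout")
    // Hypothetical proxy URL pattern, for illustration only.
    println(render("https://logproxy.example.com/{{APP_ID}}/{{EXECUTOR_ID}}/{{FILE_NAME}}", attrs))
    // => Some(https://logproxy.example.com/app-42/3/stdout)
    println(render("https://logproxy.example.com/{{UNKNOWN}}", attrs)) // => None
  }
}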

core/src/main/scala/org/apache/spark/deploy/history/HistoryAppStatusStore.scala

Lines changed: 2 additions & 3 deletions
@@ -17,8 +17,7 @@
 
 package org.apache.spark.deploy.history
 
-import org.apache.spark.SparkConf
-import org.apache.spark.executor.ExecutorLogUrlHandler
+import org.apache.spark.{LogUrlHandler, SparkConf}
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config.History._
 import org.apache.spark.status.AppStatusStore
@@ -40,7 +39,7 @@ private[spark] class HistoryAppStatusStore(
     }
   }
 
-  private val logUrlHandler = new ExecutorLogUrlHandler(logUrlPattern)
+  private val logUrlHandler = new LogUrlHandler(logUrlPattern)
 
   override def executorList(activeOnly: Boolean): Seq[v1.ExecutorSummary] = {
     val execList = super.executorList(activeOnly)

core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala

Lines changed: 2 additions & 3 deletions
@@ -27,11 +27,10 @@ import scala.concurrent.Future
 import com.google.common.cache.CacheBuilder
 import org.apache.hadoop.security.UserGroupInformation
 
-import org.apache.spark.{ExecutorAllocationClient, SparkEnv, TaskState}
+import org.apache.spark.{ExecutorAllocationClient, LogUrlHandler, SparkEnv, TaskState}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.deploy.security.HadoopDelegationTokenManager
 import org.apache.spark.errors.SparkCoreErrors
-import org.apache.spark.executor.ExecutorLogUrlHandler
 import org.apache.spark.internal.{config, Logging, MDC}
 import org.apache.spark.internal.LogKeys
 import org.apache.spark.internal.LogKeys._
@@ -156,7 +155,7 @@ class CoarseGrainedSchedulerBackend(scheduler: TaskSchedulerImpl, val rpcEnv: Rp
     .filter { case (k, _) => k.startsWith("spark.") }
     .toImmutableArraySeq
 
-  private val logUrlHandler: ExecutorLogUrlHandler = new ExecutorLogUrlHandler(
+  private val logUrlHandler: LogUrlHandler = new LogUrlHandler(
     conf.get(UI.CUSTOM_EXECUTOR_LOG_URL))
 
   override def onStart(): Unit = {
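Note: the backend builds one handler from the custom executor log URL pattern and reuses it as executors register. A hedged sketch of that flow follows, written as if inside org.apache.spark because LogUrlHandler and SparkConf.get(ConfigEntry) are private[spark]; the URLs and attribute values are invented, and applyPattern is assumed to return the original URLs untouched when no pattern is configured or attributes are missing.

package org.apache.spark // required: LogUrlHandler is private[spark]

import org.apache.spark.internal.config.UI

object RenewLogUrlsSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .set(UI.CUSTOM_EXECUTOR_LOG_URL.key,
        "https://logproxy.example.com/{{APP_ID}}/{{EXECUTOR_ID}}/{{FILE_NAME}}")
    // Mirrors the construction in CoarseGrainedSchedulerBackend above.
    val handler = new LogUrlHandler(conf.get(UI.CUSTOM_EXECUTOR_LOG_URL))

    // Raw URLs and attributes as an executor might report them (illustrative values;
    // LOG_FILES is assumed to list the file names {{FILE_NAME}} can expand over).
    val rawLogUrls = Map("stdout" -> "http://node1:8042/stdout")
    val attributes = Map(
      "APP_ID" -> "app-42", "EXECUTOR_ID" -> "3", "LOG_FILES" -> "stdout,stderr")

    println(handler.applyPattern(rawLogUrls, attributes))
  }
}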

resource-managers/kubernetes/core/src/main/scala/org/apache/spark/scheduler/cluster/k8s/KubernetesClusterSchedulerBackend.scala

Lines changed: 2 additions & 2 deletions
@@ -27,7 +27,7 @@ import io.fabric8.kubernetes.api.model.Pod
 import io.fabric8.kubernetes.api.model.PodBuilder
 import io.fabric8.kubernetes.client.KubernetesClient
 
-import org.apache.spark.SparkContext
+import org.apache.spark.{LogUrlHandler, SparkContext}
 import org.apache.spark.deploy.k8s.{KubernetesConf, KubernetesUtils}
 import org.apache.spark.deploy.k8s.Config._
 import org.apache.spark.deploy.k8s.Constants._
@@ -115,7 +115,7 @@ private[spark] class KubernetesClusterSchedulerBackend(
     Some(Map("LOG_FILES" -> "log") ++ extractAttributes)
 
   override def getDriverLogUrls: Option[Map[String, String]] = {
-    val logUrlHandler = new ExecutorLogUrlHandler(conf.get(UI.CUSTOM_DRIVER_LOG_URL))
+    val logUrlHandler = new LogUrlHandler(conf.get(UI.CUSTOM_DRIVER_LOG_URL))
     getDriverAttributes.map(attr => logUrlHandler.applyPattern(Map.empty, attr)).filter(_.nonEmpty)
   }
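Note: the driver-side path above renders log URLs from attributes alone, hence applyPattern(Map.empty, attr) with no original URLs to fall back on. A rough sketch under the same private[spark] assumptions as before; the pattern and the POD_NAME attribute are hypothetical, and only "LOG_FILES" -> "log" mirrors getDriverAttributes above.

package org.apache.spark // as before: LogUrlHandler is private[spark]

object DriverLogUrlsSketch {
  def main(args: Array[String]): Unit = {
    val handler = new LogUrlHandler(
      Some("https://logs.example.com/{{POD_NAME}}/{{FILE_NAME}}")) // hypothetical pattern
    val attributes = Map(
      "LOG_FILES" -> "log",             // mirrors getDriverAttributes above
      "POD_NAME" -> "myapp-driver-pod") // hypothetical attribute name and value
    // With a {{FILE_NAME}} placeholder, the handler is expected to emit one URL
    // per LOG_FILES entry ("log" here), matching getDriverLogUrls above.
    println(handler.applyPattern(Map.empty, attributes))
  }
}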
