Skip to content

Commit a37ad4f

Browse files
committed
Comments, imports and formatting (minor)
1 parent cd000b0 commit a37ad4f

File tree

16 files changed

+28
-21
lines changed

16 files changed

+28
-21
lines changed

core/src/main/scala/org/apache/spark/deploy/master/Master.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,6 @@ private[spark] class Master(
8585
val masterSource = new MasterSource(this)
8686

8787
val webUi = new MasterWebUI(this, webUiPort)
88-
webUi.start()
8988

9089
val masterPublicAddress = {
9190
val envVar = System.getenv("SPARK_PUBLIC_DNS")
@@ -116,6 +115,7 @@ private[spark] class Master(
116115
logInfo("Starting Spark master at " + masterUrl)
117116
// Listen for remote client disconnection events, since they don't go through Akka's watch()
118117
context.system.eventStream.subscribe(self, classOf[RemotingLifecycleEvent])
118+
webUi.start()
119119
webUi.bind()
120120
masterWebUiUrl = "http://" + masterPublicAddress + ":" + webUi.boundPort
121121
context.system.scheduler.schedule(0 millis, WORKER_TIMEOUT millis, self, CheckForWorkerTimeOut)

core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ class MasterWebUI(val master: Master, requestedPort: Int)
3535
val masterActorRef = master.self
3636
val timeout = AkkaUtils.askTimeout(master.conf)
3737

38+
/** Initialize all components of the server. Must be called before bind(). */
3839
def start() {
3940
attachPage(new ApplicationPage(this))
4041
attachPage(new IndexPage(this))

core/src/main/scala/org/apache/spark/deploy/worker/Worker.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -122,10 +122,10 @@ private[spark] class Worker(
122122
host, port, cores, Utils.megabytesToString(memory)))
123123
logInfo("Spark home: " + sparkHome)
124124
createWorkDir()
125+
context.system.eventStream.subscribe(self, classOf[RemotingLifecycleEvent])
125126
webUi = new WorkerWebUI(this, workDir, Some(webUiPort))
126127
webUi.start()
127128
webUi.bind()
128-
context.system.eventStream.subscribe(self, classOf[RemotingLifecycleEvent])
129129
registerWithMaster()
130130

131131
metricsSystem.registerSource(workerSource)

core/src/main/scala/org/apache/spark/deploy/worker/ui/WorkerWebUI.scala

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ class WorkerWebUI(val worker: Worker, val workDir: File, requestedPort: Option[I
3838
worker.conf.get("worker.ui.port", WorkerWebUI.DEFAULT_PORT).toInt)
3939
val timeout = AkkaUtils.askTimeout(worker.conf)
4040

41+
/** Initialize all components of the server. Must be called before bind(). */
4142
def start() {
4243
val logPage = new LogPage(this)
4344
attachPage(logPage)

core/src/main/scala/org/apache/spark/ui/WebUI.scala

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -19,15 +19,16 @@ package org.apache.spark.ui
1919

2020
import javax.servlet.http.HttpServletRequest
2121

22+
import scala.collection.mutable.ArrayBuffer
23+
import scala.xml.Node
24+
2225
import org.eclipse.jetty.servlet.ServletContextHandler
26+
import org.json4s.JsonAST.{JNothing, JValue}
2327

2428
import org.apache.spark.SecurityManager
29+
import org.apache.spark.scheduler.SparkListener
2530
import org.apache.spark.ui.JettyUtils._
2631
import org.apache.spark.util.Utils
27-
import scala.collection.mutable.ArrayBuffer
28-
import org.apache.spark.scheduler.SparkListener
29-
import scala.xml.Node
30-
import org.json4s.JsonAST.{JNothing, JValue}
3132

3233
/**
3334
* The top level component of the UI hierarchy that contains the server.
@@ -70,6 +71,9 @@ private[spark] abstract class WebUI(securityManager: SecurityManager, basePath:
7071
/** Return a list of handlers attached to this UI. */
7172
def getHandlers = handlers.toSeq
7273

74+
/** Initialize all components of the server. Must be called before bind(). */
75+
def start()
76+
7377
/**
7478
* Bind to the HTTP server behind this web interface.
7579
* Overridden implementation should set serverInfo.
@@ -101,6 +105,7 @@ private[spark] abstract class UITab(val prefix: String) {
101105
pages += page
102106
}
103107

108+
/** Initialize listener and attach pages. */
104109
def start()
105110
}
106111

core/src/main/scala/org/apache/spark/ui/env/EnvironmentTab.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ private[ui] class EnvironmentTab(parent: SparkUI) extends UITab("environment") {
2929
attachPage(new IndexPage(this))
3030
}
3131

32-
def environmentListener = {
32+
def environmentListener: EnvironmentListener = {
3333
assert(listener.isDefined, "EnvironmentTab has not started yet!")
3434
listener.get.asInstanceOf[EnvironmentListener]
3535
}

core/src/main/scala/org/apache/spark/ui/exec/ExecutorsTab.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ private[ui] class ExecutorsTab(parent: SparkUI) extends UITab("executors") {
3333
attachPage(new IndexPage(this))
3434
}
3535

36-
def executorsListener = {
36+
def executorsListener: ExecutorsListener = {
3737
assert(listener.isDefined, "ExecutorsTab has not started yet!")
3838
listener.get.asInstanceOf[ExecutorsListener]
3939
}

core/src/main/scala/org/apache/spark/ui/exec/IndexPage.scala

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -50,11 +50,11 @@ private[ui] class IndexPage(parent: ExecutorsTab) extends UIPage("") {
5050
</ul>
5151
</div>
5252
</div>
53-
<div class = "row">
54-
<div class="span12">
55-
{execTable}
56-
</div>
57-
</div>;
53+
<div class = "row">
54+
<div class="span12">
55+
{execTable}
56+
</div>
57+
</div>;
5858

5959
UIUtils.headerSparkPage(
6060
content, basePath, appName, "Executors (" + execInfo.size + ")", Executors)

core/src/main/scala/org/apache/spark/ui/jobs/IndexPage.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest
2222
import scala.xml.{Node, NodeSeq}
2323

2424
import org.apache.spark.scheduler.Schedulable
25-
import org.apache.spark.ui.Page._
2625
import org.apache.spark.ui.{UIPage, UIUtils}
26+
import org.apache.spark.ui.Page.Stages
2727

2828
/** Page showing list of all ongoing and recently finished stages and pools */
2929
private[ui] class IndexPage(parent: JobProgressTab) extends UIPage("") {

core/src/main/scala/org/apache/spark/ui/jobs/JobProgressTab.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ private[ui] class JobProgressTab(parent: SparkUI) extends UITab("stages") {
3636
attachPage(new PoolPage(this))
3737
}
3838

39-
def jobProgressListener = {
39+
def jobProgressListener: JobProgressListener = {
4040
assert(listener.isDefined, "JobProgressTab has not started yet!")
4141
listener.get.asInstanceOf[JobProgressListener]
4242
}

core/src/main/scala/org/apache/spark/ui/jobs/PoolPage.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest
2222
import scala.xml.Node
2323

2424
import org.apache.spark.scheduler.{Schedulable, StageInfo}
25-
import org.apache.spark.ui.Page._
2625
import org.apache.spark.ui.{UIPage, UIUtils}
26+
import org.apache.spark.ui.Page.Stages
2727

2828
/** Page showing specific pool details */
2929
private[ui] class PoolPage(parent: JobProgressTab) extends UIPage("pool") {

core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest
2222

2323
import scala.xml.Node
2424

25-
import org.apache.spark.ui.Page._
2625
import org.apache.spark.ui.{UIPage, UIUtils}
26+
import org.apache.spark.ui.Page.Stages
2727
import org.apache.spark.util.{Utils, Distribution}
2828

2929
/** Page showing statistics and task list for a given stage */

core/src/main/scala/org/apache/spark/ui/jobs/StageTable.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ import scala.collection.mutable.HashMap
2323
import scala.xml.Node
2424

2525
import org.apache.spark.scheduler.{StageInfo, TaskInfo}
26-
import org.apache.spark.ui.{WebUI, UIUtils}
26+
import org.apache.spark.ui.UIUtils
2727
import org.apache.spark.util.Utils
2828

2929
/** Page showing list of all ongoing and recently finished stages */

core/src/main/scala/org/apache/spark/ui/storage/BlockManagerTab.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ private[ui] class BlockManagerTab(parent: SparkUI) extends UITab("storage") {
3434
attachPage(new RddPage(this))
3535
}
3636

37-
def blockManagerListener = {
37+
def blockManagerListener: BlockManagerListener = {
3838
assert(listener.isDefined, "BlockManagerTab has not started yet!")
3939
listener.get.asInstanceOf[BlockManagerListener]
4040
}

core/src/main/scala/org/apache/spark/ui/storage/IndexPage.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest
2222
import scala.xml.Node
2323

2424
import org.apache.spark.storage.RDDInfo
25-
import org.apache.spark.ui.Page._
2625
import org.apache.spark.ui.{UIPage, UIUtils}
26+
import org.apache.spark.ui.Page.Storage
2727
import org.apache.spark.util.Utils
2828

2929
/** Page showing list of RDD's currently stored in the cluster */

core/src/main/scala/org/apache/spark/ui/storage/RDDPage.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,8 @@ import javax.servlet.http.HttpServletRequest
2222
import scala.xml.Node
2323

2424
import org.apache.spark.storage.{BlockId, BlockStatus, StorageStatus, StorageUtils}
25-
import org.apache.spark.ui.Page._
2625
import org.apache.spark.ui.{UIPage, UIUtils}
26+
import org.apache.spark.ui.Page.Storage
2727
import org.apache.spark.util.Utils
2828

2929
/** Page showing storage details for a given RDD */

0 commit comments

Comments (0)