
Commit c21f8d8

Author: Marcelo Vanzin (committed)

Feedback: formatting, docs.

1 parent dd8cc4b · commit c21f8d8

File tree

5 files changed (+25, -25 lines)


core/src/main/scala/org/apache/spark/deploy/history/ApplicationHistoryProvider.scala

Lines changed: 1 addition & 2 deletions
@@ -25,8 +25,7 @@ private[spark] case class ApplicationHistoryInfo(
     startTime: Long,
     endTime: Long,
     lastUpdated: Long,
-    sparkUser: String) {
-}
+    sparkUser: String)
 
 private[spark] abstract class ApplicationHistoryProvider {
 
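For reference, a minimal sketch of how the case class reads after this change. The parameters before startTime are not part of this hunk, so the leading id and name fields below are assumptions inferred from the ApplicationHistoryInfo(...) call in FsHistoryProvider further down.

    // Sketch only: `id` and `name` are assumed, not shown in the hunk above.
    private[spark] case class ApplicationHistoryInfo(
        id: String,
        name: String,
        startTime: Long,
        endTime: Long,
        lastUpdated: Long,
        sparkUser: String)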

core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala

Lines changed: 5 additions & 5 deletions
@@ -116,10 +116,9 @@ private[history] class FsHistoryProvider(conf: SparkConf) extends ApplicationHis
     try {
       val logStatus = fs.listStatus(new Path(logDir))
       val logDirs = if (logStatus != null) logStatus.filter(_.isDir).toSeq else Seq[FileStatus]()
-      val logInfos = logDirs
-        .filter {
-          dir => fs.isFile(new Path(dir.getPath(), EventLoggingListener.APPLICATION_COMPLETE))
-        }
+      val logInfos = logDirs.filter {
+        dir => fs.isFile(new Path(dir.getPath(), EventLoggingListener.APPLICATION_COMPLETE))
+      }
 
       val currentApps = Map[String, ApplicationHistoryInfo](
         appList.map(app => (app.id -> app)):_*)
@@ -177,7 +176,8 @@ private[history] class FsHistoryProvider(conf: SparkConf) extends ApplicationHis
       }
 
       replayBus.replay()
-      val appInfo = ApplicationHistoryInfo(appId,
+      val appInfo = ApplicationHistoryInfo(
+        appId,
         appListener.appName,
         appListener.startTime,
         appListener.endTime,
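The reformatted filter keeps only application directories that contain the APPLICATION_COMPLETE marker file. A self-contained sketch of the same check, assuming a Hadoop FileSystem handle and using a literal marker name in place of EventLoggingListener.APPLICATION_COMPLETE:

    import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}

    // Sketch: returns the sub-directories of `logDir` that hold a completed
    // application log; "APPLICATION_COMPLETE" stands in for
    // EventLoggingListener.APPLICATION_COMPLETE.
    def completedAppDirs(fs: FileSystem, logDir: String): Seq[FileStatus] = {
      val logStatus = fs.listStatus(new Path(logDir))
      val logDirs = if (logStatus != null) logStatus.filter(_.isDir).toSeq else Seq[FileStatus]()
      logDirs.filter { dir =>
        fs.isFile(new Path(dir.getPath(), "APPLICATION_COMPLETE"))
      }
    }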

core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala

Lines changed: 4 additions & 6 deletions
@@ -50,8 +50,6 @@ class HistoryServer(
   // How many applications to retain
   private val retainedApplications = conf.getInt("spark.history.retainedApplications", 50)
 
-  private val localHost = Utils.localHostName()
-
   private val appLoader = new CacheLoader[String, SparkUI] {
     override def load(key: String): SparkUI = {
       val ui = provider.getAppUI(key)
@@ -192,10 +190,10 @@ object HistoryServer {
     server.bind()
 
     Runtime.getRuntime().addShutdownHook(new Thread("HistoryServerStopper") {
-        override def run() = {
-          server.stop()
-        }
-      })
+      override def run() = {
+        server.stop()
+      }
+    })
 
     // Wait until the end of the world... or if the HistoryServer process is manually stopped
     while(true) { Thread.sleep(Int.MaxValue) }
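The appLoader in the first hunk, together with spark.history.retainedApplications, points at a bounded Guava cache of loaded SparkUI instances. A rough sketch of that pattern, with String standing in for SparkUI and loadAppUI standing in for provider.getAppUI, since that wiring is not shown here:

    import com.google.common.cache.{CacheBuilder, CacheLoader}

    // Hypothetical stand-in for provider.getAppUI(appId).
    def loadAppUI(appId: String): String = s"UI for $appId"

    val appLoader = new CacheLoader[String, String] {
      override def load(key: String): String = loadAppUI(key)
    }

    // Keep at most `retainedApplications` UIs loaded, mirroring the config above.
    val retainedApplications = 50
    val appCache = CacheBuilder.newBuilder()
      .maximumSize(retainedApplications)
      .build(appLoader)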

core/src/main/scala/org/apache/spark/deploy/history/HistoryServerArguments.scala

Lines changed: 10 additions & 8 deletions
@@ -57,17 +57,19 @@ private[spark] class HistoryServerArguments(conf: SparkConf, args: Array[String]
       |
       |History Server options:
       |
-      | spark.history.ui.port              Port where server will listen for connections (default 18080)
-      | spark.history.acls.enable          Whether to enable view acls for all applications (default false)
-      | spark.history.provider             Name of history provider class (defaults to file system-based
-      |                                     provider)
+      | spark.history.ui.port              Port where server will listen for connections
+      |                                     (default 18080)
+      | spark.history.acls.enable          Whether to enable view acls for all applications
+      |                                     (default false)
+      | spark.history.provider             Name of history provider class (defaults to
+      |                                     file system-based provider)
       | spark.history.retainedApplications Max number of application UIs to keep loaded in memory
-      |                                     (default 50)
+      |                                      (default 50)
       |FsHistoryProvider options:
       |
-      | spark.history.fs.logDirectory      Directory where app logs are stored (required)
-      | spark.history.fs.updateInterval    How often to reload log data from storage (seconds,
-      |                                     default 10)
+      | spark.history.fs.logDirectory       Directory where app logs are stored (required)
+      | spark.history.fs.updateInterval     How often to reload log data from storage (in seconds,
+      |                                      default 10)
       |""".stripMargin)
     System.exit(exitCode)
   }
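The options listed in this usage text are Spark configuration properties rather than command-line flags. A hypothetical sketch of setting them on a SparkConf; only the property names come from the usage text above, the values are made up:

    import org.apache.spark.SparkConf

    // Hypothetical values; only the property names come from the usage text.
    val conf = new SparkConf()
      .set("spark.history.ui.port", "18080")
      .set("spark.history.retainedApplications", "50")
      .set("spark.history.fs.logDirectory", "/tmp/spark-events")
      .set("spark.history.fs.updateInterval", "10")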

docs/monitoring.md

Lines changed: 5 additions & 4 deletions
@@ -38,9 +38,9 @@ You can start a the history server by executing:
     ./sbin/start-history-server.sh
 
 When using the file-system provider class (see spark.history.provider below), the base logging
-directory must be supplied in the "spark.history.fs.logDirectory" configuration option, and should
-contain sub-directories that each represents an application's event logs. This creates a web
-interface at `http://<server-url>:18080` by default. The history server can be configured as
+directory must be supplied in the <code>spark.history.fs.logDirectory</code> configuration option,
+and should contain sub-directories that each represents an application's event logs. This creates a
+web interface at `http://<server-url>:18080` by default. The history server can be configured as
 follows:
 
 <table class="table">
@@ -74,7 +74,8 @@ follows:
     <td>spark.history.provider</td>
     <td>org.apache.spark.deploy.history.FsHistoryProvider</td>
     <td>Name of the class implementing the application history backend. Currently there is only
-    one implementation provided by Spark, which matches the default value.</td>
+    one implementation, provided by Spark, which looks for application logs stored in the
+    file system.</td>
   </tr>
   <tr>
     <td>spark.history.fs.updateInterval</td>
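The per-application sub-directories that the doc text refers to are produced by each application's own event logging settings; a hypothetical application-side configuration that would populate such a base directory (the path is an example):

    import org.apache.spark.SparkConf

    // Hypothetical application-side settings; the history server's
    // spark.history.fs.logDirectory would then point at the same base directory.
    val appConf = new SparkConf()
      .setAppName("example-app")
      .set("spark.eventLog.enabled", "true")
      .set("spark.eventLog.dir", "/tmp/spark-events")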
