Commit c8196b8

minor update

1 parent f7a15d6

3 files changed: +37, -41 lines


core/src/main/scala/org/apache/spark/status/AppStatusStore.scala

Lines changed: 2 additions & 4 deletions

```diff
@@ -275,8 +275,7 @@ private[spark] class AppStatusStore(
         t.gettingResultTime
       },
       schedulerDelay = scanTasks(TaskIndexNames.SCHEDULER_DELAY) { t => t.schedulerDelay },
-      peakExecutionMemory = scanTasks(TaskIndexNames.PEAK_MEM) { t =>
-        t.peakExecutionMemory },
+      peakExecutionMemory = scanTasks(TaskIndexNames.PEAK_MEM) { t => t.peakExecutionMemory },
       memoryBytesSpilled = scanTasks(TaskIndexNames.MEM_SPILL) { t => t.memoryBytesSpilled },
       diskBytesSpilled = scanTasks(TaskIndexNames.DISK_SPILL) { t => t.diskBytesSpilled },
       inputMetrics = new v1.InputMetricDistributions(
@@ -290,8 +289,7 @@ private[spark] class AppStatusStore(
         m.shuffleLocalBytesRead + m.shuffleRemoteBytesRead
       },
       scanTasks(TaskIndexNames.SHUFFLE_READ_RECORDS) { t => t.shuffleRecordsRead },
-      scanTasks(TaskIndexNames.SHUFFLE_REMOTE_BLOCKS) { t =>
-        t.shuffleRemoteBlocksFetched },
+      scanTasks(TaskIndexNames.SHUFFLE_REMOTE_BLOCKS) { t => t.shuffleRemoteBlocksFetched },
       scanTasks(TaskIndexNames.SHUFFLE_LOCAL_BLOCKS) { t => t.shuffleLocalBlocksFetched },
       scanTasks(TaskIndexNames.SHUFFLE_READ_TIME) { t => t.shuffleFetchWaitTime },
       scanTasks(TaskIndexNames.SHUFFLE_REMOTE_READS) { t => t.shuffleRemoteBytesRead },
```

core/src/main/scala/org/apache/spark/status/LiveEntity.scala

Lines changed: 1 addition & 3 deletions

```diff
@@ -731,9 +731,7 @@ private object LiveEntityHelpers {
    * Convert all the metric values to negative as well as handle zero values.
    * This method assumes that all the metric values are greater than or equal to zero
    */
-  def makeNegative(
-      m: v1.TaskMetrics,
-      handleZeros: mutable.HashSet[String]): v1.TaskMetrics = {
+  def makeNegative(m: v1.TaskMetrics, handleZeros: mutable.HashSet[String]): v1.TaskMetrics = {
     // If the metric value is 0, then make -1 and update the metric index in handleZeros.
     def updateMetricValue(metric: Long, index: String): Long = {
       if (metric == 0L) {
```
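
For readers following the logic: `makeNegative` stores the metrics of non-successful tasks as negative values, and since a true zero has no negative counterpart, it is written as -1 while its index name is recorded in `handleZeros`. Below is a minimal, self-contained sketch of that encoding step. The hunk above shows only the zero branch of `updateMetricValue`, so negating non-zero values (which the doc comment says are >= 0 on entry) is an assumption here, and the enclosing object name is invented for illustration:

```scala
import scala.collection.mutable

object MakeNegativeSketch {
  // Sketch of `updateMetricValue` from the hunk above: a zero metric is
  // stored as -1 and its index is remembered in `handleZeros`; non-zero
  // metrics (documented as >= 0) are assumed to be stored negated.
  def updateMetricValue(metric: Long, index: String,
      handleZeros: mutable.HashSet[String]): Long = {
    if (metric == 0L) {
      handleZeros.add(index)
      -1L
    } else {
      metric * -1
    }
  }

  def main(args: Array[String]): Unit = {
    val handleZeros = mutable.HashSet.empty[String]
    println(updateMetricValue(42L, "runTime", handleZeros)) // -42
    println(updateMetricValue(0L, "gcTime", handleZeros))   // -1
    println(handleZeros)                                    // HashSet(gcTime)
  }
}
```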

core/src/main/scala/org/apache/spark/status/storeTypes.scala

Lines changed: 34 additions & 34 deletions

```diff
@@ -182,8 +182,7 @@ private[spark] class TaskDataWrapper(

     val hasMetrics: Boolean,
     // Non successful metrics will have negative values in `TaskDataWrapper`.
-    // zero metric value will be converted to -1 and update the index in the hashset.
-    // However `TaskData` will have actual metric values. To recover the actual metric value
+    // `TaskData` will have actual metric values. To recover the actual metric value
     // from `TaskDataWrapper`, need use `getMetricValue` method. parameter `handleZero` is to
     // check whether the index has zero metric value, which is used in the `getMetricValue`.
     val handleZero: HashSet[String],
@@ -244,7 +243,7 @@ private[spark] class TaskDataWrapper(
     val stageAttemptId: Int) {

   // To handle non successful tasks metrics (Running, Failed, Killed).
-  private def gerMetricValue(metric: Long, index: String): Long = {
+  private def getMetricValue(metric: Long, index: String): Long = {
     if (handleZero(index)) {
       0L
     } else {
@@ -255,34 +254,35 @@ private[spark] class TaskDataWrapper(
   def toApi: TaskData = {
     val metrics = if (hasMetrics) {
       Some(new TaskMetrics(
-        gerMetricValue(executorDeserializeTime, TaskIndexNames.DESER_TIME),
-        gerMetricValue(executorDeserializeCpuTime, TaskIndexNames.DESER_CPU_TIME),
-        gerMetricValue(executorRunTime, TaskIndexNames.EXEC_RUN_TIME),
-        gerMetricValue(executorCpuTime, TaskIndexNames.EXEC_CPU_TIME),
-        gerMetricValue(resultSize, TaskIndexNames.RESULT_SIZE),
-        gerMetricValue(jvmGcTime, TaskIndexNames.GC_TIME),
-        gerMetricValue(resultSerializationTime, TaskIndexNames.SER_TIME),
-        gerMetricValue(memoryBytesSpilled, TaskIndexNames.MEM_SPILL),
-        gerMetricValue(diskBytesSpilled, TaskIndexNames.DISK_SPILL),
-        gerMetricValue(peakExecutionMemory, TaskIndexNames.PEAK_MEM),
+        getMetricValue(executorDeserializeTime, TaskIndexNames.DESER_TIME),
+        getMetricValue(executorDeserializeCpuTime, TaskIndexNames.DESER_CPU_TIME),
+        getMetricValue(executorRunTime, TaskIndexNames.EXEC_RUN_TIME),
+        getMetricValue(executorCpuTime, TaskIndexNames.EXEC_CPU_TIME),
+        getMetricValue(resultSize, TaskIndexNames.RESULT_SIZE),
+        getMetricValue(jvmGcTime, TaskIndexNames.GC_TIME),
+        getMetricValue(resultSerializationTime, TaskIndexNames.SER_TIME),
+        getMetricValue(memoryBytesSpilled, TaskIndexNames.MEM_SPILL),
+        getMetricValue(diskBytesSpilled, TaskIndexNames.DISK_SPILL),
+        getMetricValue(peakExecutionMemory, TaskIndexNames.PEAK_MEM),
         new InputMetrics(
-          gerMetricValue(inputBytesRead, TaskIndexNames.INPUT_SIZE),
-          gerMetricValue(inputRecordsRead, TaskIndexNames.INPUT_RECORDS)),
+          getMetricValue(inputBytesRead, TaskIndexNames.INPUT_SIZE),
+          getMetricValue(inputRecordsRead, TaskIndexNames.INPUT_RECORDS)),
         new OutputMetrics(
-          gerMetricValue(outputBytesWritten, TaskIndexNames.OUTPUT_SIZE),
-          gerMetricValue(outputRecordsWritten, TaskIndexNames.OUTPUT_RECORDS)),
+          getMetricValue(outputBytesWritten, TaskIndexNames.OUTPUT_SIZE),
+          getMetricValue(outputRecordsWritten, TaskIndexNames.OUTPUT_RECORDS)),
         new ShuffleReadMetrics(
-          gerMetricValue(shuffleRemoteBlocksFetched, TaskIndexNames.SHUFFLE_REMOTE_BLOCKS),
-          gerMetricValue(shuffleLocalBlocksFetched, TaskIndexNames.SHUFFLE_LOCAL_BLOCKS),
-          gerMetricValue(shuffleFetchWaitTime, TaskIndexNames.SHUFFLE_READ_TIME),
-          gerMetricValue(shuffleRemoteBytesRead, TaskIndexNames.SHUFFLE_REMOTE_READS),
-          gerMetricValue(shuffleRemoteBytesReadToDisk, TaskIndexNames.SHUFFLE_REMOTE_READS_TO_DISK),
-          gerMetricValue(shuffleLocalBytesRead, TaskIndexNames.SHUFFLE_LOCAL_READ),
-          gerMetricValue(shuffleRecordsRead, TaskIndexNames.SHUFFLE_READ_RECORDS)),
+          getMetricValue(shuffleRemoteBlocksFetched, TaskIndexNames.SHUFFLE_REMOTE_BLOCKS),
+          getMetricValue(shuffleLocalBlocksFetched, TaskIndexNames.SHUFFLE_LOCAL_BLOCKS),
+          getMetricValue(shuffleFetchWaitTime, TaskIndexNames.SHUFFLE_READ_TIME),
+          getMetricValue(shuffleRemoteBytesRead, TaskIndexNames.SHUFFLE_REMOTE_READS),
+          getMetricValue(shuffleRemoteBytesReadToDisk,
+            TaskIndexNames.SHUFFLE_REMOTE_READS_TO_DISK),
+          getMetricValue(shuffleLocalBytesRead, TaskIndexNames.SHUFFLE_LOCAL_READ),
+          getMetricValue(shuffleRecordsRead, TaskIndexNames.SHUFFLE_READ_RECORDS)),
         new ShuffleWriteMetrics(
-          gerMetricValue(shuffleBytesWritten, TaskIndexNames.SHUFFLE_WRITE_SIZE),
-          gerMetricValue(shuffleWriteTime, TaskIndexNames.SHUFFLE_WRITE_TIME),
-          gerMetricValue(shuffleRecordsWritten, TaskIndexNames.SHUFFLE_WRITE_RECORDS))))
+          getMetricValue(shuffleBytesWritten, TaskIndexNames.SHUFFLE_WRITE_SIZE),
+          getMetricValue(shuffleWriteTime, TaskIndexNames.SHUFFLE_WRITE_TIME),
+          getMetricValue(shuffleRecordsWritten, TaskIndexNames.SHUFFLE_WRITE_RECORDS))))
     } else {
       None
     }
@@ -314,9 +314,9 @@ private[spark] class TaskDataWrapper(
   def schedulerDelay: Long = {
     if (hasMetrics) {
       AppStatusUtils.schedulerDelay(launchTime, resultFetchStart, duration,
-        gerMetricValue(executorDeserializeTime, TaskIndexNames.DESER_TIME),
-        gerMetricValue(resultSerializationTime, TaskIndexNames.SER_TIME),
-        gerMetricValue(executorRunTime, TaskIndexNames.EXEC_RUN_TIME))
+        getMetricValue(executorDeserializeTime, TaskIndexNames.DESER_TIME),
+        getMetricValue(resultSerializationTime, TaskIndexNames.SER_TIME),
+        getMetricValue(executorRunTime, TaskIndexNames.EXEC_RUN_TIME))
     } else {
       -1L
     }
@@ -349,8 +349,8 @@ private[spark] class TaskDataWrapper(
   @JsonIgnore @KVIndex(value = TaskIndexNames.SHUFFLE_TOTAL_READS, parent = TaskIndexNames.STAGE)
   private def shuffleTotalReads: Long = {
     if (hasMetrics) {
-      gerMetricValue(shuffleLocalBytesRead, TaskIndexNames.SHUFFLE_LOCAL_READ) +
-        gerMetricValue(shuffleRemoteBytesRead, TaskIndexNames.SHUFFLE_REMOTE_READS)
+      getMetricValue(shuffleLocalBytesRead, TaskIndexNames.SHUFFLE_LOCAL_READ) +
+        getMetricValue(shuffleRemoteBytesRead, TaskIndexNames.SHUFFLE_REMOTE_READS)
     } else {
       -1L
     }
@@ -359,8 +359,8 @@ private[spark] class TaskDataWrapper(
   @JsonIgnore @KVIndex(value = TaskIndexNames.SHUFFLE_TOTAL_BLOCKS, parent = TaskIndexNames.STAGE)
   private def shuffleTotalBlocks: Long = {
     if (hasMetrics) {
-      gerMetricValue(shuffleLocalBlocksFetched, TaskIndexNames.SHUFFLE_LOCAL_BLOCKS) +
-        gerMetricValue(shuffleRemoteBlocksFetched, TaskIndexNames.SHUFFLE_REMOTE_BLOCKS)
+      getMetricValue(shuffleLocalBlocksFetched, TaskIndexNames.SHUFFLE_LOCAL_BLOCKS) +
+        getMetricValue(shuffleRemoteBlocksFetched, TaskIndexNames.SHUFFLE_REMOTE_BLOCKS)
     } else {
       -1L
     }
```
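
The read path in `TaskDataWrapper` inverts that encoding: `getMetricValue` (the `gerMetricValue` typo is what most of this commit fixes) returns 0 for any index flagged in `handleZero` and otherwise recovers the original magnitude. The hunk at line 244 cuts off before the `else` branch, so recovering via `math.abs` in the sketch below is an assumption, and the object name is illustrative only:

```scala
import scala.collection.mutable

object GetMetricValueSketch {
  // Sketch of `getMetricValue` from the hunk above: indices flagged in
  // `handleZero` were true zeros stored as -1, so they decode to 0; other
  // stored values are assumed recoverable by taking the absolute value.
  def getMetricValue(stored: Long, index: String,
      handleZero: mutable.HashSet[String]): Long = {
    if (handleZero(index)) 0L else math.abs(stored)
  }

  def main(args: Array[String]): Unit = {
    val handleZero = mutable.HashSet("gcTime")
    println(getMetricValue(-42L, "runTime", handleZero)) // 42
    println(getMetricValue(-1L, "gcTime", handleZero))   // 0
  }
}
```

Round-tripping a metric through the two sketches returns the original value; preserving that invariant for zero-valued metrics is exactly what the `handleZero` set is for.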
