Skip to content

Commit befff0c

Browse files
Commit message: "review feedback"
1 parent: 14ac3ed — commit: befff0c

File tree

5 files changed

+9
-8
lines changed

5 files changed

+9
-8
lines changed

core/src/main/java/org/apache/spark/status/api/v1/TaskSorting.java

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,18 +17,15 @@
1717

1818
package org.apache.spark.status.api.v1;
1919

20-
import org.apache.spark.annotation.DeveloperApi;
2120
import org.apache.spark.util.EnumUtil;
2221

2322
import java.util.HashSet;
2423
import java.util.Set;
2524

26-
// exposed for jersey
27-
@DeveloperApi
2825
public enum TaskSorting {
2926
ID,
30-
IncreasingRuntime("runtime"),
31-
DecreasingRuntime("-runtime");
27+
INCREASING_RUNTIME("runtime"),
28+
DECREASING_RUNTIME("-runtime");
3229

3330
private final Set<String> alternateNames;
3431
private TaskSorting(String... names) {

core/src/main/scala/org/apache/spark/status/api/v1/AllStagesResource.scala

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -294,12 +294,16 @@ private[v1] abstract class MetricHelper[I,O](
294294
quantiles: Array[Double]) {
295295

296296
def getSubmetrics(raw: InternalTaskMetrics): Option[I]
297+
297298
def build: O
299+
298300
val data: Seq[I] = rawMetrics.flatMap(getSubmetrics)
301+
299302
/** applies the given function to all input metrics, and returns the quantiles */
300303
def submetricQuantiles(f: I => Double): IndexedSeq[Double] = {
301304
Distribution(data.map { d => f(d) }).get.getQuantiles(quantiles)
302305
}
306+
303307
def metricOption: Option[O] = {
304308
if (data.isEmpty) {
305309
None

core/src/main/scala/org/apache/spark/status/api/v1/OneStageResource.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -147,8 +147,8 @@ object OneStageResource {
147147
val extractor: (TaskData => Long) = td =>
148148
taskSorting match {
149149
case ID => td.taskId
150-
case IncreasingRuntime => td.taskMetrics.map{_.executorRunTime}.getOrElse(-1L)
151-
case DecreasingRuntime => -td.taskMetrics.map{_.executorRunTime}.getOrElse(-1L)
150+
case INCREASING_RUNTIME => td.taskMetrics.map{_.executorRunTime}.getOrElse(-1L)
151+
case DECREASING_RUNTIME => -td.taskMetrics.map{_.executorRunTime}.getOrElse(-1L)
152152
}
153153
Ordering.by(extractor)
154154
}

core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -104,7 +104,7 @@ class HistoryServerSuite extends FunSuite with BeforeAndAfter with Matchers with
104104
"stage task list w/ offset & length" ->
105105
"applications/local-1427397477963/stages/20/0/taskList?offset=10&length=50",
106106
"stage task list w/ sortBy" ->
107-
"applications/local-1427397477963/stages/20/0/taskList?sortBy=DecreasingRuntime",
107+
"applications/local-1427397477963/stages/20/0/taskList?sortBy=DECREASING_RUNTIME",
108108
"stage task list w/ sortBy short names: -runtime" ->
109109
"applications/local-1427397477963/stages/20/0/taskList?sortBy=-runtime",
110110
"stage task list w/ sortBy short names: runtime" ->

0 commit comments

Comments (0)