File tree Expand file tree Collapse file tree 1 file changed +4
-2
lines changed
core/src/main/scala/org/apache/spark/status Expand file tree Collapse file tree 1 file changed +4
-2
lines changed Original file line number Diff line number Diff line change @@ -136,6 +136,8 @@ private[spark] class AppStatusStore(
136
136
store.read(classOf[StageDataWrapper], Array(stageId, stageAttemptId)).locality
137
137
}
138
138
139
+ private def isLiveUI: Boolean = listener.isDefined
140
+
139
141
/**
140
142
* Calculates a summary of the task metrics for the given stage attempt, returning the
141
143
* requested quantiles for the recorded metrics.
@@ -156,7 +158,7 @@ private[spark] class AppStatusStore(
156
158
// cheaper for disk stores (avoids deserialization).
157
159
val count = {
158
160
Utils.tryWithResource(
159
- if (store.isInstanceOf[ElementTrackingStore]) {
161
+ if (isLiveUI) {
160
162
// For Live UI, we should count the tasks with status "SUCCESS" only.
161
163
store.view(classOf[TaskDataWrapper])
162
164
.parent(stageKey)
@@ -246,7 +248,7 @@ private[spark] class AppStatusStore(
246
248
// and failed tasks differently (would be tricky). Also would require changing the disk store
247
249
// version (to invalidate old stores).
248
250
def scanTasks(index: String)(fn: TaskDataWrapper => Long): IndexedSeq[Double] = {
249
- if (store.isInstanceOf[ElementTrackingStore]) {
251
+ if (isLiveUI) {
250
252
val quantileTasks = store.view(classOf[TaskDataWrapper])
251
253
.parent(stageKey)
252
254
.index(index)
You can’t perform that action at this time.
0 commit comments