Skip to content

Commit 6542b42

Browse files
committed
remove extra stageAttemptId
1 parent ada7726 commit 6542b42

File tree

3 files changed

+1
-3
lines changed

3 files changed

+1
-3
lines changed

core/src/main/scala/org/apache/spark/TaskContextImpl.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -30,7 +30,6 @@ private[spark] class TaskContextImpl(
30 30     override val attemptNumber: Int,
31 31     override val taskMemoryManager: TaskMemoryManager,
32 32     val runningLocally: Boolean = false,
33  -     val stageAttemptId: Int = 0, // for testing
34 33     val taskMetrics: TaskMetrics = TaskMetrics.empty)
35 34   extends TaskContext
36 35   with Logging {

core/src/main/scala/org/apache/spark/scheduler/Task.scala

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -58,7 +58,6 @@ private[spark] abstract class Task[T](
58 58   final def run(taskAttemptId: Long, attemptNumber: Int): T = {
59 59     context = new TaskContextImpl(
60 60       stageId = stageId,
61  -       stageAttemptId = stageAttemptId,
62 61       partitionId = partitionId,
63 62       taskAttemptId = taskAttemptId,
64 63       attemptNumber = attemptNumber,

core/src/test/java/org/apache/spark/JavaAPISuite.java

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1011,7 +1011,7 @@ public void persist() {
1011 1011   @Test
1012 1012   public void iterator() {
1013 1013     JavaRDD&lt;Integer&gt; rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5), 2);
1014    -     TaskContext context = new TaskContextImpl(0, 0, 0L, 0, null, false, 0, new TaskMetrics());
     1014 +     TaskContext context = new TaskContextImpl(0, 0, 0L, 0, null, false, new TaskMetrics());
1015 1015     Assert.assertEquals(1, rdd.iterator(rdd.partitions().get(0), context).next().intValue());
1016 1016   }
1017 1017

0 commit comments

Comments (0)