Commit 6c60d14

SPARK-24552: Use task ID instead of attempt number for v2 writes.
1 parent 299d297 commit 6c60d14
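
Background, with a minimal sketch of mine (not part of this commit): TaskContext.attemptNumber() restarts from 0 when a stage is retried, so a zombie task from a previous stage attempt and its replacement can carry the same (partition, attempt number) pair while both are running. TaskContext.taskAttemptId(), the "TID" shown in executor logs, is unique across every task attempt in the application, which makes it the safer identifier to hand to data writers and the output commit coordinator.

import org.apache.spark.TaskContext

// Runs inside a Spark task; TaskContext.get() returns null on the driver.
def describeAttempt(): String = {
  val ctx = TaskContext.get()
  // attemptNumber(): 0 for the first attempt of this partition, 1 for a retry, ...
  // taskAttemptId(): unique across the whole application (the "TID" in logs)
  s"partition=${ctx.partitionId()} attempt#=${ctx.attemptNumber()} TID=${ctx.taskAttemptId()}"
}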

File tree

1 file changed (+9 -9 lines changed)


sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/WriteToDataSourceV2.scala

Lines changed: 9 additions & 9 deletions
@@ -29,10 +29,8 @@ import org.apache.spark.sql.catalyst.encoders.{ExpressionEncoder, RowEncoder}
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.SparkPlan
-import org.apache.spark.sql.execution.streaming.{MicroBatchExecution, StreamExecution}
-import org.apache.spark.sql.execution.streaming.continuous.{CommitPartitionEpoch, ContinuousExecution, EpochCoordinatorRef, SetWriterPartitions}
+import org.apache.spark.sql.execution.streaming.MicroBatchExecution
 import org.apache.spark.sql.sources.v2.writer._
-import org.apache.spark.sql.sources.v2.writer.streaming.StreamWriter
 import org.apache.spark.sql.types.StructType
 import org.apache.spark.util.Utils
 

@@ -110,7 +108,7 @@ object DataWritingSparkTask extends Logging {
       useCommitCoordinator: Boolean): WriterCommitMessage = {
     val stageId = context.stageId()
     val partId = context.partitionId()
-    val attemptId = context.attemptNumber()
+    val attemptId = context.taskAttemptId().toInt
     val epochId = Option(context.getLocalProperty(MicroBatchExecution.BATCH_ID_KEY)).getOrElse("0")
     val dataWriter = writeTask.createDataWriter(partId, attemptId, epochId.toLong)
 
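A side note on the new line (my gloss, not the commit's): taskAttemptId() returns a Long, while this version of the v2 writer API and the commit coordinator still take an Int, hence the .toInt narrowing:

// taskAttemptId() is a Long; the code above narrows it to Int.
// Scala's .toInt truncates instead of failing, so the cast silently
// wraps if an application ever exceeds Int.MaxValue task attempts.
val tid: Long = 42L              // stand-in for context.taskAttemptId()
val attemptId: Int = tid.toInt   // the narrowing performed in the diff above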

@@ -124,10 +122,12 @@
         val coordinator = SparkEnv.get.outputCommitCoordinator
         val commitAuthorized = coordinator.canCommit(context.stageId(), partId, attemptId)
         if (commitAuthorized) {
-          logInfo(s"Writer for stage $stageId, task $partId.$attemptId is authorized to commit.")
+          logInfo(
+            s"Writer for stage $stageId, task $partId (TID $attemptId) is authorized to commit.")
           dataWriter.commit()
         } else {
-          val message = s"Stage $stageId, task $partId.$attemptId: driver did not authorize commit"
+          val message =
+            s"Stage $stageId, task $partId (TID $attemptId): driver did not authorize commit"
           logInfo(message)
           // throwing CommitDeniedException will trigger the catch block for abort
           throw new CommitDeniedException(message, stageId, partId, attemptId)
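
To make the canCommit / CommitDeniedException flow above concrete, here is a simplified, self-contained model (hypothetical toy types, not Spark's actual classes): the coordinator authorizes exactly one attempt per (stage, partition), and a denied attempt aborts instead of committing.

import scala.collection.concurrent.TrieMap

final class ToyCommitCoordinator {
  // First attempt to claim a (stage, partition) wins; later claimants are denied.
  private val winners = TrieMap.empty[(Int, Int), Long]

  def canCommit(stage: Int, partition: Int, tid: Long): Boolean =
    winners.putIfAbsent((stage, partition), tid).forall(_ == tid)
}

// Usage: a duplicate attempt for the same partition must abort, not commit.
val coord = new ToyCommitCoordinator
assert(coord.canCommit(stage = 3, partition = 5, tid = 42L))   // first claimant commits
assert(!coord.canCommit(stage = 3, partition = 5, tid = 43L))  // duplicate is denied
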
@@ -138,15 +138,15 @@
         dataWriter.commit()
       }
 
-      logInfo(s"Writer for stage $stageId, task $partId.$attemptId committed.")
+      logInfo(s"Writer for stage $stageId, task $partId (TID $attemptId) committed.")
 
       msg
 
     })(catchBlock = {
       // If there is an error, abort this writer
-      logError(s"Writer for stage $stageId, task $partId.$attemptId is aborting.")
+      logError(s"Writer for stage $stageId, task $partId (TID $attemptId) is aborting.")
       dataWriter.abort()
-      logError(s"Writer for stage $stageId, task $partId.$attemptId aborted.")
+      logError(s"Writer for stage $stageId, task $partId (TID $attemptId) aborted.")
     })
   }
 }
