Skip to content

Commit 701d045

Browse files
committed
Rebase with sort-based shuffle
1 parent 9160149 commit 701d045

File tree

2 files changed: 4 additions, 5 deletions

core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleWriter.scala

Lines changed: 1 addition & 2 deletions
@@ -94,8 +94,7 @@ private[spark] class SortShuffleWriter[K, V, C](
       for (elem <- elements) {
         writer.write(elem)
       }
-      writer.commit()
-      writer.close()
+      writer.commitAndClose()
       val segment = writer.fileSegment()
       offsets(id + 1) = segment.offset + segment.length
       lengths(id) = segment.length

core/src/main/scala/org/apache/spark/util/collection/ExternalSorter.scala

Lines changed: 3 additions & 3 deletions
@@ -270,9 +270,10 @@ private[spark] class ExternalSorter[K, V, C](
     // How many elements we have in each partition
     val elementsPerPartition = new Array[Long](numPartitions)

-    // Flush the disk writer's contents to disk, and update relevant variables
+    // Flush the disk writer's contents to disk, and update relevant variables.
+    // The writer is closed at the end of this process, and cannot be reused.
     def flush() = {
-      writer.commit()
+      writer.commitAndClose()
      val bytesWritten = writer.bytesWritten
      batchSizes.append(bytesWritten)
      _diskBytesSpilled += bytesWritten
@@ -293,7 +294,6 @@ private[spark] class ExternalSorter[K, V, C](

       if (objectsWritten == serializerBatchSize) {
         flush()
-        writer.close()
        writer = blockManager.getDiskWriter(blockId, file, ser, fileBufferSize)
       }
     }

0 commit comments

Comments
 (0)