
Commit f9819d5

Fix inconsistent state in DiskBlockObjectWriter when an exception occurred
Change-Id: I837b5135dd67034d74a9832133dc29800c88f089
1 parent de62ddf commit f9819d5

File tree

1 file changed: +24 -17 lines changed


core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala

Lines changed: 24 additions & 17 deletions
@@ -128,16 +128,19 @@ private[spark] class DiskBlockObjectWriter(
    */
   private def closeResources(): Unit = {
     if (initialized) {
-      mcs.manualClose()
-      channel = null
-      mcs = null
-      bs = null
-      fos = null
-      ts = null
-      objOut = null
-      initialized = false
-      streamOpen = false
-      hasBeenClosed = true
+      Utils.tryWithSafeFinally {
+        mcs.manualClose()
+      } {
+        channel = null
+        mcs = null
+        bs = null
+        fos = null
+        ts = null
+        objOut = null
+        initialized = false
+        streamOpen = false
+        hasBeenClosed = true
+      }
     }
   }
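
The rewritten closeResources() wraps mcs.manualClose() in Utils.tryWithSafeFinally so that the writer's fields are reset even if closing the streams throws; before this change an exception from manualClose() skipped the assignments below it and left initialized, streamOpen and hasBeenClosed in an inconsistent state. A minimal, self-contained sketch of the helper pattern the fix relies on (illustrative only, not Spark's actual Utils implementation; the object and names below are invented for the example):

object SafeFinallySketch {
  // Run `block`, then always run `cleanup`; if both throw, attach the cleanup
  // failure as a suppressed exception instead of letting it mask the original.
  def tryWithSafeFinally[T](block: => T)(cleanup: => Unit): T = {
    var original: Throwable = null
    try {
      block
    } catch {
      case t: Throwable =>
        original = t
        throw t
    } finally {
      try {
        cleanup
      } catch {
        // If `block` succeeded (original == null), a cleanup failure simply propagates.
        case t: Throwable if original != null => original.addSuppressed(t)
      }
    }
  }

  def main(args: Array[String]): Unit = {
    var initialized = true
    try {
      tryWithSafeFinally {
        throw new RuntimeException("manualClose failed") // simulated close failure
      } {
        initialized = false // cleanup still runs
      }
    } catch {
      case e: RuntimeException =>
        println(s"caught: ${e.getMessage}, initialized = $initialized") // prints initialized = false
    }
  }
}

The sketch only illustrates the contract the diff depends on: the cleanup block always runs, and an exception raised there does not hide the exception thrown by the body.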

@@ -206,18 +209,22 @@ private[spark] class DiskBlockObjectWriter(
         streamOpen = false
         closeResources()
       }
+    } catch {
+      case e: Exception =>
+        logError("Uncaught exception while closing file " + file, e)
+    }

-      val truncateStream = new FileOutputStream(file, true)
-      try {
-        truncateStream.getChannel.truncate(committedPosition)
-        file
-      } finally {
-        truncateStream.close()
-      }
+    var truncateStream: FileOutputStream = null
+    try {
+      truncateStream = new FileOutputStream(file, true)
+      truncateStream.getChannel.truncate(committedPosition)
+      file
     } catch {
       case e: Exception =>
         logError("Uncaught exception while reverting partial writes to file " + file, e)
         file
+    } finally {
+      truncateStream.close()
     }
   }
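
In revertPartialWritesAndClose() the change separates the two failure modes: closing the streams now has its own catch (logged as "closing file"), and the truncate back to committedPosition gets its own try/catch/finally, so a failure while closing the per-block streams can no longer skip the truncate. A rough standalone sketch of the resulting truncate pattern (illustrative only, not the class's real code; the null check in the finally block is an extra guard, assumed here for the case where opening the stream itself fails):

import java.io.{File, FileOutputStream}

object RevertSketch {
  // Truncate `file` back to `committedPosition`, always closing the stream and
  // never letting a truncation failure escape to the caller.
  def truncateTo(file: File, committedPosition: Long): File = {
    var truncateStream: FileOutputStream = null
    try {
      truncateStream = new FileOutputStream(file, true) // append mode keeps existing bytes
      truncateStream.getChannel.truncate(committedPosition)
      file
    } catch {
      case e: Exception =>
        println("Uncaught exception while reverting partial writes to file " + file + ": " + e)
        file
    } finally {
      if (truncateStream != null) {
        truncateStream.close()
      }
    }
  }
}

Declaring truncateStream as a var outside the try, instead of a val inside it, is what lets the finally block close the stream while the catch still logs a failed truncate and returns the file.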
