@@ -17,6 +17,7 @@
 
 package org.apache.spark.scheduler
 
+import com.fasterxml.jackson.core.JsonParseException
 import java.io.{InputStream, IOException}
 
 import scala.io.Source
@@ -40,15 +41,31 @@ private[spark] class ReplayListenerBus extends SparkListenerBus with Logging {
    *
    * @param logData Stream containing event log data.
    * @param sourceName Filename (or other source identifier) from whence @logData is being read
+   * @param sourceTruncated Indicates whether the log file might be truncated (e.g. some
+   *        abnormal situation was encountered and the file was never finished being written)
    */
-  def replay(logData: InputStream, sourceName: String): Unit = {
+  def replay(
+      logData: InputStream,
+      sourceName: String,
+      sourceTruncated: Boolean = false): Unit = {
     var currentLine: String = null
     var lineNumber: Int = 1
     try {
       val lines = Source.fromInputStream(logData).getLines()
-      lines.foreach { line =>
-        currentLine = line
-        postToAll(JsonProtocol.sparkEventFromJson(parse(line)))
+      while (lines.hasNext) {
+        currentLine = lines.next()
+        try {
+          postToAll(JsonProtocol.sparkEventFromJson(parse(currentLine)))
+        } catch {
+          case jpe: JsonParseException =>
+            // We can only ignore a parse exception on the last line of a file that might be truncated
+            if (!sourceTruncated || lines.hasNext) {
+              throw jpe
+            } else {
+              logWarning(s"Got JsonParseException from log file $sourceName" +
+                s" at line $lineNumber, the file might not have finished writing.")
+            }
+        }
         lineNumber += 1
       }
     } catch {
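For context, a minimal usage sketch of the new parameter (hypothetical caller code, not part of this diff; the file path and `val` names are made up). A caller replaying an event log that may still be in progress would pass `sourceTruncated = true`, so a `JsonParseException` on the final, partially written line is logged and skipped instead of aborting the replay:

```scala
import java.io.FileInputStream

// Hypothetical Spark-internal caller (ReplayListenerBus is private[spark]).
val bus = new ReplayListenerBus()
// An ".inprogress" event log may end with a half-written JSON line.
val in = new FileInputStream("/tmp/spark-events/app-1234.inprogress")
try {
  // sourceTruncated = true: a parse failure on the last line only logs a warning.
  bus.replay(in, sourceName = "app-1234.inprogress", sourceTruncated = true)
} finally {
  in.close()
}
```

With `sourceTruncated = false` (the default), the same parse failure propagates as before, preserving the existing behavior for complete logs.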