Skip to content

Commit 473ec4b

Browse files
committed
[SPARK-1690] Tolerating empty elements when saving Python RDD to text files
1 parent 3308722 commit 473ec4b

File tree

1 file changed: +3 additions, −2 deletions

core/src/main/scala/org/apache/spark/api/python/PythonRDD.scala

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -94,6 +94,7 @@ private[spark] class PythonRDD[T: ClassTag](
 94   94          val obj = new Array[Byte](length)
 95   95          stream.readFully(obj)
 96   96          obj
      97   +      case 0 => Array.empty[Byte]
 97   98        case SpecialLengths.TIMING_DATA =>
 98   99          // Timing data from worker
 99   100         val bootTime = stream.readLong()
@@ -123,7 +124,7 @@ private[spark] class PythonRDD[T: ClassTag](
 123  124         stream.readFully(update)
 124  125         accumulator += Collections.singletonList(update)
 125  126       }
 126        -    Array.empty[Byte]
      127   +    null
 127  128     }
 128  129   } catch {

@@ -143,7 +144,7 @@ private[spark] class PythonRDD[T: ClassTag](
 143  144
 144  145     var _nextObj = read()
 145  146
 146        -  def hasNext = _nextObj.length != 0
      147   +  def hasNext = _nextObj != null
 147  148   }
 148  149   new InterruptibleIterator(context, stdoutIterator)
 149  150 }

0 commit comments

Comments (0)