Skip to content

Commit c40c52d

Browse files
committed
change first(), take(n) to have the same behavior as RDD
1 parent 98ac6c2 commit c40c52d

File tree

2 files changed

+9
-12
lines changed

2 files changed

+9
-12
lines changed

python/pyspark/streaming/dstream.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -174,18 +174,19 @@ def take(self, n):
174174
"""
175175
Return the first `n` RDDs in the stream (will start and stop).
176176
"""
177-
rdds = []
177+
results = []
178178

179179
def take(_, rdd):
180-
if rdd and len(rdds) < n:
181-
rdds.append(rdd)
180+
if rdd and len(results) < n:
181+
results.extend(rdd.take(n - len(results)))
182+
182183
self.foreachRDD(take)
183184

184185
self._ssc.start()
185-
while len(rdds) < n:
186+
while len(results) < n:
186187
time.sleep(0.01)
187188
self._ssc.stop(False, True)
188-
return rdds
189+
return results
189190

190191
def collect(self):
191192
"""

python/pyspark/streaming/tests.py

Lines changed: 3 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -87,16 +87,12 @@ class TestBasicOperations(PySparkStreamingTestCase):
8787
def test_take(self):
8888
input = [range(i) for i in range(3)]
8989
dstream = self.ssc.queueStream(input)
90-
rdds = dstream.take(3)
91-
self.assertEqual(3, len(rdds))
92-
for d, rdd in zip(input, rdds):
93-
self.assertEqual(d, rdd.collect())
90+
self.assertEqual([0, 0, 1], dstream.take(3))
9491

9592
def test_first(self):
9693
input = [range(10)]
9794
dstream = self.ssc.queueStream(input)
98-
rdd = dstream.first()
99-
self.assertEqual(range(10), rdd.collect())
95+
self.assertEqual(0, dstream.first())
10096

10197
def test_map(self):
10298
"""Basic operation test for DStream.map."""
@@ -385,7 +381,7 @@ def func(rdds):
385381

386382
dstream = self.ssc.transform([dstream1, dstream2, dstream3], func)
387383

388-
self.assertEqual([2, 3, 1], dstream.first().collect())
384+
self.assertEqual([2, 3, 1], dstream.take(3))
389385

390386

391387
if __name__ == "__main__":

0 commit comments

Comments
 (0)