Skip to content

Commit 9c85e48

Browse files
committed
clean up examples
1 parent 24f95db commit 9c85e48

File tree

2 files changed

+10
-10
lines changed

2 files changed

+10
-10
lines changed
Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import sys
2-
from operator import add
32

43
from pyspark.streaming.context import StreamingContext
54
from pyspark.streaming.duration import *
@@ -12,10 +11,10 @@
1211
duration=Seconds(1))
1312

1413
lines = ssc.socketTextStream(sys.argv[1], int(sys.argv[2]))
15-
words = lines.flatMap(lambda line: line.split(" "))
16-
mapped_words = words.map(lambda word: (word, 1))
17-
count = mapped_words.reduceByKey(add)
18-
count.pyprint()
14+
counts = lines.flatMap(lambda line: line.split(" "))\
15+
.map(lambda word: (word, 1))\
16+
.reduceByKey(lambda a,b: a+b)
17+
counts.pyprint()
1918

2019
ssc.start()
2120
ssc.awaitTermination()

examples/src/main/python/streaming/wordcount.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,13 +8,14 @@
88
print >> sys.stderr, "Usage: wordcount <directory>"
99
exit(-1)
1010

11-
ssc = StreamingContext(appName="PythonStreamingWordCount", duration=Seconds(1))
11+
ssc = StreamingContext(appName="PythonStreamingWordCount",
12+
duration=Seconds(1))
1213

1314
lines = ssc.textFileStream(sys.argv[1])
14-
words = lines.flatMap(lambda line: line.split(" "))
15-
mapped_words = words.map(lambda x: (x, 1))
16-
count = mapped_words.reduceByKey(lambda a, b: a+b)
17-
count.pyprint()
15+
counts = lines.flatMap(lambda line: line.split(" "))\
16+
.map(lambda x: (x, 1))\
17+
.reduceByKey(lambda a, b: a+b)
18+
counts.pyprint()
1819

1920
ssc.start()
2021
ssc.awaitTermination()

0 commit comments

Comments
 (0)