File tree Expand file tree Collapse file tree 2 files changed +5
-3
lines changed
streaming/src/main/scala/org/apache/spark/streaming/dstream Expand file tree Collapse file tree 2 files changed +5
-3
lines changed Original file line number Diff line number Diff line change @@ -56,7 +56,8 @@ def _sum(self):
56
56
"""
57
57
Add up the elements in this DStream.
58
58
"""
59
- return self ._mapPartitions (lambda x : [sum (x )]).reduce (operator .add )
59
+ pass
60
+ #return self._mapPartitions(lambda x: [sum(x)]).reduce(operator.add)
60
61
61
62
def print_ (self , label = None ):
62
63
"""
@@ -152,7 +153,7 @@ def combineLocally(iterator):
152
153
else :
153
154
combiners [k ] = mergeValue (combiners [k ], v )
154
155
return combiners .iteritems ()
155
- locally_combined = self .mapPartitions (combineLocally )
156
+ locally_combined = self ._mapPartitions (combineLocally )
156
157
shuffled = locally_combined .partitionBy (numPartitions )
157
158
158
159
def _mergeCombiners (iterator ):
Original file line number Diff line number Diff line change @@ -620,7 +620,7 @@ abstract class DStream[T: ClassTag] (
620
620
new ForEachDStream (this , context.sparkContext.clean(foreachFunc)).register()
621
621
}
622
622
623
- // TODO move pyprint to PythonDStream and executed by py4j call back function
623
+ // TODO: move pyprint to PythonDStream and execute it via a py4j callback function
624
624
/**
625
625
* Print the first ten elements of each PythonRDD generated in this PythonDStream. This is an output
626
626
* operator, so this PythonDStream will be registered as an output stream and therefore materialized.
@@ -644,6 +644,7 @@ abstract class DStream[T: ClassTag] (
644
644
645
645
// pythonExec should be passed from python. Move pyprint to PythonDStream
646
646
val pythonExec = new ProcessBuilder ().environment().get(" PYSPARK_PYTHON" )
647
+
647
648
val sparkHome = new ProcessBuilder ().environment().get(" SPARK_HOME" )
648
649
// Call python script to deserialize and print result in stdout
649
650
val pb = new ProcessBuilder (pythonExec, sparkHome + " /python/pyspark/streaming/pyprint.py" , tempFile.getAbsolutePath)
You can’t perform that action at this time.
0 commit comments