Skip to content

Commit b406252

Browse files
Ken Takagiwa
Ken Takagiwa
authored and committed
comment PythonDStream.PairwiseDStream
1 parent 454981d commit b406252

File tree

2 files changed

+4
-5
lines changed

2 files changed

+4
-5
lines changed

streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala

+2-1
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,7 @@ class PythonDStream[T: ClassTag](
129129
}
130130
}
131131

132-
132+
/*
133133
private class PairwiseDStream(prev:DStream[Array[Byte]]) extends
134134
DStream[(Long, Array[Byte])](prev.ssc){
135135
override def dependencies = List(prev)
@@ -146,6 +146,7 @@ DStream[(Long, Array[Byte])](prev.ssc){
146146
}
147147
val asJavaPairDStream : JavaPairDStream[Long, Array[Byte]] = JavaPairDStream(this)
148148
}
149+
*/
149150

150151

151152

streaming/src/main/scala/org/apache/spark/streaming/dstream/DStream.scala

+2-4
Original file line numberDiff line numberDiff line change
@@ -620,10 +620,7 @@ abstract class DStream[T: ClassTag] (
620620
new ForEachDStream(this, context.sparkContext.clean(foreachFunc)).register()
621621
}
622622

623-
624-
625-
626-
623+
//TODO move pyprint to PythonDStream
627624
/**
628625
* Print the first ten elements of each PythonRDD generated in this PythonDStream. This is an output
629626
* operator, so this PythonDStream will be registered as an output stream and there materialized.
@@ -644,6 +641,7 @@ abstract class DStream[T: ClassTag] (
644641
tempFileStream.close()
645642

646643
// This value has to be passed from python
644+
// Python currently does not do cluster deployment. But what happened
647645
val pythonExec = new ProcessBuilder().environment().get("PYSPARK_PYTHON")
648646
val sparkHome = new ProcessBuilder().environment().get("SPARK_HOME")
649647
//val pb = new ProcessBuilder(Seq(pythonExec, sparkHome + "/python/pyspark/streaming/pyprint.py", tempFile.getAbsolutePath())) // why this fails to compile???

0 commit comments

Comments (0)