
comment PythonDStream.PairwiseDStream
Ken Takagiwa authored and giwa committed Sep 20, 2014
1 parent 454981d commit b406252
Showing 2 changed files with 4 additions and 5 deletions.
@@ -129,7 +129,7 @@ class PythonDStream[T: ClassTag](
}
}


/*
private class PairwiseDStream(prev:DStream[Array[Byte]]) extends
DStream[(Long, Array[Byte])](prev.ssc){
override def dependencies = List(prev)
@@ -146,6 +146,7 @@ DStream[(Long, Array[Byte])](prev.ssc){
}
val asJavaPairDStream : JavaPairDStream[Long, Array[Byte]] = JavaPairDStream(this)
}
*/
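
For context on what is being commented out here: PairwiseDStream follows the same pattern as PairwiseRDD in PySpark core, turning the raw byte stream coming from Python into (Long, Array[Byte]) pairs that the JVM can shuffle, and exposing the result through JavaPairDStream. The compute body is hidden by this diff, so the following is only a rough sketch modelled on PairwiseRDD; the grouped-by-two pairing and the Utils.deserializeLongValue call are assumptions, not taken from this commit.

import org.apache.spark.SparkException
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Duration, Time}
import org.apache.spark.streaming.api.java.JavaPairDStream
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.util.Utils

// Sketch only: pairs a serialized Long key with its pickled payload,
// mirroring PairwiseRDD in PythonRDD.scala.
private class PairwiseDStream(prev: DStream[Array[Byte]])
  extends DStream[(Long, Array[Byte])](prev.ssc) {

  override def dependencies = List(prev)
  override def slideDuration: Duration = prev.slideDuration

  override def compute(validTime: Time): Option[RDD[(Long, Array[Byte])]] = {
    prev.getOrCompute(validTime).map { rdd =>
      rdd.mapPartitions { iter =>
        // Records arrive as alternating byte arrays: serialized key, then payload.
        iter.grouped(2).map {
          case Seq(a, b) => (Utils.deserializeLongValue(a), b)
          case x => throw new SparkException("PairwiseDStream: unexpected value: " + x)
        }
      }
    }
  }

  val asJavaPairDStream: JavaPairDStream[Long, Array[Byte]] = JavaPairDStream(this)
}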



@@ -620,10 +620,7 @@ abstract class DStream[T: ClassTag] (
new ForEachDStream(this, context.sparkContext.clean(foreachFunc)).register()
}





//TODO move pyprint to PythonDStream
/**
* Print the first ten elements of each PythonRDD generated in this PythonDStream. This is an output
* operator, so this PythonDStream will be registered as an output stream and materialized there.
@@ -644,6 +641,7 @@ tempFileStream.close()
tempFileStream.close()

// This value has to be passed from python
// Python currently does not do cluster deployment. But what happened
val pythonExec = new ProcessBuilder().environment().get("PYSPARK_PYTHON")
val sparkHome = new ProcessBuilder().environment().get("SPARK_HOME")
//val pb = new ProcessBuilder(Seq(pythonExec, sparkHome + "/python/pyspark/streaming/pyprint.py", tempFile.getAbsolutePath())) // why this fails to compile???
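
On the "why this fails to compile" question above: java.lang.ProcessBuilder only has constructors taking String... varargs or a java.util.List<String>, so a Scala Seq[String] does not match either overload directly. Two standard workarounds that should compile are sketched below, reusing the names from the line above; which one the author intended is not recorded in this commit.

// Option 1: splice the Seq into the Java varargs constructor with ": _*"
val pbVarargs = new ProcessBuilder(
  Seq(pythonExec, sparkHome + "/python/pyspark/streaming/pyprint.py",
      tempFile.getAbsolutePath()): _*)

// Option 2: convert the Seq to a java.util.List before passing it
import scala.collection.JavaConverters._
val pbList = new ProcessBuilder(
  Seq(pythonExec, sparkHome + "/python/pyspark/streaming/pyprint.py",
      tempFile.getAbsolutePath()).asJava)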
