diff --git a/python/pyspark/streaming/context.py b/python/pyspark/streaming/context.py
index 3f455a3e06072..cdd6926bf1c7a 100644
--- a/python/pyspark/streaming/context.py
+++ b/python/pyspark/streaming/context.py
@@ -102,6 +102,7 @@ def start(self):
     def awaitTermination(self, timeout=None):
         """
         Wait for the execution to stop.
+        @param timeout: time to wait in milliseconds
         """
         if timeout is None:
             self._jssc.awaitTermination()
diff --git a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
index 681ef5997002e..2c44f6cc1d42f 100644
--- a/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
+++ b/streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala
@@ -184,8 +184,6 @@ class PythonForeachDStream(
   this.register()
 }
 
-/*
-This does not work. Ignore this for now. -TD
 class PythonTransformedDStream(
     prev: DStream[Array[Byte]],
     transformFunction: PythonRDDFunction
@@ -204,7 +202,6 @@ class PythonTransformedDStream(
   val asJavaDStream = JavaDStream.fromDStream(this)
   //val asJavaPairDStream : JavaPairDStream[Long, Array[Byte]] = JavaPairDStream.fromJavaDStream(this)
 }
-*/
 
 /**
  * This is a input stream just for the unitest. This is equivalent to a checkpointable,
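
Usage note (not part of the patch): a minimal sketch of how the timeout parameter documented in the first hunk would be called, assuming an already-constructed and configured StreamingContext bound to a variable named ssc; the 10000 ms value is purely illustrative.

    ssc.start()
    # Per the docstring added above, timeout is given in milliseconds and is
    # forwarded to the JVM StreamingContext; with no argument the call blocks
    # until the streaming context is stopped.
    ssc.awaitTermination(timeout=10000)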