diff --git a/src/sql/run/sparkdataframe.py b/src/sql/run/sparkdataframe.py
index 762c69cb1..e47c0d318 100644
--- a/src/sql/run/sparkdataframe.py
+++ b/src/sql/run/sparkdataframe.py
@@ -9,9 +9,9 @@ def handle_spark_dataframe(dataframe, should_cache=False):
-    """Execute a ResultSet sqlaproxy using pysark module."""
+    """Execute a ResultSet sqlaproxy using pyspark module."""
     if not DataFrame and not CDataFrame:
-        raise exceptions.MissingPackageError("pysark not installed")
+        raise exceptions.MissingPackageError("pyspark not installed")
     return SparkResultProxy(dataframe, dataframe.columns, should_cache)
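
For context, a minimal sketch of how the patched function plausibly reads after this change. The guarded pyspark imports and the exceptions / SparkResultProxy names are assumptions inferred from the hunk, not copied verbatim from the rest of sparkdataframe.py.

# Sketch only: the import guard below is an assumption about how the module
# exposes DataFrame / CDataFrame; only the function body comes from the hunk.
try:
    from pyspark.sql import DataFrame
    from pyspark.sql.connect.dataframe import DataFrame as CDataFrame
except ImportError:
    DataFrame = None
    CDataFrame = None

# exceptions and SparkResultProxy are assumed to be provided elsewhere in
# the package (they appear in the hunk but their imports are not shown).

def handle_spark_dataframe(dataframe, should_cache=False):
    """Execute a ResultSet sqlaproxy using pyspark module."""
    # If neither pyspark DataFrame class could be imported, pyspark is absent.
    if not DataFrame and not CDataFrame:
        raise exceptions.MissingPackageError("pyspark not installed")

    # Wrap the Spark DataFrame in a proxy so it behaves like a ResultSet.
    return SparkResultProxy(dataframe, dataframe.columns, should_cache)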