chore: Bumping pyarrow / pandas (#17290)
* Bumping pyarrow / pandas

* Appeasing the pylint gods

* Adjusting UPDATING.md

* Bumping testing requirements
craig-rueda authored and AAfghahi committed Jan 10, 2022
1 parent 403cf79 commit 6b7f2d3
Showing 13 changed files with 44 additions and 27 deletions.
1 change: 1 addition & 0 deletions UPDATING.md
@@ -26,6 +26,7 @@ assists people when migrating to a new version.

 ### Breaking Changes

+- [17290](https://github.com/apache/superset/pull/17290): Bumps pandas to `1.3.4` and pyarrow to `5.0.0`
 - [16660](https://github.com/apache/incubator-superset/pull/16660): The `columns` Jinja parameter has been renamed `table_columns` to make the `columns` query object parameter available in the Jinja context.
 - [16711](https://github.com/apache/incubator-superset/pull/16711): The `url_param` Jinja function will now by default escape the result. For instance, the value `O'Brien` will now be changed to `O''Brien`. To disable this behavior, call `url_param` with `escape_result` set to `False`: `url_param("my_key", "my default", escape_result=False)`.

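The `url_param` entry above escapes values by doubling single quotes, so `O'Brien` becomes `O''Brien`. As an editorial illustration only, here is a minimal Python sketch of that escaping rule; the helper name is hypothetical and this is not Superset's implementation:

# Hypothetical helper, shown only to illustrate the escaping described above;
# Superset applies this inside its Jinja url_param function, not via this helper.
def escape_single_quotes(value: str) -> str:
    # Double every single quote so the value is safe inside a SQL string literal.
    return value.replace("'", "''")


assert escape_single_quotes("O'Brien") == "O''Brien"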
4 changes: 2 additions & 2 deletions requirements/base.txt
@@ -168,7 +168,7 @@ packaging==21.0
 # via
 # bleach
 # deprecation
-pandas==1.2.5
+pandas==1.3.4
 # via apache-superset
 parsedatetime==2.6
 # via apache-superset
@@ -178,7 +178,7 @@ polyline==1.4.0
 # via apache-superset
 prison==0.2.1
 # via flask-appbuilder
-pyarrow==4.0.1
+pyarrow==5.0.0
 # via apache-superset
 pycparser==2.20
 # via cffi
4 changes: 2 additions & 2 deletions requirements/development.in
@@ -17,11 +17,11 @@
 # under the License.
 -r base.in
 flask-cors>=2.0.0
-mysqlclient==1.4.2.post1
+mysqlclient==2.0.3
 pillow>=8.3.1,<9
 pydruid>=0.6.1,<0.7
 pyhive[hive]>=0.6.1
-psycopg2-binary==2.8.5
+psycopg2-binary==2.9.1
 tableschema
 thrift>=0.11.0,<1.0.0
 progress>=1.5,<2
6 changes: 3 additions & 3 deletions requirements/development.txt
@@ -1,4 +1,4 @@
-# SHA1:dbd3e93a11a36fc6b18d6194ac96ba29bd0ad2a8
+# SHA1:a2fe77c9b8bffc8c8f3de4df6709c8be957c2f87
 #
 # This file is autogenerated by pip-compile-multi
 # To update, run:
@@ -36,15 +36,15 @@ jsonlines==2.0.0
 # via tabulator
 linear-tsv==1.1.0
 # via tabulator
-mysqlclient==1.4.2.post1
+mysqlclient==2.0.3
 # via -r requirements/development.in
 openpyxl==3.0.7
 # via tabulator
 pillow==8.3.1
 # via -r requirements/development.in
 progress==1.6
 # via -r requirements/development.in
-psycopg2-binary==2.8.5
+psycopg2-binary==2.9.1
 # via -r requirements/development.in
 pure-sasl==0.6.2
 # via thrift-sasl
3 changes: 3 additions & 0 deletions requirements/testing.in
@@ -19,13 +19,16 @@
 docker
 flask-testing
 freezegun
+google-cloud-bigquery
 ipdb
 # pinning ipython as pip-compile-multi was bringing higher version
 # of the ipython that was not found in CI
 ipython
 openapi-spec-validator
 openpyxl
+pandas_gbq
 parameterized
+pybigquery
 pyfakefs
 pyhive[presto]>=0.6.3
 pylint==2.9.6
31 changes: 21 additions & 10 deletions requirements/testing.txt
@@ -1,4 +1,4 @@
-# SHA1:a36e63b551290f1060a819fe4f1f50bc6200403c
+# SHA1:4aabffca9a6688f2911d6f8697495e7045a529d0
 #
 # This file is autogenerated by pip-compile-multi
 # To update, run:
@@ -31,7 +31,7 @@ flask-testing==0.8.1
 # via -r requirements/testing.in
 freezegun==1.1.0
 # via -r requirements/testing.in
-google-api-core[grpc]==2.1.0
+google-api-core[grpc]==2.2.1
 # via
 # google-cloud-bigquery
 # google-cloud-bigquery-storage
@@ -49,8 +49,9 @@ google-auth-oauthlib==0.4.6
 # via
 # pandas-gbq
 # pydata-google-auth
-google-cloud-bigquery[bqstorage,pandas]==2.28.0
+google-cloud-bigquery[bqstorage,pandas]==2.29.0
 # via
+# -r requirements/testing.in
 # apache-superset
 # pandas-gbq
 # pybigquery
@@ -60,14 +61,19 @@ google-cloud-core==2.1.0
 # via google-cloud-bigquery
 google-crc32c==1.3.0
 # via google-resumable-media
-google-resumable-media==2.0.3
+google-resumable-media==2.1.0
 # via google-cloud-bigquery
 googleapis-common-protos==1.53.0
-# via google-api-core
-grpcio==1.41.0
 # via
 # google-api-core
+# grpcio-status
+grpcio==1.41.1
+# via
+# google-api-core
 # google-cloud-bigquery
+# grpcio-status
+grpcio-status==1.41.1
+# via google-api-core
 iniconfig==1.1.1
 # via pytest
 ipdb==0.13.9
@@ -99,7 +105,9 @@ openapi-schema-validator==0.1.5
 openapi-spec-validator==0.3.1
 # via -r requirements/testing.in
 pandas-gbq==0.15.0
-# via apache-superset
+# via
+# -r requirements/testing.in
+# apache-superset
 parameterized==0.8.1
 # via -r requirements/testing.in
 parso==0.8.2
@@ -110,15 +118,16 @@ pickleshare==0.7.5
 # via ipython
 prompt-toolkit==3.0.19
 # via ipython
-proto-plus==1.19.2
+proto-plus==1.19.7
 # via
 # google-cloud-bigquery
 # google-cloud-bigquery-storage
-protobuf==3.18.1
+protobuf==3.19.1
 # via
 # google-api-core
 # google-cloud-bigquery
 # googleapis-common-protos
+# grpcio-status
 # proto-plus
 ptyprocess==0.7.0
 # via pexpect
@@ -129,7 +138,9 @@ pyasn1==0.4.8
 pyasn1-modules==0.2.8
 # via google-auth
 pybigquery==0.10.2
-# via apache-superset
+# via
+# -r requirements/testing.in
+# apache-superset
 pydata-google-auth==1.2.0
 # via pandas-gbq
 pyfakefs==4.5.0
4 changes: 2 additions & 2 deletions setup.py
@@ -90,15 +90,15 @@ def get_git_sha() -> str:
 "isodate",
 "markdown>=3.0",
 "msgpack>=1.0.0, <1.1",
-"pandas>=1.2.2, <1.3",
+"pandas>=1.3.0, <1.4",
 "parsedatetime",
 "pgsanity",
 "polyline",
 "pyparsing>=2.4.7, <3.0.0",
 "python-dateutil",
 "python-dotenv",
 "python-geohash",
-"pyarrow>=4.0.1, <4.1",
+"pyarrow>=5.0.0, <6.0",
 "pyyaml>=5.4",
 "PyJWT>=1.7.1, <2",
 "redis",
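The setup.py hunk above raises the floor to pandas 1.3.x and pyarrow 5.0.x. A minimal sketch, not part of the commit, that checks an installed environment against the new lower bounds; it assumes Python 3.8+ for importlib.metadata and that the packaging distribution is available:

# Editorial sketch: verify the environment satisfies the new pins from setup.py.
from importlib.metadata import version

from packaging.version import Version

assert Version(version("pandas")) >= Version("1.3.0"), "pandas is older than the new pin"
assert Version(version("pyarrow")) >= Version("5.0.0"), "pyarrow is older than the new pin"
print("pandas / pyarrow satisfy the bumped lower bounds")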
4 changes: 2 additions & 2 deletions superset/config.py
@@ -37,7 +37,7 @@
 from dateutil import tz
 from flask import Blueprint
 from flask_appbuilder.security.manager import AUTH_DB
-from pandas.io.parsers import STR_NA_VALUES
+from pandas._libs.parsers import STR_NA_VALUES # pylint: disable=no-name-in-module
 from typing_extensions import Literal
 from werkzeug.local import LocalProxy

@@ -1301,7 +1301,7 @@ def SQL_QUERY_MUTATOR( # pylint: disable=invalid-name,unused-argument
 elif importlib.util.find_spec("superset_config") and not is_test():
 try:
 import superset_config # pylint: disable=import-error
-from superset_config import * # type: ignore # pylint: disable=import-error,wildcard-import
+from superset_config import * # type: ignore # pylint: disable=import-error,wildcard-import,unused-wildcard-import

 print(f"Loaded your LOCAL configuration at [{superset_config.__file__}]")
 except Exception:
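The first config.py hunk above switches the `STR_NA_VALUES` import from `pandas.io.parsers` to `pandas._libs.parsers`, where pandas 1.3 still exposes the constant, and suppresses pylint's `no-name-in-module` check for the C-extension module. A minimal sketch, assuming code that must import the constant across both pandas lines; the try/except fallback is an illustration, not what the commit does:

# Editorial sketch: tolerate both import locations for STR_NA_VALUES.
# The commit itself simply moves to pandas._libs.parsers; the fallback is an assumption.
try:
    from pandas._libs.parsers import STR_NA_VALUES  # pylint: disable=no-name-in-module
except ImportError:
    # Older pandas releases re-exported the constant from the io.parsers module.
    from pandas.io.parsers import STR_NA_VALUES

print(sorted(STR_NA_VALUES)[:3])  # a few of the strings pandas treats as NA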
2 changes: 1 addition & 1 deletion superset/db_engine_specs/bigquery.py
@@ -384,7 +384,7 @@ def get_parameters_from_uri(

 @classmethod
 def get_dbapi_exception_mapping(cls) -> Dict[Type[Exception], Type[Exception]]:
-# pylint: disable=import-error,import-outside-toplevel
+# pylint: disable=import-outside-toplevel
 from google.auth.exceptions import DefaultCredentialsError

 return {DefaultCredentialsError: SupersetDBAPIDisconnectionError}
6 changes: 4 additions & 2 deletions superset/db_engine_specs/druid.py
@@ -57,10 +57,12 @@ class DruidEngineSpec(BaseEngineSpec):
 "P3M": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'P3M')",
 "P1Y": "TIME_FLOOR(CAST({col} AS TIMESTAMP), 'P1Y')",
 "P1W/1970-01-03T00:00:00Z": (
-"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', 5)" # pylint: disable=line-too-long
+"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), "
+"'P1D', 1), 'P1W'), 'P1D', 5)"
 ),
 "1969-12-28T00:00:00Z/P1W": (
-"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', -1)" # pylint: disable=line-too-long
+"TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), "
+"'P1D', 1), 'P1W'), 'P1D', -1)"
 ),
 }

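The druid.py hunk drops two `line-too-long` suppressions by splitting each SQL template across adjacent string literals, which Python joins into a single string at compile time. A minimal sketch of the same pattern using one template from the diff; the variable name is hypothetical:

# Editorial sketch: adjacent string literals inside parentheses are concatenated,
# so a long time-grain template can be wrapped without a pylint suppression.
WEEK_ENDING_SATURDAY = (
    "TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), "
    "'P1D', 1), 'P1W'), 'P1D', 5)"
)

# The wrapped form is identical to the original one-line template.
assert WEEK_ENDING_SATURDAY == (
    "TIME_SHIFT(TIME_FLOOR(TIME_SHIFT(CAST({col} AS TIMESTAMP), 'P1D', 1), 'P1W'), 'P1D', 5)"
)
print(WEEK_ENDING_SATURDAY.format(col="__time"))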
2 changes: 1 addition & 1 deletion superset/db_engine_specs/snowflake.py
@@ -231,7 +231,7 @@ def get_parameters_from_uri(

 @classmethod
 def validate_parameters(
-cls, parameters: SnowflakeParametersType # pylint: disable=unused-argument
+cls, parameters: SnowflakeParametersType
 ) -> List[SupersetError]:
 errors: List[SupersetError] = []
 required = {
2 changes: 1 addition & 1 deletion superset/sql_lab.py
@@ -176,7 +176,7 @@ def get_sql_results( # pylint: disable=too-many-arguments
 return handle_query_error(ex, query, session)


-def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-locals,too-many-statements
+def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-locals
 sql_statement: str,
 query: Query,
 user_name: Optional[str],
2 changes: 1 addition & 1 deletion superset/utils/core.py
@@ -16,7 +16,7 @@
 # under the License.
 """Utility functions used across Superset"""
 # pylint: disable=too-many-lines
-import _thread # pylint: disable=C0411
+import _thread
 import collections
 import decimal
 import errno
