diff --git a/superset/db_engine_specs/db2.py b/superset/db_engine_specs/db2.py
index 9288a069277d9..45241b3d89adb 100644
--- a/superset/db_engine_specs/db2.py
+++ b/superset/db_engine_specs/db2.py
@@ -27,7 +27,7 @@ class Db2EngineSpec(BaseEngineSpec):
     _time_grain_expressions = {
         None: "{col}",
-        "PT1S": "CAST({col} as TIMESTAMP)" " - MICROSECOND({col}) MICROSECONDS",
+        "PT1S": "CAST({col} as TIMESTAMP) - MICROSECOND({col}) MICROSECONDS",
         "PT1M": "CAST({col} as TIMESTAMP)"
         " - SECOND({col}) SECONDS"
         " - MICROSECOND({col}) MICROSECONDS",
@@ -45,7 +45,7 @@ class Db2EngineSpec(BaseEngineSpec):
         "P3M": "{col} - (DAY({col})-1) DAYS"
         " - (MONTH({col})-1) MONTHS"
         " + ((QUARTER({col})-1) * 3) MONTHS",
-        "P1Y": "{col} - (DAY({col})-1) DAYS" " - (MONTH({col})-1) MONTHS",
+        "P1Y": "{col} - (DAY({col})-1) DAYS - (MONTH({col})-1) MONTHS",
     }
 
     @classmethod
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index a7d4a13be3b3f..f591ef6d825a3 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -124,7 +124,7 @@ class HiveEngineSpec(PrestoEngineSpec):
     jobs_stats_r = re.compile(r".*INFO.*Total jobs = (?P<max_jobs>[0-9]+)")
     # 17/02/07 19:37:08 INFO ql.Driver: Launching Job 2 out of 5
     launching_job_r = re.compile(
-        ".*INFO.*Launching Job (?P<job_number>[0-9]+) out of " "(?P<max_jobs>[0-9]+)"
+        ".*INFO.*Launching Job (?P<job_number>[0-9]+) out of (?P<max_jobs>[0-9]+)"
     )
     # 17/02/07 19:36:58 INFO exec.Task: 2017-02-07 19:36:58,152 Stage-18
     # map = 0%, reduce = 0%
diff --git a/superset/db_engine_specs/mysql.py b/superset/db_engine_specs/mysql.py
index 110d127e2afb6..6258f6b21a4c6 100644
--- a/superset/db_engine_specs/mysql.py
+++ b/superset/db_engine_specs/mysql.py
@@ -131,13 +131,13 @@ class MySQLEngineSpec(BaseEngineSpec, BasicParametersMixin):
         " + SECOND({col})) SECOND)",
         "PT1M": "DATE_ADD(DATE({col}), "
         "INTERVAL (HOUR({col})*60 + MINUTE({col})) MINUTE)",
-        "PT1H": "DATE_ADD(DATE({col}), " "INTERVAL HOUR({col}) HOUR)",
+        "PT1H": "DATE_ADD(DATE({col}), INTERVAL HOUR({col}) HOUR)",
         "P1D": "DATE({col})",
-        "P1W": "DATE(DATE_SUB({col}, " "INTERVAL DAYOFWEEK({col}) - 1 DAY))",
-        "P1M": "DATE(DATE_SUB({col}, " "INTERVAL DAYOFMONTH({col}) - 1 DAY))",
+        "P1W": "DATE(DATE_SUB({col}, INTERVAL DAYOFWEEK({col}) - 1 DAY))",
+        "P1M": "DATE(DATE_SUB({col}, INTERVAL DAYOFMONTH({col}) - 1 DAY))",
         "P3M": "MAKEDATE(YEAR({col}), 1) "
         "+ INTERVAL QUARTER({col}) QUARTER - INTERVAL 1 QUARTER",
-        "P1Y": "DATE(DATE_SUB({col}, " "INTERVAL DAYOFYEAR({col}) - 1 DAY))",
+        "P1Y": "DATE(DATE_SUB({col}, INTERVAL DAYOFYEAR({col}) - 1 DAY))",
         "1969-12-29T00:00:00Z/P1W": "DATE(DATE_SUB({col}, "
         "INTERVAL DAYOFWEEK(DATE_SUB({col}, "
         "INTERVAL 1 DAY)) - 1 DAY))",
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index 532b97bea60a5..b38089e381b7e 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -1288,12 +1288,22 @@ def get_time_filter(  # pylint: disable=too-many-arguments
         time_col: "TableColumn",
         start_dttm: Optional[sa.DateTime],
         end_dttm: Optional[sa.DateTime],
+        time_grain: Optional[str] = None,
         label: Optional[str] = "__time",
         template_processor: Optional[BaseTemplateProcessor] = None,
     ) -> ColumnElement:
-        col = self.convert_tbl_column_to_sqla_col(
-            time_col, label=label, template_processor=template_processor
+        col = (
+            time_col.get_timestamp_expression(
+                time_grain=time_grain,
+                label=label,
+                template_processor=template_processor,
+            )
+            if time_grain
+            else self.convert_tbl_column_to_sqla_col(
+                time_col, label=label, template_processor=template_processor
+            )
         )
+
         l = []
         if start_dttm:
             l.append(
@@ -1353,6 +1363,7 @@ def get_timestamp_expression(
         """
         Return a SQLAlchemy Core element representation of self to be used in a query.
 
+        :param column: column object
        :param time_grain: Optional time grain, e.g. P1Y
         :param label: alias/label that column is expected to have
         :param template_processor: template processor
@@ -1699,6 +1710,7 @@ def get_sqla_query(  # pylint: disable=too-many-arguments,too-many-locals,too-ma
                 continue
             flt_col = flt["col"]
             val = flt.get("val")
+            flt_grain = flt.get("grain")
             op = flt["op"].upper()
             col_obj: Optional["TableColumn"] = None
             sqla_col: Optional[Column] = None
@@ -1855,6 +1867,7 @@ def get_sqla_query(  # pylint: disable=too-many-arguments,too-many-locals,too-ma
                             time_col=col_obj,
                             start_dttm=_since,
                             end_dttm=_until,
+                            time_grain=flt_grain,
                             label=sqla_col.key,
                             template_processor=template_processor,
                         )
@@ -1945,7 +1958,6 @@ def get_sqla_query(  # pylint: disable=too-many-arguments,too-many-locals,too-ma
             inner_groupby_exprs = []
             inner_select_exprs = []
             for gby_name, gby_obj in groupby_series_columns.items():
-                label = get_column_name(gby_name)
                 inner = self.make_sqla_column_compatible(gby_obj, gby_name + "__")
                 inner_groupby_exprs.append(inner)
                 inner_select_exprs.append(inner)
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index 5315bbbaed700..8ad24fc9de58d 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -1159,6 +1159,31 @@ def test_custom_cache_timeout(test_client, login_as_admin, physical_query_contex
     assert rv.json["result"][0]["cache_timeout"] == 5678
 
 
+def test_time_filter_with_grain(test_client, login_as_admin, physical_query_context):
+    physical_query_context["queries"][0]["filters"] = [
+        {
+            "col": "col5",
+            "op": "TEMPORAL_RANGE",
+            "val": "Last quarter : ",
+            "grain": "P1W",
+        },
+    ]
+    rv = test_client.post(CHART_DATA_URI, json=physical_query_context)
+    query = rv.json["result"][0]["query"]
+    backend = get_example_database().backend
+    if backend == "sqlite":
+        assert (
+            "DATETIME(col5, 'start of day', -strftime('%w', col5) || ' days') >="
+            in query
+        )
+    elif backend == "mysql":
+        assert "DATE(DATE_SUB(col5, INTERVAL DAYOFWEEK(col5) - 1 DAY)) >=" in query
+    elif backend == "postgresql":
+        assert "DATE_TRUNC('week', col5) >=" in query
+    elif backend == "presto":
+        assert "date_trunc('week', CAST(col5 AS TIMESTAMP)) >=" in query
+
+
 def test_force_cache_timeout(test_client, login_as_admin, physical_query_context):
     physical_query_context["custom_cache_timeout"] = -1
     test_client.post(CHART_DATA_URI, json=physical_query_context)