fix(dashboard): unset empty time filter indicator #16272

Merged
1 commit merged on Aug 16, 2021
3 changes: 2 additions & 1 deletion superset/common/query_actions.py
@@ -27,6 +27,7 @@
     ChartDataResultType,
     extract_column_dtype,
     extract_dataframe_dtypes,
+    ExtraFiltersReasonType,
     get_time_filter_status,
     QueryStatus,
 )
@@ -114,7 +115,7 @@ def _get_full(
         {"column": col} for col in filter_columns if col in columns
     ] + applied_time_columns
     payload["rejected_filters"] = [
-        {"reason": "not_in_datasource", "column": col}
+        {"reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE, "column": col}
         for col in filter_columns
         if col not in columns
     ] + rejected_time_columns
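Because ExtraFiltersReasonType (added below in superset/utils/core.py) subclasses str, swapping the hard-coded "not_in_datasource" literal for the enum member should leave the serialized chart payload unchanged. A minimal standalone sketch of that assumption, not part of the PR itself:

import json
from enum import Enum

class ExtraFiltersReasonType(str, Enum):
    # Mirrors the enum introduced in superset/utils/core.py below.
    NO_TEMPORAL_COLUMN = "no_temporal_column"
    COL_NOT_IN_DATASOURCE = "not_in_datasource"
    NOT_DRUID_DATASOURCE = "not_druid_datasource"

# A str-based enum member compares equal to its plain-string value...
assert ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE == "not_in_datasource"
# ...and json.dumps emits the same string the old literal produced.
assert (
    json.dumps({"reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE})
    == '{"reason": "not_in_datasource"}'
)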
21 changes: 15 additions & 6 deletions superset/utils/core.py
@@ -122,6 +122,8 @@
 
 DTTM_ALIAS = "__timestamp"
 
+NO_TIME_RANGE = "No filter"
+
 TIME_COMPARISION = "__"
 
 JS_MAX_INTEGER = 9007199254740991 # Largest int Java Script can handle 2^53-1
@@ -223,6 +225,12 @@ class ExtraFiltersTimeColumnType(str, Enum):
     TIME_RANGE = "__time_range"
 
 
+class ExtraFiltersReasonType(str, Enum):
+    NO_TEMPORAL_COLUMN = "no_temporal_column"
+    COL_NOT_IN_DATASOURCE = "not_in_datasource"
+    NOT_DRUID_DATASOURCE = "not_druid_datasource"
+
+
 class FilterOperator(str, Enum):
     """
     Operators used filter controls
@@ -1620,7 +1628,7 @@ def get_time_filter_status( # pylint: disable=too-many-branches
         else:
             rejected.append(
                 {
-                    "reason": "not_in_datasource",
+                    "reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE,
                     "column": ExtraFiltersTimeColumnType.TIME_COL,
                 }
             )
@@ -1632,19 +1640,20 @@ def get_time_filter_status( # pylint: disable=too-many-branches
         else:
             rejected.append(
                 {
-                    "reason": "no_temporal_column",
+                    "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN,
                     "column": ExtraFiltersTimeColumnType.TIME_GRAIN,
                 }
             )
 
-    if ExtraFiltersTimeColumnType.TIME_RANGE in applied_time_extras:
+    time_range = applied_time_extras.get(ExtraFiltersTimeColumnType.TIME_RANGE)
+    if time_range and time_range != NO_TIME_RANGE:
         # are there any temporal columns to assign the time grain to?
         if temporal_columns:
             applied.append({"column": ExtraFiltersTimeColumnType.TIME_RANGE})
         else:
             rejected.append(
                 {
-                    "reason": "no_temporal_column",
+                    "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN,
                     "column": ExtraFiltersTimeColumnType.TIME_RANGE,
                 }
             )
@@ -1655,7 +1664,7 @@ def get_time_filter_status( # pylint: disable=too-many-branches
         else:
             rejected.append(
                 {
-                    "reason": "not_druid_datasource",
+                    "reason": ExtraFiltersReasonType.NOT_DRUID_DATASOURCE,
                     "column": ExtraFiltersTimeColumnType.TIME_ORIGIN,
                 }
             )
@@ -1666,7 +1675,7 @@ def get_time_filter_status( # pylint: disable=too-many-branches
         else:
             rejected.append(
                 {
-                    "reason": "not_druid_datasource",
+                    "reason": ExtraFiltersReasonType.NOT_DRUID_DATASOURCE,
                     "column": ExtraFiltersTimeColumnType.GRANULARITY,
                 }
             )
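The behavioral core of the fix is the TIME_RANGE branch above: a time range equal to "No filter" is no longer counted as applied (or rejected), so the dashboard's time filter indicator is unset. A simplified, standalone sketch of that control flow — time_range_status is a made-up helper, not the actual get_time_filter_status:

from typing import Dict, List, Tuple

NO_TIME_RANGE = "No filter"
TIME_RANGE = "__time_range"

def time_range_status(
    temporal_columns: List[str], extras: Dict[str, str]
) -> Tuple[List[Dict[str, str]], List[Dict[str, str]]]:
    # Condensed stand-in for the TIME_RANGE branch of get_time_filter_status.
    applied: List[Dict[str, str]] = []
    rejected: List[Dict[str, str]] = []
    time_range = extras.get(TIME_RANGE)
    if time_range and time_range != NO_TIME_RANGE:  # "No filter" now short-circuits
        if temporal_columns:
            applied.append({"column": TIME_RANGE})
        else:
            rejected.append({"reason": "no_temporal_column", "column": TIME_RANGE})
    return applied, rejected

# An explicit "No filter" produces neither an applied nor a rejected entry.
assert time_range_status(["ds"], {TIME_RANGE: NO_TIME_RANGE}) == ([], [])
# A real range on a dataset with a temporal column is reported as applied.
assert time_range_status(["ds"], {TIME_RANGE: "1 year ago"}) == ([{"column": TIME_RANGE}], [])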
3 changes: 2 additions & 1 deletion superset/utils/date_parser.py
@@ -44,6 +44,7 @@
     TimeRangeAmbiguousError,
     TimeRangeParseFailError,
 )
+from superset.utils.core import NO_TIME_RANGE
 from superset.utils.memoized import memoized
 
 ParserElement.enablePackrat()
@@ -174,7 +175,7 @@ def get_since_until(
     _relative_start = relative_start if relative_start else "today"
     _relative_end = relative_end if relative_end else "today"
 
-    if time_range == "No filter":
+    if time_range == NO_TIME_RANGE:
         return None, None
 
     if time_range and time_range.startswith("Last") and separator not in time_range:
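With the literal replaced by the shared constant, get_since_until keeps short-circuiting on "No filter". A hedged usage sketch, assuming an environment where superset is importable on this branch:

from superset.utils.core import NO_TIME_RANGE
from superset.utils.date_parser import get_since_until

# An explicit "No filter" range yields no bounds, as in the branch above.
assert get_since_until(NO_TIME_RANGE) == (None, None)
# Concrete ranges still parse into datetime bounds.
since, until = get_since_until("2021-01-01 : 2021-02-01")
assert since is not None and until is not None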
3 changes: 2 additions & 1 deletion superset/viz.py
@@ -71,6 +71,7 @@
 from superset.utils.cache import set_and_log_cache
 from superset.utils.core import (
     DTTM_ALIAS,
+    ExtraFiltersReasonType,
     JS_MAX_INTEGER,
     merge_extra_filters,
     QueryMode,
@@ -477,7 +478,7 @@ def get_payload(self, query_obj: Optional[QueryObjectDict] = None) -> VizPayload
         if col in columns or col in filter_values_columns
     ] + applied_time_columns
     payload["rejected_filters"] = [
-        {"reason": "not_in_datasource", "column": col}
+        {"reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE, "column": col}
         for col in filter_columns
         if col not in columns and col not in filter_values_columns
     ] + rejected_time_columns
74 changes: 74 additions & 0 deletions tests/unit_tests/core_tests.py
@@ -20,11 +20,16 @@
 
 from superset.utils.core import (
     AdhocMetric,
+    ExtraFiltersReasonType,
+    ExtraFiltersTimeColumnType,
     GenericDataType,
     get_metric_name,
     get_metric_names,
+    get_time_filter_status,
     is_adhoc_metric,
+    NO_TIME_RANGE,
 )
+from tests.unit_tests.fixtures.datasets import get_dataset_mock
 
 STR_METRIC = "my_metric"
 SIMPLE_SUM_ADHOC_METRIC: AdhocMetric = {
@@ -98,3 +103,72 @@ def test_is_adhoc_metric():
     assert is_adhoc_metric(STR_METRIC) is False
     assert is_adhoc_metric(SIMPLE_SUM_ADHOC_METRIC) is True
     assert is_adhoc_metric(SQL_ADHOC_METRIC) is True
+
+
+def test_get_time_filter_status_time_col():
+    dataset = get_dataset_mock()
+
+    assert get_time_filter_status(
+        dataset, {ExtraFiltersTimeColumnType.TIME_COL: "ds"}
+    ) == ([{"column": ExtraFiltersTimeColumnType.TIME_COL}], [])
+
+
+def test_get_time_filter_status_time_range():
+    dataset = get_dataset_mock()
+
+    assert get_time_filter_status(
+        dataset, {ExtraFiltersTimeColumnType.TIME_RANGE: NO_TIME_RANGE}
+    ) == ([], [])
+
+    assert get_time_filter_status(
+        dataset, {ExtraFiltersTimeColumnType.TIME_RANGE: "1 year ago"}
+    ) == ([{"column": ExtraFiltersTimeColumnType.TIME_RANGE}], [])
+
+
+def test_get_time_filter_status_time_grain():
+    dataset = get_dataset_mock()
+
+    assert get_time_filter_status(
+        dataset, {ExtraFiltersTimeColumnType.TIME_GRAIN: "PT1M"}
+    ) == ([{"column": ExtraFiltersTimeColumnType.TIME_GRAIN}], [])
+
+
+def test_get_time_filter_status_no_temporal_col():
+    dataset = get_dataset_mock()
+    dataset.columns[0].is_dttm = False
+
+    assert get_time_filter_status(
+        dataset, {ExtraFiltersTimeColumnType.TIME_COL: "foobar"}
+    ) == (
+        [],
+        [
+            {
+                "reason": ExtraFiltersReasonType.COL_NOT_IN_DATASOURCE,
+                "column": ExtraFiltersTimeColumnType.TIME_COL,
+            }
+        ],
+    )
+
+    assert get_time_filter_status(
+        dataset, {ExtraFiltersTimeColumnType.TIME_RANGE: "1 year ago"}
+    ) == (
+        [],
+        [
+            {
+                "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN,
+                "column": ExtraFiltersTimeColumnType.TIME_RANGE,
+            }
+        ],
+    )
+
+    assert get_time_filter_status(
+        dataset, {ExtraFiltersTimeColumnType.TIME_GRAIN: "PT1M"}
+    ) == (
+        [],
+        [
+            {
+                "reason": ExtraFiltersReasonType.NO_TEMPORAL_COLUMN,
+                "column": ExtraFiltersTimeColumnType.TIME_GRAIN,
+            }
+        ],
+    )
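get_dataset_mock comes from tests/unit_tests/fixtures/datasets and is not reproduced in this diff; these tests appear to rely only on it exposing a columns list whose entries carry column_name and is_dttm, with "ds" as the single temporal column. Flipping is_dttm on that column, as test_get_time_filter_status_no_temporal_col does, is what forces the NO_TEMPORAL_COLUMN rejections. A hypothetical stand-in under that assumption:

from types import SimpleNamespace
from unittest.mock import Mock

def make_dataset_stub():
    # Hypothetical equivalent of get_dataset_mock(): one temporal column ("ds")
    # and one non-temporal column, which is all these assertions inspect.
    dataset = Mock()
    dataset.columns = [
        SimpleNamespace(column_name="ds", is_dttm=True),
        SimpleNamespace(column_name="gender", is_dttm=False),
    ]
    return dataset

stub = make_dataset_stub()
assert [c.column_name for c in stub.columns if c.is_dttm] == ["ds"]

The new tests themselves should run with plain pytest, e.g. pytest tests/unit_tests/core_tests.py.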