diff --git a/docs/docs/miscellaneous/native-filter-migration.mdx b/docs/docs/miscellaneous/native-filter-migration.mdx
new file mode 100644
index 0000000000000..b231c049b2378
--- /dev/null
+++ b/docs/docs/miscellaneous/native-filter-migration.mdx
@@ -0,0 +1,103 @@
+---
+title: Migrating from Legacy to Native Filters
+sidebar_position: 5
+version: 1
+---
+
+##
+
+The `superset native-filters` CLI command group—somewhat akin to an Alembic migration—
+comprises a number of sub-commands which allow administrators to upgrade/downgrade
+existing dashboards which use the legacy filter-box charts—in combination with the
+filter scopes/filter mapping—to use the native filter dashboard component.
+
+Even though legacy and native filters can coexist, the overall user experience (UX)
+is substandard, as the already convoluted filter space becomes overly complex. After
+enabling the `DASHBOARD_NATIVE_FILTERS` feature flag, it is strongly advised to run the
+migration as soon as possible to ensure users are not exposed to the hybrid state.
+
+### Upgrading
+
+The
+
+```
+superset native-filters upgrade
+```
+
+command—which provides the option to target either specific dashboard(s) or all
+dashboards—migrates the legacy filters to native filters.
+
+Specifically, the command performs the following:
+
+- Replaces every filter-box chart within the dashboard with a markdown element which
+provides a link to the deprecated chart. This preserves the layout whilst simultaneously
+providing context to help owners review/verify the change.
+- Migrates the filter scopes/filter mappings to the native filter configuration.
+
+#### Quality Control
+
+Dashboard owners should:
+
+- Verify that the filter behavior is correct.
+- Consolidate any conflicting/redundant filters—this may not have been obvious
+previously, given the embedded nature of the legacy filters and/or the non-optimal UX
+of the legacy filter mapping (scopes and immunity).
+- Rename the filters—which may not be uniquely named—to provide the necessary context
+which previously was likely provided by both the location of the filter-box and the
+corresponding filter-box title.
+
+Dashboard owners may:
+
+- Remove† the markdown elements from their dashboards and adjust the layout accordingly.
+
+† Note removing the markdown elements—which contain metadata relating to the replaced
+chart—prevents the dashboard from being fully restored, and thus this operation should
+only be performed if it is evident that a downgrade is not necessary.
+
+### Downgrading
+
+Similarly, the
+
+```
+superset native-filters downgrade
+```
+
+command reverses the migration, i.e., restores the dashboard to its previous state.
+
+### Cleanup
+
+The ability to downgrade/reverse the migration requires temporary storage of the
+dashboard metadata—relating to both positional composition and filter configuration.
+
+Once the upgrade has been verified, it is recommended to run the
+
+```
+superset native-filters cleanup
+```
+
+command—which provides the option to target either specific dashboard(s) or all
+dashboards. Note this operation is irreversible.
+
+Specifically, the command performs the following:
+
+- Removes the temporary dashboard metadata.
+- Deletes the filter-box charts associated with the dashboard†.
+
+† Note the markdown elements will remain; however, the link to the referenced filter-box
+chart will no longer be valid.
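+
+As with the other sub-commands, specific dashboards can be targeted by supplying the
+`--id` option one or more times instead of `--all` (the dashboard IDs below are purely
+illustrative):
+
+```
+superset native-filters cleanup --id 123 --id 456
+```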
+
+Finally, the
+
+```
+superset native-filters cleanup --all
+```
+
+command will additionally delete all filter-box charts, irrespective of whether they
+were ever associated with a dashboard.
+
+#### Quality Control
+
+Dashboard owners should:
+
+- Remove the markdown elements from their dashboards and adjust the layout accordingly.
diff --git a/requirements/base.txt b/requirements/base.txt
index 3a5ec607fe9f3..5e54e1e06d232 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -40,12 +40,15 @@ click==8.0.4
     # via
     #   apache-superset
     #   celery
     #   click-didyoumean
+    #   click-option-group
     #   click-plugins
     #   click-repl
     #   flask
     #   flask-appbuilder
 click-didyoumean==0.3.0
     # via celery
+click-option-group==0.5.5
+    # via apache-superset
 click-plugins==1.1.1
     # via celery
 click-repl==0.2.0
@@ -64,6 +67,8 @@ cryptography==39.0.1
     # via
     #   apache-superset
     #   paramiko
+deprecated==1.2.13
+    # via limits
 deprecation==2.1.0
     # via apache-superset
 dnspython==2.1.0
@@ -78,6 +83,7 @@ flask==2.1.3
     #   flask-caching
     #   flask-compress
     #   flask-jwt-extended
+    #   flask-limiter
     #   flask-login
     #   flask-migrate
     #   flask-sqlalchemy
@@ -92,6 +98,8 @@ flask-compress==1.13
     # via apache-superset
 flask-jwt-extended==4.3.1
     # via flask-appbuilder
+flask-limiter==3.3.0
+    # via flask-appbuilder
 flask-login==0.6.0
     # via
     #   apache-superset
@@ -128,6 +136,8 @@ humanize==3.11.0
     # via apache-superset
 idna==3.2
     # via email-validator
+importlib-metadata==6.0.0
+    # via flask
 isodate==0.6.0
     # via apache-superset
 itsdangerous==2.1.1
@@ -144,10 +154,14 @@ kombu==5.2.4
     # via celery
 korean-lunar-calendar==0.2.1
     # via holidays
+limits==3.2.0
+    # via flask-limiter
 mako==1.1.4
     # via alembic
 markdown==3.3.4
     # via apache-superset
+markdown-it-py==2.2.0
+    # via rich
 markupsafe==2.1.1
     # via
     #   jinja2
@@ -162,6 +176,8 @@ marshmallow-enum==1.5.1
     # via flask-appbuilder
 marshmallow-sqlalchemy==0.23.1
     # via flask-appbuilder
+mdurl==0.1.2
+    # via markdown-it-py
 msgpack==1.0.2
     # via apache-superset
 numpy==1.23.5
@@ -169,10 +185,13 @@ numpy==1.23.5
     #   apache-superset
     #   pandas
     #   pyarrow
+ordered-set==4.1.0
+    # via flask-limiter
 packaging==21.3
     # via
     #   bleach
     #   deprecation
+    #   limits
 pandas==1.5.3
     # via apache-superset
 paramiko==2.11.0
@@ -191,6 +210,8 @@ pyarrow==10.0.1
     # via apache-superset
 pycparser==2.20
     # via cffi
+pygments==2.14.0
+    # via rich
 pyjwt==2.4.0
     # via
     #   apache-superset
@@ -232,8 +253,12 @@ pyyaml==5.4.1
     #   apispec
 redis==3.5.3
     # via apache-superset
+rich==13.3.1
+    # via flask-limiter
 selenium==3.141.0
     # via apache-superset
+shortid==0.1.2
+    # via apache-superset
 simplejson==3.17.3
     # via apache-superset
 six==1.16.0
@@ -269,7 +294,11 @@ sshtunnel==0.4.0
 tabulate==0.8.9
     # via apache-superset
 typing-extensions==4.4.0
-    # via apache-superset
+    # via
+    #   apache-superset
+    #   flask-limiter
+    #   limits
+    #   rich
 urllib3==1.26.6
     # via selenium
 vine==5.0.0
@@ -286,6 +315,8 @@ werkzeug==2.1.2
     #   flask
     #   flask-jwt-extended
     #   flask-login
+wrapt==1.12.1
+    # via deprecated
 wtforms==2.3.3
     # via
     #   apache-superset
@@ -296,6 +327,8 @@ wtforms-json==0.3.3
     # via apache-superset
 xlsxwriter==3.0.7
     # via apache-superset
+zipp==3.15.0
+    # via importlib-metadata
 
 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
diff --git a/requirements/development.txt b/requirements/development.txt
index 47fe7a17372dd..aa92fcfda4d89 100644
--- a/requirements/development.txt
+++ b/requirements/development.txt
@@ -80,8 +80,6 @@ pure-sasl==0.6.2
     # via thrift-sasl
 pydruid==0.6.5
     # via apache-superset
-pygments==2.12.0
-    # via ipython
 pyhive[hive]==0.6.5
     # via
apache-superset pyinstrument==4.0.2 diff --git a/requirements/integration.txt b/requirements/integration.txt index 59c619a38602d..c11f956c68d03 100644 --- a/requirements/integration.txt +++ b/requirements/integration.txt @@ -30,7 +30,7 @@ packaging==21.3 pep517==0.11.0 # via build pip-compile-multi==2.6.2 - # via -r integration.in + # via -r requirements/integration.in pip-tools==6.8.0 # via pip-compile-multi platformdirs==2.6.2 @@ -38,7 +38,7 @@ platformdirs==2.6.2 pluggy==0.13.1 # via tox pre-commit==3.2.2 - # via -r integration.in + # via -r requirements/integration.in py==1.10.0 # via tox pyparsing==3.0.6 @@ -50,11 +50,11 @@ six==1.16.0 toml==0.10.2 # via tox tomli==1.2.1 - # via pep517 + # via build toposort==1.6 # via pip-compile-multi tox==3.25.1 - # via -r integration.in + # via -r requirements/integration.in virtualenv==20.17.1 # via # pre-commit diff --git a/setup.py b/setup.py index c6850070a0a71..b314614b46b8d 100644 --- a/setup.py +++ b/setup.py @@ -77,6 +77,7 @@ def get_git_sha() -> str: "cachelib>=0.4.1,<0.5", "celery>=5.2.2, <6.0.0", "click>=8.0.3", + "click-option-group", "colorama", "croniter>=0.3.28", "cron-descriptor", @@ -114,6 +115,7 @@ def get_git_sha() -> str: "PyJWT>=2.4.0, <3.0", "redis", "selenium>=3.141.0", + "shortid", "sshtunnel>=0.4.0, <0.5", "simplejson>=3.15.0", "slack_sdk>=3.1.1, <4", diff --git a/superset/cli/main.py b/superset/cli/main.py index aaad7be42e864..006f8eb5c9e80 100755 --- a/superset/cli/main.py +++ b/superset/cli/main.py @@ -50,9 +50,12 @@ def make_shell_context() -> Dict[str, Any]: ): module = importlib.import_module(module_name) for attribute in module.__dict__.values(): - if isinstance(attribute, click.core.Command): + if isinstance(attribute, (click.core.Command, click.core.Group)): superset.add_command(attribute) + if isinstance(attribute, click.core.Group): + break + @superset.command() @with_appcontext diff --git a/superset/cli/native_filters.py b/superset/cli/native_filters.py new file mode 100644 index 0000000000000..d6172855be505 --- /dev/null +++ b/superset/cli/native_filters.py @@ -0,0 +1,398 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import json +from copy import deepcopy +from textwrap import dedent +from typing import Set, Tuple + +import click +from click_option_group import optgroup, RequiredMutuallyExclusiveOptionGroup +from flask.cli import with_appcontext +from sqlalchemy import Column, ForeignKey, Integer, String, Table, Text +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship + +from superset import db, is_feature_enabled + +Base = declarative_base() + + +dashboard_slices = Table( + "dashboard_slices", + Base.metadata, + Column("id", Integer, primary_key=True), + Column("dashboard_id", Integer, ForeignKey("dashboards.id")), + Column("slice_id", Integer, ForeignKey("slices.id")), +) + + +slice_user = Table( + "slice_user", + Base.metadata, + Column("id", Integer, primary_key=True), + Column("slice_id", Integer, ForeignKey("slices.id")), +) + + +class Dashboard(Base): # type: ignore # pylint: disable=too-few-public-methods + __tablename__ = "dashboards" + + id = Column(Integer, primary_key=True) + json_metadata = Column(Text) + slices = relationship("Slice", secondary=dashboard_slices, backref="dashboards") + position_json = Column() + + def __repr__(self) -> str: + return f"Dashboard<{self.id}>" + + +class Slice(Base): # type: ignore # pylint: disable=too-few-public-methods + __tablename__ = "slices" + + id = Column(Integer, primary_key=True) + datasource_id = Column(Integer) + params = Column(Text) + slice_name = Column(String(250)) + viz_type = Column(String(250)) + + def __repr__(self) -> str: + return f"Slice<{self.id}>" + + +@click.group() +def native_filters() -> None: + """ + Perform native filter operations. + """ + + +@native_filters.command() +@with_appcontext +@optgroup.group( + "Grouped options", + cls=RequiredMutuallyExclusiveOptionGroup, +) +@optgroup.option( + "--all", + "all_", + default=False, + help="Upgrade all dashboards", + is_flag=True, +) +@optgroup.option( + "--id", + "dashboard_ids", + help="Upgrade the specific dashboard. Can be supplied multiple times.", + multiple=True, + type=int, +) +def upgrade( + all_: bool, # pylint: disable=unused-argument + dashboard_ids: Tuple[int, ...], +) -> None: + """ + Upgrade legacy filter-box charts to native dashboard filters. + """ + + # pylint: disable=import-outside-toplevel + from superset.utils.dashboard_filter_scopes_converter import ( + convert_filter_scopes_to_native_filters, + ) + + if not is_feature_enabled("DASHBOARD_NATIVE_FILTERS"): + click.echo("The 'DASHBOARD_NATIVE_FILTERS' feature needs to be enabled.") + return + + # Mapping between the CHART- and MARKDOWN- IDs. + mapping = {} + + for dashboard in ( # pylint: disable=too-many-nested-blocks + db.session.query(Dashboard) + .filter(*[Dashboard.id.in_(dashboard_ids)] if dashboard_ids else []) + .all() + ): + click.echo(f"Upgrading {str(dashboard)}") + + try: + json_metadata = json.loads(dashboard.json_metadata or "{}") + position_json = json.loads(dashboard.position_json or "{}") + + if "native_filter_migration" in json_metadata: + click.echo(f"{dashboard} has already been upgraded") + continue + + # Save the native and legacy filter configurations for recovery purposes. + json_metadata["native_filter_migration"] = { + key: deepcopy(json_metadata[key]) + for key in ( + "default_filters", + "filter_scopes", + "native_filter_configuration", + ) + if key in json_metadata + } + + filter_boxes_by_id = { + slc.id: slc for slc in dashboard.slices if slc.viz_type == "filter_box" + } + + # Convert the legacy filter configurations to native filters. 
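+            # Any native filters already present in the metadata are preserved; the
+            # converted legacy filters are appended to the same list below.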
+            native_filter_configuration = json_metadata.setdefault(
+                "native_filter_configuration",
+                [],
+            )
+
+            native_filter_configuration.extend(
+                convert_filter_scopes_to_native_filters(
+                    json_metadata,
+                    position_json,
+                    filter_boxes=list(filter_boxes_by_id.values()),
+                ),
+            )
+
+            # Remove the legacy filter configuration.
+            for key in ["default_filters", "filter_scopes"]:
+                json_metadata.pop(key, None)
+
+            # Replace the filter-box charts with markdown elements.
+            for key, value in list(position_json.items()):  # Immutable iteration
+                if (
+                    isinstance(value, dict)
+                    and value["type"] == "CHART"
+                    and value["meta"]["chartId"] in filter_boxes_by_id
+                ):
+                    slc = filter_boxes_by_id[value["meta"]["chartId"]]
+                    mapping[key] = key.replace("CHART-", "MARKDOWN-")
+
+                    value["id"] = mapping[key]
+                    value["type"] = "MARKDOWN"
+
+                    value["meta"]["code"] = dedent(
+                        f"""
+                        ⚠ The {slc.slice_name}
+                        filter-box chart has been migrated to a native filter.
+
+                        This placeholder markdown element can be safely removed after
+                        verifying that the native filter has been correctly applied,
+                        otherwise ask an admin to revert the migration.
+                        """
+                    )
+
+                    # Save the filter-box info for recovery purposes.
+                    value["meta"]["native_filter_migration"] = {
+                        key: value["meta"].pop(key)
+                        for key in (
+                            "chartId",
+                            "sliceName",
+                            "sliceNameOverride",
+                        )
+                        if key in value["meta"]
+                    }
+
+                    position_json[mapping[key]] = value
+                    del position_json[key]
+
+            # Replace the relevant CHART- references.
+            for value in position_json.values():
+                if isinstance(value, dict):
+                    for relation in ["children", "parents"]:
+                        if relation in value:
+                            for idx, key in enumerate(value[relation]):
+                                if key in mapping:
+                                    value[relation][idx] = mapping[key]
+
+            # Remove the filter-box charts from the dashboard/slice mapping.
+            dashboard.slices = [
+                slc for slc in dashboard.slices if slc.viz_type != "filter_box"
+            ]
+
+            dashboard.json_metadata = json.dumps(json_metadata)
+            dashboard.position_json = json.dumps(position_json)
+        except Exception:  # pylint: disable=broad-except
+            click.echo(f"Unable to upgrade {str(dashboard)}")
+
+    db.session.commit()
+    db.session.close()
+
+
+@native_filters.command()
+@with_appcontext
+@optgroup.group(
+    "Grouped options",
+    cls=RequiredMutuallyExclusiveOptionGroup,
+)
+@optgroup.option(
+    "--all",
+    "all_",
+    default=False,
+    help="Downgrade all dashboards",
+    is_flag=True,
+)
+@optgroup.option(
+    "--id",
+    "dashboard_ids",
+    help="Downgrade the specific dashboard. Can be supplied multiple times.",
+    multiple=True,
+    type=int,
+)
+def downgrade(
+    all_: bool,  # pylint: disable=unused-argument
+    dashboard_ids: Tuple[int, ...],
+) -> None:
+    """
+    Downgrade native dashboard filters to legacy filter-box charts (where applicable).
+    """
+
+    # Mapping between the MARKDOWN- and CHART- IDs.
+    mapping = {}
+
+    for dashboard in (  # pylint: disable=too-many-nested-blocks
+        db.session.query(Dashboard)
+        .filter(*[Dashboard.id.in_(dashboard_ids)] if dashboard_ids else [])
+        .all()
+    ):
+        click.echo(f"Downgrading {str(dashboard)}")
+
+        try:
+            json_metadata = json.loads(dashboard.json_metadata or "{}")
+            position_json = json.loads(dashboard.position_json or "{}")
+
+            if "native_filter_migration" not in json_metadata:
+                click.echo(f"{str(dashboard)} has not been upgraded")
+                continue
+
+            # Restore the native and legacy filter configurations.
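+            # The current configuration keys are dropped first, then the snapshot
+            # taken during the upgrade is re-applied on top of the metadata.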
+ for key in ( + "default_filters", + "filter_scopes", + "native_filter_configuration", + ): + json_metadata.pop(key, None) + + json_metadata.update(json_metadata.pop("native_filter_migration")) + + # Replace the relevant markdown elements with filter-box charts. + slice_ids = set() + + for key, value in list(position_json.items()): # Immutable iteration + if ( + isinstance(value, dict) + and value["type"] == "MARKDOWN" + and "native_filter_migration" in value["meta"] + ): + value["meta"].update(value["meta"].pop("native_filter_migration")) + slice_ids.add(value["meta"]["chartId"]) + mapping[key] = key.replace("MARKDOWN-", "CHART-") + value["id"] = mapping[key] + del value["meta"]["code"] + value["type"] = "CHART" + position_json[mapping[key]] = value + del position_json[key] + + # Replace the relevant CHART- references. + for value in position_json.values(): + if isinstance(value, dict): + for relation in ["children", "parents"]: + if relation in value: + for idx, key in enumerate(value[relation]): + if key in mapping: + value[relation][idx] = mapping[key] + + # Restore the filter-box charts to the dashboard/slice mapping. + for slc in db.session.query(Slice).filter(Slice.id.in_(slice_ids)).all(): + dashboard.slices.append(slc) + + dashboard.json_metadata = json.dumps(json_metadata) + dashboard.position_json = json.dumps(position_json) + except Exception: # pylint: disable=broad-except + click.echo(f"Unable to downgrade {str(dashboard)}") + + db.session.commit() + db.session.close() + + +@native_filters.command() +@with_appcontext +@optgroup.group( + "Grouped options", + cls=RequiredMutuallyExclusiveOptionGroup, +) +@optgroup.option( + "--all", + "all_", + default=False, + help="Cleanup all dashboards", + is_flag=True, +) +@optgroup.option( + "--id", + "dashboard_ids", + help="Cleanup the specific dashboard. Can be supplied multiple times.", + multiple=True, + type=int, +) +def cleanup( + all_: bool, # pylint: disable=unused-argument + dashboard_ids: Tuple[int, ...], +) -> None: + """ + Cleanup obsolete legacy filter-box charts and interim metadata. + + Note this operation is irreversible. + """ + + slice_ids: Set[int] = set() + + # Cleanup the dashboard which contains legacy fields used for downgrading. + for dashboard in ( + db.session.query(Dashboard) + .filter(*[Dashboard.id.in_(dashboard_ids)] if dashboard_ids else []) + .all() + ): + click.echo(f"Cleaning up {str(dashboard)}") + + try: + json_metadata = json.loads(dashboard.json_metadata or "{}") + position_json = json.loads(dashboard.position_json or "{}") + + if "native_filter_migration" not in json_metadata: + click.echo(f"{str(dashboard)} has not been upgraded") + continue + + # Remove the saved filter configurations. + del json_metadata["native_filter_migration"] + dashboard.json_metadata = json.dumps(json_metadata) + + for value in position_json.values(): + if ( + isinstance(value, dict) + and "native_filter_migration" in value["meta"] + ): + slice_ids.add(value["meta"]["native_filter_migration"]["chartId"]) + del value["meta"]["native_filter_migration"] + + dashboard.json_metadata = json.dumps(json_metadata) + dashboard.position_json = json.dumps(position_json) + except Exception: # pylint: disable=broad-except + click.echo(f"Unable to cleanup {str(dashboard)}") + + # Delete the obsolete filter-box charts associated with the dashboards. 
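+    # The slice_user rows are removed before the slices themselves so that the
+    # foreign key from slice_user.slice_id to slices.id is not violated.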
+ db.session.query(slice_user).filter(slice_user.c.slice_id.in_(slice_ids)).delete() + db.session.query(Slice).filter(Slice.id.in_(slice_ids)).delete() + + db.session.commit() + db.session.close() diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py index 014f5dafd53ba..c1f435301cd07 100644 --- a/superset/dashboards/schemas.py +++ b/superset/dashboards/schemas.py @@ -134,6 +134,7 @@ class DashboardJSONMetadataSchema(Schema): import_time = fields.Integer() remote_id = fields.Integer() filter_bar_orientation = fields.Str(allow_none=True) + native_filter_migration = fields.Dict() @pre_load def remove_show_native_filters( # pylint: disable=unused-argument, no-self-use diff --git a/superset/utils/dashboard_filter_scopes_converter.py b/superset/utils/dashboard_filter_scopes_converter.py index 2e48c114152a3..3cc93c967af21 100644 --- a/superset/utils/dashboard_filter_scopes_converter.py +++ b/superset/utils/dashboard_filter_scopes_converter.py @@ -19,13 +19,15 @@ from collections import defaultdict from typing import Any, Dict, List +from shortid import ShortId + from superset.models.slice import Slice logger = logging.getLogger(__name__) def convert_filter_scopes( - json_metadata: Dict[Any, Any], filters: List[Slice] + json_metadata: Dict[Any, Any], filter_boxes: List[Slice] ) -> Dict[int, Dict[str, Dict[str, Any]]]: filter_scopes = {} immuned_by_id: List[int] = json_metadata.get("filter_immune_slices") or [] @@ -51,10 +53,10 @@ def add_filter_scope( else: logging.info("slice [%i] has invalid field: %s", filter_id, filter_field) - for filter_slice in filters: + for filter_box in filter_boxes: filter_fields: Dict[str, Dict[str, Any]] = {} - filter_id = filter_slice.id - slice_params = json.loads(filter_slice.params or "{}") + filter_id = filter_box.id + slice_params = json.loads(filter_box.params or "{}") configs = slice_params.get("filter_configs") or [] if slice_params.get("date_filter"): @@ -88,3 +90,249 @@ def copy_filter_scopes( if int(slice_id) in old_to_new_slc_id_dict ] return new_filter_scopes + + +def convert_filter_scopes_to_native_filters( # pylint: disable=invalid-name,too-many-branches,too-many-locals,too-many-nested-blocks,too-many-statements + json_metadata: Dict[str, Any], + position_json: Dict[str, Any], + filter_boxes: List[Slice], +) -> List[Dict[str, Any]]: + """ + Convert the legacy filter scopes et al. to the native filter configuration. + + Dashboard filter scopes are implicitly defined where an undefined scope implies + no immunity, i.e., they apply to all applicable charts. The `convert_filter_scopes` + method provides an explicit definition by extracting the underlying filter-box + configurations. + + Hierarchical legacy filters are defined via non-exclusion of peer or children + filter-box charts whereas native hierarchical filters are defined via explicit + parental relationships, i.e., the inverse. 
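+
+    For example, if filter-box chart A falls within the scope of filter-box chart B
+    and is not immune to it, the select filters generated for A will list B's filters
+    in their ``cascadeParentIds``.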
+ + :param json_metata: The dashboard metadata + :param position_json: The dashboard layout + :param filter_boxes: The filter-box charts associated with the dashboard + :returns: The native filter configuration + :see: convert_filter_scopes + """ + + shortid = ShortId() + default_filters = json.loads(json_metadata.get("default_filters") or "{}") + filter_scopes = json_metadata.get("filter_scopes", {}) + filter_box_ids = {filter_box.id for filter_box in filter_boxes} + + filter_scope_by_key_and_field: Dict[str, Dict[str, Dict[str, Any]]] = defaultdict( + dict + ) + + filter_by_key_and_field: Dict[str, Dict[str, Dict[str, Any]]] = defaultdict(dict) + + # Dense representation of filter scopes, falling back to chart level filter configs + # if the respective filter scope is not defined at the dashboard level. + for filter_box in filter_boxes: + key = str(filter_box.id) + + filter_scope_by_key_and_field[key] = { + **( + convert_filter_scopes( + json_metadata, + filter_boxes=[filter_box], + ).get(filter_box.id, {}) + ), + **(filter_scopes.get(key, {})), + } + + # Contruct the native filters. + for filter_box in filter_boxes: + key = str(filter_box.id) + params = json.loads(filter_box.params or "{}") + + for field, filter_scope in filter_scope_by_key_and_field[key].items(): + default = default_filters.get(key, {}).get(field) + + fltr: Dict[str, Any] = { + "cascadeParentIds": [], + "id": f"NATIVE_FILTER-{shortid.generate()}", + "scope": { + "rootPath": filter_scope["scope"], + "excluded": [ + id_ + for id_ in filter_scope["immune"] + if id_ not in filter_box_ids + ], + }, + "type": "NATIVE_FILTER", + } + + if field == "__time_col" and params.get("show_sqla_time_column"): + fltr.update( + { + "filterType": "filter_timecolumn", + "name": "Time Column", + "targets": [{"datasetId": filter_box.datasource_id}], + } + ) + + if not default: + default = params.get("granularity_sqla") + + if default: + fltr["defaultDataMask"] = { + "extraFormData": {"granularity_sqla": default}, + "filterState": {"value": [default]}, + } + elif field == "__time_grain" and params.get("show_sqla_time_granularity"): + fltr.update( + { + "filterType": "filter_timegrain", + "name": "Time Grain", + "targets": [{"datasetId": filter_box.datasource_id}], + } + ) + + if not default: + default = params.get("time_grain_sqla") + + if default: + fltr["defaultDataMask"] = { + "extraFormData": {"time_grain_sqla": default}, + "filterState": {"value": [default]}, + } + elif field == "__time_range" and params.get("date_filter"): + fltr.update( + { + "filterType": "filter_time", + "name": "Time Range", + "targets": [{}], + } + ) + + if not default: + default = params.get("time_range") + + if default and default != "No filter": + fltr["defaultDataMask"] = { + "extraFormData": {"time_range": default}, + "filterState": {"value": default}, + } + else: + for config in params.get("filter_configs") or []: + if config["column"] == field: + fltr.update( + { + "controlValues": { + "defaultToFirstItem": False, + "enableEmptyFilter": not config.get( + "clearable", + True, + ), + "inverseSelection": False, + "multiSelect": config.get( + "multiple", + False, + ), + "searchAllOptions": config.get( + "searchAllOptions", + False, + ), + }, + "filterType": "filter_select", + "name": config.get("label") or field, + "targets": [ + { + "column": {"name": field}, + "datasetId": filter_box.datasource_id, + }, + ], + } + ) + + if "metric" in config: + fltr["sortMetric"] = config["metric"] + fltr["controlValues"]["sortAscending"] = config["asc"] + + if 
params.get("adhoc_filters"): + fltr["adhoc_filters"] = params["adhoc_filters"] + + # Pre-filter available values based on time range/column. + time_range = params.get("time_range") + + if time_range and time_range != "No filter": + fltr.update( + { + "time_range": time_range, + "granularity_sqla": params.get("granularity_sqla"), + } + ) + + if not default: + default = config.get("defaultValue") + + if default: + if config["multiple"]: + default = default.split(";") + else: + default = [default] + + if default: + fltr["defaultDataMask"] = { + "extraFormData": { + "filters": [ + { + "col": field, + "op": "IN", + "val": default, + } + ], + }, + "filterState": {"value": default}, + } + + break + + if "filterType" in fltr: + filter_by_key_and_field[key][field] = fltr + + # Ancestors of filter-box charts. + ancestors_by_id = defaultdict(set) + + for filter_box in filter_boxes: + for value in position_json.values(): + if ( + isinstance(value, dict) + and value["type"] == "CHART" + and value["meta"]["chartId"] == filter_box.id + and value["parents"] # Misnomer as this the the complete ancestry. + ): + ancestors_by_id[filter_box.id] = set(value["parents"]) + + # Wire up the hierarchical filters. + for this in filter_boxes: + for other in filter_boxes: + if ( + this != other + and any( # Immunity is at the chart rather than field level. + this.id not in filter_scope["immune"] + and set(filter_scope["scope"]) <= ancestors_by_id[this.id] + for filter_scope in filter_scope_by_key_and_field[ + str(other.id) + ].values() + ) + ): + for child in filter_by_key_and_field[str(this.id)].values(): + if child["filterType"] == "filter_select": + for parent in filter_by_key_and_field[str(other.id)].values(): + if ( + parent["filterType"] in {"filter_select", "filter_time"} + and parent["id"] not in child["cascadeParentIds"] + ): + child["cascadeParentIds"].append(parent["id"]) + + return sorted( + [ + fltr + for key in filter_by_key_and_field + for fltr in filter_by_key_and_field[key].values() + ], + key=lambda fltr: fltr["filterType"], + )