From 9c6d53567ca04ebdfe2ba32c2cc3dce06b2642db Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:54:00 -0600 Subject: [PATCH 01/43] build(deps-dev): bump eslint from 8.44.0 to 8.45.0 in /superset-websocket (#24712) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-websocket/package-lock.json | 18 +++++++----------- superset-websocket/package.json | 2 +- 2 files changed, 8 insertions(+), 12 deletions(-) diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json index 730702ff73f34..a709c8aadc030 100644 --- a/superset-websocket/package-lock.json +++ b/superset-websocket/package-lock.json @@ -27,7 +27,7 @@ "@types/ws": "^8.5.5", "@typescript-eslint/eslint-plugin": "^5.61.0", "@typescript-eslint/parser": "^5.62.0", - "eslint": "^8.44.0", + "eslint": "^8.45.0", "eslint-config-prettier": "^8.8.0", "jest": "^27.3.1", "prettier": "^2.8.8", @@ -2493,9 +2493,9 @@ } }, "node_modules/eslint": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.44.0.tgz", - "integrity": "sha512-0wpHoUbDUHgNCyvFB5aXLiQVfK9B0at6gUvzy83k4kAsQ/u769TQDX6iKC+aO4upIHO9WSaA3QoXYQDHbNwf1A==", + "version": "8.45.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.45.0.tgz", + "integrity": "sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", @@ -2523,7 +2523,6 @@ "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", - "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", @@ -2535,7 +2534,6 @@ "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" }, "bin": { @@ -7900,9 +7898,9 @@ } }, "eslint": { - "version": "8.44.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.44.0.tgz", - "integrity": "sha512-0wpHoUbDUHgNCyvFB5aXLiQVfK9B0at6gUvzy83k4kAsQ/u769TQDX6iKC+aO4upIHO9WSaA3QoXYQDHbNwf1A==", + "version": "8.45.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.45.0.tgz", + "integrity": "sha512-pd8KSxiQpdYRfYa9Wufvdoct3ZPQQuVuU5O6scNgMuOMYuxvH0IGaYK0wUFjo4UYYQQCUndlXiMbnxopwvvTiw==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", @@ -7930,7 +7928,6 @@ "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", - "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", @@ -7942,7 +7939,6 @@ "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", - "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" }, "dependencies": { diff --git a/superset-websocket/package.json b/superset-websocket/package.json index a657bb52e57e0..8f9f89828ee0f 100644 --- a/superset-websocket/package.json +++ b/superset-websocket/package.json @@ -33,7 +33,7 @@ "@types/ws": "^8.5.5", "@typescript-eslint/eslint-plugin": "^5.61.0", "@typescript-eslint/parser": "^5.62.0", - "eslint": "^8.44.0", + "eslint": "^8.45.0", "eslint-config-prettier": "^8.8.0", "jest": "^27.3.1", "prettier": "^2.8.8", From 1b5a6790f067f1c690354fcc8e96c484f6ee9285 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Tue, 18 Jul 2023 08:17:52 -0700 Subject: [PATCH 02/43] chore: Remove obsolete legacy visualizations (#24694) --- 
superset/common/query_context_processor.py | 65 +++- superset/examples/birth_names.py | 10 + superset/viz.py | 213 ------------ tests/integration_tests/cache_tests.py | 4 +- tests/integration_tests/charts/api_tests.py | 2 +- .../charts/commands_tests.py | 2 +- tests/integration_tests/core_tests.py | 14 +- tests/integration_tests/security_tests.py | 2 +- tests/integration_tests/utils_tests.py | 2 +- tests/integration_tests/viz_tests.py | 307 +----------------- 10 files changed, 80 insertions(+), 541 deletions(-) diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py index 56f53cc16a983..f6152b232a938 100644 --- a/superset/common/query_context_processor.py +++ b/superset/common/query_context_processor.py @@ -33,7 +33,10 @@ from superset.common.query_actions import get_query_results from superset.common.utils import dataframe_utils from superset.common.utils.query_cache_manager import QueryCacheManager -from superset.common.utils.time_range_utils import get_since_until_from_query_object +from superset.common.utils.time_range_utils import ( + get_since_until_from_query_object, + get_since_until_from_time_range, +) from superset.connectors.base.models import BaseDatasource from superset.constants import CacheRegion, TimeGrain from superset.daos.annotation import AnnotationLayerDAO @@ -64,6 +67,7 @@ from superset.utils.date_parser import get_past_or_future, normalize_time_delta from superset.utils.pandas_postprocessing.utils import unescape_separator from superset.views.utils import get_viz +from superset.viz import viz_types if TYPE_CHECKING: from superset.common.query_context import QueryContext @@ -685,22 +689,53 @@ def get_native_annotation_data(query_obj: QueryObject) -> dict[str, Any]: def get_viz_annotation_data( annotation_layer: dict[str, Any], force: bool ) -> dict[str, Any]: - chart = ChartDAO.find_by_id(annotation_layer["value"]) - if not chart: + # pylint: disable=import-outside-toplevel,superfluous-parens + from superset.charts.data.commands.get_data_command import ChartDataCommand + + if not (chart := ChartDAO.find_by_id(annotation_layer["value"])): raise QueryObjectValidationError(_("The chart does not exist")) - if not chart.datasource: - raise QueryObjectValidationError(_("The chart datasource does not exist")) - form_data = chart.form_data.copy() - form_data.update(annotation_layer.get("overrides", {})) + try: - viz_obj = get_viz( - datasource_type=chart.datasource.type, - datasource_id=chart.datasource.id, - form_data=form_data, - force=force, - ) - payload = viz_obj.get_payload() - return payload["data"] + if chart.viz_type in viz_types: + if not chart.datasource: + raise QueryObjectValidationError( + _("The chart datasource does not exist"), + ) + + form_data = chart.form_data.copy() + form_data.update(annotation_layer.get("overrides", {})) + + payload = get_viz( + datasource_type=chart.datasource.type, + datasource_id=chart.datasource.id, + form_data=form_data, + force=force, + ).get_payload() + + return payload["data"] + + if not (query_context := chart.get_query_context()): + raise QueryObjectValidationError( + _("The chart query context does not exist"), + ) + + if overrides := annotation_layer.get("overrides"): + if time_grain_sqla := overrides.get("time_grain_sqla"): + for query_object in query_context.queries: + query_object.extras["time_grain_sqla"] = time_grain_sqla + + if time_range := overrides.get("time_range"): + from_dttm, to_dttm = get_since_until_from_time_range(time_range) + + for query_object in 
query_context.queries: + query_object.from_dttm = from_dttm + query_object.to_dttm = to_dttm + + query_context.force = force + command = ChartDataCommand(query_context) + command.validate() + payload = command.run() + return {"records": payload["queries"][0]["data"]} except SupersetException as ex: raise QueryObjectValidationError(error_msg_from_exception(ex)) from ex diff --git a/superset/examples/birth_names.py b/superset/examples/birth_names.py index e91640b19029f..c9e38f1686cae 100644 --- a/superset/examples/birth_names.py +++ b/superset/examples/birth_names.py @@ -424,6 +424,16 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]: viz_type="table", metrics=metrics, ), + query_context=get_slice_json( + default_query_context, + queries=[ + { + "columns": ["ds"], + "metrics": metrics, + "time_range": "1983 : 2023", + } + ], + ), ), Slice( **slice_kwargs, diff --git a/superset/viz.py b/superset/viz.py index 4e39ae2a19f26..3051f104e20af 100644 --- a/superset/viz.py +++ b/superset/viz.py @@ -75,10 +75,8 @@ get_column_name, get_column_names, get_column_names_from_columns, - get_metric_names, JS_MAX_INTEGER, merge_extra_filters, - QueryMode, simple_filter_to_adhoc, ) from superset.utils.date_parser import get_since_until, parse_past_timedelta @@ -701,158 +699,6 @@ def raise_for_access(self) -> None: security_manager.raise_for_access(viz=self) -class TableViz(BaseViz): - - """A basic html table that is sortable and searchable""" - - viz_type = "table" - verbose_name = _("Table View") - credits = 'a Superset original' - is_timeseries = False - enforce_numerical_metrics = False - - @deprecated(deprecated_in="3.0") - def process_metrics(self) -> None: - """Process form data and store parsed column configs. - 1. Determine query mode based on form_data params. - - Use `query_mode` if it has a valid value - - Set as RAW mode if `all_columns` is set - - Otherwise defaults to AGG mode - 2. Determine output columns based on query mode. - """ - # Verify form data first: if not specifying query mode, then cannot have both - # GROUP BY and RAW COLUMNS. - if ( - not self.form_data.get("query_mode") - and self.form_data.get("all_columns") - and ( - self.form_data.get("groupby") - or self.form_data.get("metrics") - or self.form_data.get("percent_metrics") - ) - ): - raise QueryObjectValidationError( - _( - "You cannot use [Columns] in combination with " - "[Group By]/[Metrics]/[Percentage Metrics]. " - "Please choose one or the other." 
- ) - ) - - super().process_metrics() - - self.query_mode: QueryMode = QueryMode.get( - self.form_data.get("query_mode") - ) or ( - # infer query mode from the presence of other fields - QueryMode.RAW - if len(self.form_data.get("all_columns") or []) > 0 - else QueryMode.AGGREGATE - ) - - columns: list[str] # output columns sans time and percent_metric column - percent_columns: list[str] = [] # percent columns that needs extra computation - - if self.query_mode == QueryMode.RAW: - columns = get_metric_names(self.form_data.get("all_columns")) - else: - columns = get_column_names(self.groupby) + get_metric_names( - self.form_data.get("metrics") - ) - percent_columns = get_metric_names( - self.form_data.get("percent_metrics") or [] - ) - - self.columns = columns - self.percent_columns = percent_columns - self.is_timeseries = self.should_be_timeseries() - - @deprecated(deprecated_in="3.0") - def should_be_timeseries(self) -> bool: - # TODO handle datasource-type-specific code in datasource - conditions_met = self.form_data.get("granularity_sqla") and self.form_data.get( - "time_grain_sqla" - ) - if self.form_data.get("include_time") and not conditions_met: - raise QueryObjectValidationError( - _("Pick a granularity in the Time section or " "uncheck 'Include Time'") - ) - return bool(self.form_data.get("include_time")) - - @deprecated(deprecated_in="3.0") - def query_obj(self) -> QueryObjectDict: - query_obj = super().query_obj() - if self.query_mode == QueryMode.RAW: - query_obj["columns"] = self.form_data.get("all_columns") - order_by_cols = self.form_data.get("order_by_cols") or [] - query_obj["orderby"] = [json.loads(t) for t in order_by_cols] - # must disable groupby and metrics in raw mode - query_obj["groupby"] = [] - query_obj["metrics"] = [] - # raw mode does not support timeseries queries - query_obj["timeseries_limit_metric"] = None - query_obj["timeseries_limit"] = None - query_obj["is_timeseries"] = None - else: - sort_by = self.form_data.get("timeseries_limit_metric") - if sort_by: - sort_by_label = utils.get_metric_name(sort_by) - if sort_by_label not in utils.get_metric_names(query_obj["metrics"]): - query_obj["metrics"].append(sort_by) - query_obj["orderby"] = [ - (sort_by, not self.form_data.get("order_desc", True)) - ] - elif query_obj["metrics"]: - # Legacy behavior of sorting by first metric by default - first_metric = query_obj["metrics"][0] - query_obj["orderby"] = [ - (first_metric, not self.form_data.get("order_desc", True)) - ] - return query_obj - - @deprecated(deprecated_in="3.0") - def get_data(self, df: pd.DataFrame) -> VizData: - """ - Transform the query result to the table representation. - - :param df: The interim dataframe - :returns: The table visualization data - - The interim dataframe comprises of the group-by and non-group-by columns and - the union of the metrics representing the non-percent and percent metrics. Note - the percent metrics have yet to be transformed. - """ - # Transform the data frame to adhere to the UI ordering of the columns and - # metrics whilst simultaneously computing the percentages (via normalization) - # for the percent metrics. 
- if df.empty: - return None - - columns, percent_columns = self.columns, self.percent_columns - if DTTM_ALIAS in df and self.is_timeseries: - columns = [DTTM_ALIAS] + columns - df = pd.concat( - [ - df[columns], - (df[percent_columns].div(df[percent_columns].sum()).add_prefix("%")), - ], - axis=1, - ) - return self.handle_js_int_overflow( - dict(records=df.to_dict(orient="records"), columns=list(df.columns)) - ) - - @staticmethod - @deprecated(deprecated_in="3.0") - def json_dumps(query_obj: Any, sort_keys: bool = False) -> str: - return json.dumps( - query_obj, - default=utils.json_iso_dttm_ser, - sort_keys=sort_keys, - ignore_nan=True, - ) - - class TimeTableViz(BaseViz): """A data table with rich time-series related columns""" @@ -1076,65 +922,6 @@ def get_data(self, df: pd.DataFrame) -> VizData: } -class BigNumberViz(BaseViz): - - """Put emphasis on a single metric with this big number viz""" - - viz_type = "big_number" - verbose_name = _("Big Number with Trendline") - credits = 'a Superset original' - is_timeseries = True - - @deprecated(deprecated_in="3.0") - def query_obj(self) -> QueryObjectDict: - query_obj = super().query_obj() - metric = self.form_data.get("metric") - if not metric: - raise QueryObjectValidationError(_("Pick a metric!")) - query_obj["metrics"] = [self.form_data.get("metric")] - self.form_data["metric"] = metric - return query_obj - - @deprecated(deprecated_in="3.0") - def get_data(self, df: pd.DataFrame) -> VizData: - if df.empty: - return None - - df = df.pivot_table( - index=DTTM_ALIAS, - columns=[], - values=self.metric_labels, - dropna=False, - aggfunc=np.min, # looking for any (only) value, preserving `None` - ) - df = self.apply_rolling(df) - df[DTTM_ALIAS] = df.index - return super().get_data(df) - - -class BigNumberTotalViz(BaseViz): - - """Put emphasis on a single metric with this big number viz""" - - viz_type = "big_number_total" - verbose_name = _("Big Number") - credits = 'a Superset original' - is_timeseries = False - - @deprecated(deprecated_in="3.0") - def query_obj(self) -> QueryObjectDict: - query_obj = super().query_obj() - metric = self.form_data.get("metric") - if not metric: - raise QueryObjectValidationError(_("Pick a metric!")) - query_obj["metrics"] = [self.form_data.get("metric")] - self.form_data["metric"] = metric - - # Limiting rows is not required as only one cell is returned - query_obj["row_limit"] = None - return query_obj - - class NVD3TimeSeriesViz(NVD3Viz): """A rich line chart component with tons of options""" diff --git a/tests/integration_tests/cache_tests.py b/tests/integration_tests/cache_tests.py index a7da8a50d2a59..b2a8704dfb237 100644 --- a/tests/integration_tests/cache_tests.py +++ b/tests/integration_tests/cache_tests.py @@ -46,7 +46,7 @@ def test_no_data_cache(self): app.config["DATA_CACHE_CONFIG"] = {"CACHE_TYPE": "NullCache"} cache_manager.init_app(app) - slc = self.get_slice("Girls", db.session) + slc = self.get_slice("Top 10 Girl Name Share", db.session) json_endpoint = "/superset/explore_json/{}/{}/".format( slc.datasource_type, slc.datasource_id ) @@ -73,7 +73,7 @@ def test_slice_data_cache(self): } cache_manager.init_app(app) - slc = self.get_slice("Boys", db.session) + slc = self.get_slice("Top 10 Girl Name Share", db.session) json_endpoint = "/superset/explore_json/{}/{}/".format( slc.datasource_type, slc.datasource_id ) diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index f09662a8de919..734990a1eecc3 100644 --- 
a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -1715,7 +1715,7 @@ def test_gets_owned_created_favorited_by_me_filter(self): ) def test_warm_up_cache(self): self.login() - slc = self.get_slice("Girls", db.session) + slc = self.get_slice("Top 10 Girl Name Share", db.session) rv = self.client.put("/api/v1/chart/warm_up_cache", json={"chart_id": slc.id}) self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py index 911f3bf5daa4c..9580c2bf33e6b 100644 --- a/tests/integration_tests/charts/commands_tests.py +++ b/tests/integration_tests/charts/commands_tests.py @@ -456,7 +456,7 @@ def test_warm_up_cache_command_chart_not_found(self): @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_warm_up_cache(self): - slc = self.get_slice("Girls", db.session) + slc = self.get_slice("Top 10 Girl Name Share", db.session) result = ChartWarmUpCacheCommand(slc.id, None, None).run() self.assertEqual( result, {"chart_id": slc.id, "viz_error": None, "viz_status": "success"} diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index c96555503b598..0acb19969a23d 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -173,7 +173,7 @@ def test_slice_endpoint(self): @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_viz_cache_key(self): self.login(username="admin") - slc = self.get_slice("Girls", db.session) + slc = self.get_slice("Top 10 Girl Name Share", db.session) viz = slc.viz qobj = viz.query_obj() @@ -279,7 +279,9 @@ def test_slice_data(self): # slice data should have some required attributes self.login(username="admin") slc = self.get_slice( - slice_name="Girls", session=db.session, expunge_from_session=False + slice_name="Top 10 Girl Name Share", + session=db.session, + expunge_from_session=False, ) slc_data_attributes = slc.data.keys() assert "changed_on" in slc_data_attributes @@ -391,7 +393,7 @@ def test_databaseview_edit(self, username="admin"): ) def test_warm_up_cache(self): self.login() - slc = self.get_slice("Girls", db.session) + slc = self.get_slice("Top 10 Girl Name Share", db.session) data = self.get_json_resp(f"/superset/warm_up_cache?slice_id={slc.id}") self.assertEqual( data, [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] @@ -418,10 +420,10 @@ def test_cache_logging(self): self.login("admin") store_cache_keys = app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = True - girls_slice = self.get_slice("Girls", db.session) - self.get_json_resp(f"/superset/warm_up_cache?slice_id={girls_slice.id}") + slc = self.get_slice("Top 10 Girl Name Share", db.session) + self.get_json_resp(f"/superset/warm_up_cache?slice_id={slc.id}") ck = db.session.query(CacheKey).order_by(CacheKey.id.desc()).first() - assert ck.datasource_uid == f"{girls_slice.table.id}__table" + assert ck.datasource_uid == f"{slc.table.id}__table" app.config["STORE_CACHE_KEYS_IN_METADATA_DB"] = store_cache_keys def test_redirect_invalid(self): diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py index fe199443875dd..1518a69f9dbbe 100644 --- a/tests/integration_tests/security_tests.py +++ b/tests/integration_tests/security_tests.py @@ -1680,7 +1680,7 @@ def test_raise_for_access_table(self, mock_can_access): def 
test_raise_for_access_viz( self, mock_can_access_schema, mock_can_access, mock_is_owner ): - test_viz = viz.TableViz(self.get_datasource_mock(), form_data={}) + test_viz = viz.TimeTableViz(self.get_datasource_mock(), form_data={}) mock_can_access_schema.return_value = True security_manager.raise_for_access(viz=test_viz) diff --git a/tests/integration_tests/utils_tests.py b/tests/integration_tests/utils_tests.py index 2986188ff98ec..c0383d1d0b75d 100644 --- a/tests/integration_tests/utils_tests.py +++ b/tests/integration_tests/utils_tests.py @@ -974,7 +974,7 @@ def test_get_form_data_corrupted_json(self) -> None: def test_log_this(self) -> None: # TODO: Add additional scenarios. self.login(username="admin") - slc = self.get_slice("Girls", db.session) + slc = self.get_slice("Top 10 Girl Name Share", db.session) dashboard_id = 1 assert slc.viz is not None diff --git a/tests/integration_tests/viz_tests.py b/tests/integration_tests/viz_tests.py index ac390b3976a04..f1665e96888d0 100644 --- a/tests/integration_tests/viz_tests.py +++ b/tests/integration_tests/viz_tests.py @@ -45,7 +45,7 @@ def test_constructor_exception_no_datasource(self): viz.BaseViz(datasource, form_data) def test_process_metrics(self): - # test TableViz metrics in correct order + # test TimeTableViz metrics in correct order form_data = { "url_params": {}, "row_limit": 500, @@ -55,7 +55,7 @@ def test_process_metrics(self): "granularity_sqla": "year", "page_length": 0, "all_columns": [], - "viz_type": "table", + "viz_type": "time_table", "since": "2014-01-01", "until": "2014-01-02", "metrics": ["sum__SP_POP_TOTL", "SUM(SE_PRM_NENR_MA)", "SUM(SP_URB_TOTL)"], @@ -177,273 +177,6 @@ def test_cache_timeout(self): app.config["DATA_CACHE_CONFIG"]["CACHE_DEFAULT_TIMEOUT"] = data_cache_timeout -class TestTableViz(SupersetTestCase): - def test_get_data_applies_percentage(self): - form_data = { - "groupby": ["groupA", "groupB"], - "metrics": [ - { - "expressionType": "SIMPLE", - "aggregate": "SUM", - "label": "SUM(value1)", - "column": {"column_name": "value1", "type": "DOUBLE"}, - }, - "count", - "avg__C", - ], - "percent_metrics": [ - { - "expressionType": "SIMPLE", - "aggregate": "SUM", - "label": "SUM(value1)", - "column": {"column_name": "value1", "type": "DOUBLE"}, - }, - "avg__B", - ], - } - datasource = self.get_datasource_mock() - - df = pd.DataFrame( - { - "SUM(value1)": [15, 20, 25, 40], - "avg__B": [10, 20, 5, 15], - "avg__C": [11, 22, 33, 44], - "count": [6, 7, 8, 9], - "groupA": ["A", "B", "C", "C"], - "groupB": ["x", "x", "y", "z"], - } - ) - - test_viz = viz.TableViz(datasource, form_data) - data = test_viz.get_data(df) - # Check method correctly transforms data and computes percents - self.assertEqual( - [ - "groupA", - "groupB", - "SUM(value1)", - "count", - "avg__C", - "%SUM(value1)", - "%avg__B", - ], - list(data["columns"]), - ) - expected = [ - { - "groupA": "A", - "groupB": "x", - "SUM(value1)": 15, - "count": 6, - "avg__C": 11, - "%SUM(value1)": 0.15, - "%avg__B": 0.2, - }, - { - "groupA": "B", - "groupB": "x", - "SUM(value1)": 20, - "count": 7, - "avg__C": 22, - "%SUM(value1)": 0.2, - "%avg__B": 0.4, - }, - { - "groupA": "C", - "groupB": "y", - "SUM(value1)": 25, - "count": 8, - "avg__C": 33, - "%SUM(value1)": 0.25, - "%avg__B": 0.1, - }, - { - "groupA": "C", - "groupB": "z", - "SUM(value1)": 40, - "count": 9, - "avg__C": 44, - "%SUM(value1)": 0.4, - "%avg__B": 0.3, - }, - ] - self.assertEqual(expected, data["records"]) - - def test_parse_adhoc_filters(self): - form_data = { - "metrics": [ - { - "expressionType": 
"SIMPLE", - "aggregate": "SUM", - "label": "SUM(value1)", - "column": {"column_name": "value1", "type": "DOUBLE"}, - } - ], - "adhoc_filters": [ - { - "expressionType": "SIMPLE", - "clause": "WHERE", - "subject": "value2", - "operator": ">", - "comparator": "100", - }, - { - "expressionType": "SQL", - "clause": "HAVING", - "sqlExpression": "SUM(value1) > 5", - }, - { - "expressionType": "SQL", - "clause": "WHERE", - "sqlExpression": "value3 in ('North America')", - }, - ], - } - datasource = self.get_datasource_mock() - test_viz = viz.TableViz(datasource, form_data) - query_obj = test_viz.query_obj() - self.assertEqual( - [{"col": "value2", "val": "100", "op": ">"}], query_obj["filter"] - ) - self.assertEqual("(value3 in ('North America'))", query_obj["extras"]["where"]) - self.assertEqual("(SUM(value1) > 5)", query_obj["extras"]["having"]) - - def test_adhoc_filters_overwrite_legacy_filters(self): - form_data = { - "metrics": [ - { - "expressionType": "SIMPLE", - "aggregate": "SUM", - "label": "SUM(value1)", - "column": {"column_name": "value1", "type": "DOUBLE"}, - } - ], - "adhoc_filters": [ - { - "expressionType": "SIMPLE", - "clause": "WHERE", - "subject": "value2", - "operator": ">", - "comparator": "100", - }, - { - "expressionType": "SQL", - "clause": "WHERE", - "sqlExpression": "value3 in ('North America')", - }, - ], - "having": "SUM(value1) > 5", - } - datasource = self.get_datasource_mock() - test_viz = viz.TableViz(datasource, form_data) - query_obj = test_viz.query_obj() - self.assertEqual( - [{"col": "value2", "val": "100", "op": ">"}], query_obj["filter"] - ) - self.assertEqual("(value3 in ('North America'))", query_obj["extras"]["where"]) - self.assertEqual("", query_obj["extras"]["having"]) - - def test_query_obj_merges_percent_metrics(self): - datasource = self.get_datasource_mock() - form_data = { - "metrics": ["sum__A", "count", "avg__C"], - "percent_metrics": ["sum__A", "avg__B", "max__Y"], - } - test_viz = viz.TableViz(datasource, form_data) - query_obj = test_viz.query_obj() - self.assertEqual( - ["sum__A", "count", "avg__C", "avg__B", "max__Y"], query_obj["metrics"] - ) - - def test_query_obj_throws_columns_and_metrics(self): - datasource = self.get_datasource_mock() - form_data = {"all_columns": ["A", "B"], "metrics": ["x", "y"]} - with self.assertRaises(Exception): - test_viz = viz.TableViz(datasource, form_data) - test_viz.query_obj() - del form_data["metrics"] - form_data["groupby"] = ["B", "C"] - with self.assertRaises(Exception): - test_viz = viz.TableViz(datasource, form_data) - test_viz.query_obj() - - @patch("superset.viz.BaseViz.query_obj") - def test_query_obj_merges_all_columns(self, super_query_obj): - datasource = self.get_datasource_mock() - form_data = { - "all_columns": ["colA", "colB", "colC"], - "order_by_cols": ['["colA", "colB"]', '["colC"]'], - } - super_query_obj.return_value = { - "columns": ["colD", "colC"], - "groupby": ["colA", "colB"], - } - test_viz = viz.TableViz(datasource, form_data) - query_obj = test_viz.query_obj() - self.assertEqual(form_data["all_columns"], query_obj["columns"]) - self.assertEqual([], query_obj["groupby"]) - self.assertEqual([["colA", "colB"], ["colC"]], query_obj["orderby"]) - - def test_query_obj_uses_sortby(self): - datasource = self.get_datasource_mock() - form_data = { - "metrics": ["colA", "colB"], - "order_desc": False, - } - - def run_test(metric): - form_data["timeseries_limit_metric"] = metric - test_viz = viz.TableViz(datasource, form_data) - query_obj = test_viz.query_obj() - 
self.assertEqual(["colA", "colB", metric], query_obj["metrics"]) - self.assertEqual([(metric, True)], query_obj["orderby"]) - - run_test("simple_metric") - run_test( - { - "label": "adhoc_metric", - "expressionType": "SIMPLE", - "aggregate": "SUM", - "column": { - "column_name": "sort_column", - }, - } - ) - - def test_should_be_timeseries_raises_when_no_granularity(self): - datasource = self.get_datasource_mock() - form_data = {"include_time": True} - with self.assertRaises(Exception): - test_viz = viz.TableViz(datasource, form_data) - test_viz.should_be_timeseries() - - def test_adhoc_metric_with_sortby(self): - metrics = [ - { - "expressionType": "SIMPLE", - "aggregate": "SUM", - "label": "sum_value", - "column": {"column_name": "value1", "type": "DOUBLE"}, - } - ] - form_data = { - "metrics": metrics, - "timeseries_limit_metric": { - "expressionType": "SIMPLE", - "aggregate": "SUM", - "label": "SUM(value1)", - "column": {"column_name": "value1", "type": "DOUBLE"}, - }, - "order_desc": False, - } - - df = pd.DataFrame({"SUM(value1)": [15], "sum_value": [15]}) - datasource = self.get_datasource_mock() - test_viz = viz.TableViz(datasource, form_data) - data = test_viz.get_data(df) - self.assertEqual(["sum_value"], data["columns"]) - - class TestDistBarViz(SupersetTestCase): def test_groupby_nulls(self): form_data = { @@ -1311,7 +1044,7 @@ def test_apply_rolling(self): data={"y": [1.0, 2.0, 3.0, 4.0]}, ) self.assertEqual( - viz.BigNumberViz( + viz.NVD3TimeSeriesViz( datasource, { "metrics": ["y"], @@ -1325,7 +1058,7 @@ def test_apply_rolling(self): [1.0, 3.0, 6.0, 10.0], ) self.assertEqual( - viz.BigNumberViz( + viz.NVD3TimeSeriesViz( datasource, { "metrics": ["y"], @@ -1339,7 +1072,7 @@ def test_apply_rolling(self): [1.0, 3.0, 5.0, 7.0], ) self.assertEqual( - viz.BigNumberViz( + viz.NVD3TimeSeriesViz( datasource, { "metrics": ["y"], @@ -1361,7 +1094,7 @@ def test_apply_rolling_without_data(self): ), data={"y": [1.0, 2.0, 3.0, 4.0]}, ) - test_viz = viz.BigNumberViz( + test_viz = viz.NVD3TimeSeriesViz( datasource, { "metrics": ["y"], @@ -1374,34 +1107,6 @@ def test_apply_rolling_without_data(self): test_viz.apply_rolling(df) -class TestBigNumberViz(SupersetTestCase): - def test_get_data(self): - datasource = self.get_datasource_mock() - df = pd.DataFrame( - data={ - DTTM_ALIAS: pd.to_datetime( - ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] - ), - "y": [1.0, 2.0, 3.0, 4.0], - } - ) - data = viz.BigNumberViz(datasource, {"metrics": ["y"]}).get_data(df) - self.assertEqual(data[2], {DTTM_ALIAS: pd.Timestamp("2019-01-05"), "y": 3}) - - def test_get_data_with_none(self): - datasource = self.get_datasource_mock() - df = pd.DataFrame( - data={ - DTTM_ALIAS: pd.to_datetime( - ["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"] - ), - "y": [1.0, 2.0, None, 4.0], - } - ) - data = viz.BigNumberViz(datasource, {"metrics": ["y"]}).get_data(df) - assert np.isnan(data[2]["y"]) - - class TestFilterBoxViz(SupersetTestCase): def test_get_data(self): form_data = { From aa01b51177a59dd0dcf4e66221a58e2331591716 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Tue, 18 Jul 2023 14:29:29 -0700 Subject: [PATCH 03/43] chore: Bump pyyaml bounds (#24731) --- requirements/base.txt | 10 +++------- requirements/integration.txt | 13 +++++++++---- requirements/testing.txt | 4 ++++ setup.py | 2 +- 4 files changed, 17 insertions(+), 12 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 7f806c7c28691..dc042a7747210 100644 --- 
a/requirements/base.txt +++ b/requirements/base.txt @@ -131,9 +131,7 @@ humanize==3.11.0 idna==3.2 # via email-validator importlib-metadata==6.6.0 - # via - # apache-superset - # flask + # via apache-superset importlib-resources==5.12.0 # via limits isodate==0.6.0 @@ -243,7 +241,7 @@ pytz==2021.3 # celery # flask-babel # pandas -pyyaml==5.4.1 +pyyaml==6.0.1 # via # apache-superset # apispec @@ -318,9 +316,7 @@ wtforms-json==0.3.5 xlsxwriter==3.0.7 # via apache-superset zipp==3.15.0 - # via - # importlib-metadata - # importlib-resources + # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/integration.txt b/requirements/integration.txt index 9deecc0ab92bb..a8e0d2ce9701d 100644 --- a/requirements/integration.txt +++ b/requirements/integration.txt @@ -35,7 +35,7 @@ packaging==23.1 # pyproject-api # tox pip-compile-multi==2.6.3 - # via -r integration.in + # via -r requirements/integration.in pip-tools==6.13.0 # via pip-compile-multi platformdirs==3.8.1 @@ -45,17 +45,22 @@ platformdirs==3.8.1 pluggy==1.2.0 # via tox pre-commit==3.3.3 - # via -r integration.in + # via -r requirements/integration.in pyproject-api==1.5.2 # via tox pyproject-hooks==1.0.0 # via build -pyyaml==5.4.1 +pyyaml==6.0.1 # via pre-commit +tomli==2.0.1 + # via + # build + # pyproject-api + # tox toposort==1.10 # via pip-compile-multi tox==4.6.4 - # via -r integration.in + # via -r requirements/integration.in virtualenv==20.23.1 # via # pre-commit diff --git a/requirements/testing.txt b/requirements/testing.txt index f550edf3ab0b2..5605167228c7f 100644 --- a/requirements/testing.txt +++ b/requirements/testing.txt @@ -16,6 +16,8 @@ cmdstanpy==1.1.0 # via prophet contourpy==1.0.7 # via matplotlib +convertdate==2.4.0 + # via prophet coverage[toml]==7.2.5 # via pytest-cov cycler==0.11.0 @@ -121,6 +123,8 @@ pyfakefs==5.2.2 # via -r requirements/testing.in pyhive[presto]==0.6.5 # via apache-superset +pymeeus==0.5.12 + # via convertdate pytest==7.3.1 # via # -r requirements/testing.in diff --git a/setup.py b/setup.py index f16aa5c002c8c..814214f5a5c91 100644 --- a/setup.py +++ b/setup.py @@ -111,7 +111,7 @@ def get_git_sha() -> str: "python-dotenv", "python-geohash", "pyarrow>=12.0.0, <13", - "pyyaml>=5.4", + "pyyaml>=6.0.0, <7.0.0", "PyJWT>=2.4.0, <3.0", "redis>=4.5.4, <5.0", "selenium>=3.141.0, <4.10.0", From 068864b9ea7ee7edafb84ada3c3ae896676062d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jul 2023 17:44:38 -0600 Subject: [PATCH 04/43] build(deps-dev): bump word-wrap from 1.2.3 to 1.2.4 in /superset-embedded-sdk (#24734) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-embedded-sdk/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/superset-embedded-sdk/package-lock.json b/superset-embedded-sdk/package-lock.json index d348c7a1c40ca..1ccf12d117c78 100644 --- a/superset-embedded-sdk/package-lock.json +++ b/superset-embedded-sdk/package-lock.json @@ -8074,9 +8074,9 @@ "dev": true }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", + "integrity": 
"sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -14253,9 +14253,9 @@ "dev": true }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", + "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", "dev": true }, "wrap-ansi": { From 2785b8f11e4012010f1fe874b52f01b69f64a9ff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jul 2023 17:44:55 -0600 Subject: [PATCH 05/43] build(deps-dev): bump word-wrap from 1.2.3 to 1.2.4 in /superset-frontend (#24735) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-frontend/package-lock.json | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index ca2abb0e89081..82bb7c360785d 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -60363,9 +60363,9 @@ "dev": true }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", + "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -106977,11 +106977,12 @@ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "requires": { - "ansi-regex": "^4.1.1" + "ansi-regex": "^2.0.0" }, "dependencies": { "ansi-regex": { - "version": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", "integrity": "sha512-TIGnTpdo+E3+pCyAluZvtED5p5wCqLdezCyhPZzKPcxvFplEt4i+W7OONCKgeZFT3+y5NZZfOOS/Bdcanm1MYA==" } } @@ -110251,9 +110252,9 @@ "dev": true }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", + "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", "dev": true }, "wordwrap": { From b7dcc9f7e3ced48f617b33da08872fe4bf1b247a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jul 2023 17:45:05 -0600 Subject: [PATCH 06/43] build(deps): bump word-wrap from 1.2.3 to 1.2.4 in /superset-frontend/cypress-base (#24733) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-frontend/cypress-base/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git 
a/superset-frontend/cypress-base/package-lock.json b/superset-frontend/cypress-base/package-lock.json
index 817a3acef456d..b58d8ee574dcd 100644
--- a/superset-frontend/cypress-base/package-lock.json
+++ b/superset-frontend/cypress-base/package-lock.json
@@ -11911,9 +11911,9 @@
       }
     },
     "node_modules/word-wrap": {
-      "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
-      "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==",
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz",
+      "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==",
       "engines": {
         "node": ">=0.10.0"
       }
@@ -20967,9 +20967,9 @@
       }
     },
     "word-wrap": {
-      "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz",
-      "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ=="
+      "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz",
+      "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA=="
     },
     "wrap-ansi": {
       "version": "7.0.0",

From 11bb8c35b0ea4d07833a97f02f601048adfc00a4 Mon Sep 17 00:00:00 2001
From: Sebastian Liebscher <112352529+sebastianliebscher@users.noreply.github.com>
Date: Wed, 19 Jul 2023 01:48:13 +0200
Subject: [PATCH 07/43] chore: update deprecated arguments in schema (#24715)

---
 superset/row_level_security/schemas.py | 63 +++++++++++++++-----------
 1 file changed, 37 insertions(+), 26 deletions(-)

diff --git a/superset/row_level_security/schemas.py b/superset/row_level_security/schemas.py
index 718294f6444f1..617f2b04dc467 100644
--- a/superset/row_level_security/schemas.py
+++ b/superset/row_level_security/schemas.py
@@ -50,52 +50,54 @@ class TablesSchema(Schema):


 class RLSListSchema(Schema):
-    id = fields.Integer(description=id_description)
-    name = fields.String(description=name_description)
+    id = fields.Integer(metadata={"description": id_description})
+    name = fields.String(metadata={"description": name_description})
     filter_type = fields.String(
-        description=filter_type_description,
+        metadata={"description": filter_type_description},
         validate=OneOf(
             [filter_type.value for filter_type in RowLevelSecurityFilterType]
         ),
     )
     roles = fields.List(fields.Nested(RolesSchema))
     tables = fields.List(fields.Nested(TablesSchema))
-    clause = fields.String(description=clause_description)
+    clause = fields.String(metadata={"description": clause_description})
     changed_on_delta_humanized = fields.Function(
         RowLevelSecurityFilter.created_on_delta_humanized
     )
-    group_key = fields.String(description=group_key_description)
-    description = fields.String(description=description_description)
+    group_key = fields.String(metadata={"description": group_key_description})
+    description = fields.String(metadata={"description": description_description})


 class RLSShowSchema(Schema):
-    id = fields.Integer(description=id_description)
-    name = fields.String(description=name_description)
+    id = fields.Integer(metadata={"description": id_description})
+    name = fields.String(metadata={"description": name_description})
     filter_type = fields.String(
-        description=filter_type_description,
+        metadata={"description": filter_type_description},
         validate=OneOf(
             [filter_type.value for filter_type in RowLevelSecurityFilterType]
         ),
     )
     roles = fields.List(fields.Nested(RolesSchema))
     tables = fields.List(fields.Nested(TablesSchema))
-    clause = fields.String(description=clause_description)
-    group_key = fields.String(description=group_key_description)
-    description = fields.String(description=description_description)
+    clause = fields.String(metadata={"description": clause_description})
+    group_key = fields.String(metadata={"description": group_key_description})
+    description = fields.String(metadata={"description": description_description})


 class RLSPostSchema(Schema):
     name = fields.String(
-        description=name_description,
+        metadata={"description": name_description},
         required=True,
         allow_none=False,
         validate=Length(1, 255),
     )
     description = fields.String(
-        description=description_description, required=False, allow_none=True
+        metadata={"description": description_description},
+        required=False,
+        allow_none=True,
     )
     filter_type = fields.String(
-        description=filter_type_description,
+        metadata={"description": filter_type_description},
         required=True,
         allow_none=False,
         validate=OneOf(
@@ -104,34 +106,41 @@ class RLSPostSchema(Schema):
     )
     tables = fields.List(
         fields.Integer(),
-        description=tables_description,
+        metadata={"description": tables_description},
         required=True,
         allow_none=False,
         validate=Length(1),
     )
     roles = fields.List(
-        fields.Integer(), description=roles_description, required=True, allow_none=False
+        fields.Integer(),
+        metadata={"description": roles_description},
+        required=True,
+        allow_none=False,
     )
     group_key = fields.String(
-        description=group_key_description, required=False, allow_none=True
+        metadata={"description": group_key_description},
+        required=False,
+        allow_none=True,
     )
     clause = fields.String(
-        description=clause_description, required=True, allow_none=False
+        metadata={"description": clause_description}, required=True, allow_none=False
     )


 class RLSPutSchema(Schema):
     name = fields.String(
-        description=name_description,
+        metadata={"description": name_description},
         required=False,
         allow_none=False,
         validate=Length(1, 255),
     )
     description = fields.String(
-        description=description_description, required=False, allow_none=True
+        metadata={"description": description_description},
+        required=False,
+        allow_none=True,
     )
     filter_type = fields.String(
-        description=filter_type_description,
+        metadata={"description": filter_type_description},
         required=False,
         allow_none=False,
         validate=OneOf(
@@ -140,19 +149,21 @@ class RLSPutSchema(Schema):
     )
     tables = fields.List(
         fields.Integer(),
-        description=tables_description,
+        metadata={"description": tables_description},
         required=False,
         allow_none=False,
     )
     roles = fields.List(
         fields.Integer(),
-        description=roles_description,
+        metadata={"description": roles_description},
         required=False,
         allow_none=False,
     )
     group_key = fields.String(
-        description=group_key_description, required=False, allow_none=True
+        metadata={"description": group_key_description},
+        required=False,
+        allow_none=True,
     )
     clause = fields.String(
-        description=clause_description, required=False, allow_none=False
+        metadata={"description": clause_description}, required=False, allow_none=False
     )

From 2b0ffb01b654c1168a33870c668f938aea9afdbd Mon Sep 17 00:00:00 2001
From: Arjun Devarajan
Date: Wed, 19 Jul 2023 09:16:20 -0400
Subject: [PATCH 08/43] feat: use Scarf Gateway for Superset helm charts/Docker compose downloads (#24432)

---
 docker-compose-non-dev.yml | 2 +-
 docker-compose.yml | 2 +-
 docs/.gitignore | 2 ++
 docs/docs/frequently-asked-questions.mdx | 6 ++++++
.../installing-superset-using-docker-compose.mdx | 9 ++++++++- docs/docs/installation/running-on-kubernetes.mdx | 4 ++++ helm/superset/Chart.yaml | 2 +- helm/superset/README.md | 4 ++-- helm/superset/values.yaml | 2 +- 9 files changed, 26 insertions(+), 7 deletions(-) diff --git a/docker-compose-non-dev.yml b/docker-compose-non-dev.yml index 070a1bb12c605..0ce96e00bafd2 100644 --- a/docker-compose-non-dev.yml +++ b/docker-compose-non-dev.yml @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -x-superset-image: &superset-image apache/superset:${TAG:-latest-dev} +x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev} x-superset-depends-on: &superset-depends-on - db - redis diff --git a/docker-compose.yml b/docker-compose.yml index c6af8e54f390c..bd965f156af62 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -x-superset-image: &superset-image apache/superset:${TAG:-latest-dev} +x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev} x-superset-user: &superset-user root x-superset-depends-on: &superset-depends-on - db diff --git a/docs/.gitignore b/docs/.gitignore index b2d6de30624f6..9f2d3dbb1c88f 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -18,3 +18,5 @@ npm-debug.log* yarn-debug.log* yarn-error.log* + +docs/.zshrc diff --git a/docs/docs/frequently-asked-questions.mdx b/docs/docs/frequently-asked-questions.mdx index 381746e6504c9..bbb94d617b986 100644 --- a/docs/docs/frequently-asked-questions.mdx +++ b/docs/docs/frequently-asked-questions.mdx @@ -282,3 +282,9 @@ guarantees and are not recommended but may fit your use case temporarily: In the Edit Dataset view, you can specify a time offset. This field lets you configure the number of hours to be added or subtracted from the time column. This can be used, for example, to convert UTC time to local time. + +### Does Superset collect any telemetry data? + +Superset uses [Scarf](https://about.scarf.sh/) by default to collect basic telemetry data upon installing and/or running Superset. This data helps the maintainers of Superset better understand which versions of Superset are being used, in order to prioritize patch/minor releases and security fixes. +We use the [Scarf Gateway](https://docs.scarf.sh/gateway/) to sit in front of container registries, and the [scarf-js](https://about.scarf.sh/package-sdks) package to track `npm` installations. +Scarf purges PII and provides aggregated statistics. Superset users can easily opt out of analytics in various ways documented [here](https://docs.scarf.sh/gateway/#do-not-track) and [here](https://docs.scarf.sh/package-analytics/#as-a-user-of-a-package-using-scarf-js-how-can-i-opt-out-of-analytics). Additional opt-out instructions for Docker users are available on the [Docker Installation](https://superset.apache.org/docs/installation/installing-superset-using-docker-compose) page. 
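For reference alongside the FAQ entry above, the Docker opt-out it links to amounts to a one-line edit of the `x-superset-image` anchor. A minimal sketch built only from values that appear in this patch series; the commented-out line is the opted-out form:

```yaml
# Default after this patch: Superset images are pulled through the Scarf
# Gateway, which reports aggregated, PII-free download statistics.
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}

# Opt-out sketch: drop the gateway prefix to pull straight from Docker Hub.
# x-superset-image: &superset-image apache/superset:${TAG:-latest-dev}
```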
diff --git a/docs/docs/installation/installing-superset-using-docker-compose.mdx b/docs/docs/installation/installing-superset-using-docker-compose.mdx
index 7cfb76fce1bea..981fca8147dc5 100644
--- a/docs/docs/installation/installing-superset-using-docker-compose.mdx
+++ b/docs/docs/installation/installing-superset-using-docker-compose.mdx
@@ -96,7 +96,14 @@ You can configure the Docker Compose environment variables for dev and non-dev

 One important variable is `SUPERSET_LOAD_EXAMPLES` which determines whether the `superset_init` container will load example data and visualizations into the database and Superset. These examples are quite helpful for most people, but probably unnecessary for experienced users. The loading process can sometimes take a few minutes and a good amount of CPU, so you may want to disable it on a resource-constrained device.

-**Note:** Users often want to connect to other databases from Superset. Currently, the easiest way to do this is to modify the `docker-compose-non-dev.yml` file and add your database as a service that the other services depend on (via `x-superset-depends-on`). Others have attempted to set `network_mode: host` on the Superset services, but these generally break the installation, because the configuration requires use of the Docker Compose DNS resolver for the service names. If you have a good solution for this, let us know!
+
+:::note
+Users often want to connect to other databases from Superset. Currently, the easiest way to do this is to modify the `docker-compose-non-dev.yml` file and add your database as a service that the other services depend on (via `x-superset-depends-on`). Others have attempted to set `network_mode: host` on the Superset services, but these generally break the installation, because the configuration requires use of the Docker Compose DNS resolver for the service names. If you have a good solution for this, let us know!
+:::
+
+:::note
+Superset uses [Scarf Gateway](https://about.scarf.sh/scarf-gateway) to collect telemetry data to better understand and support the need for patch versions of Superset. Scarf purges PII and provides aggregated statistics. Superset users can easily opt out of analytics in various ways documented [here](https://docs.scarf.sh/gateway/#do-not-track). However, if you wish to opt-out of this in your Docker-based installation, you can simply edit your `docker-compose.yml` or `docker-compose-non-dev.yml` file and remove `apachesuperset.docker.scarf.sh/` from the `x-superset-image` setting, so that it's simply pulling `apache/superset:${TAG:-latest-dev}`
+:::

 ### 4. Log in to Superset

diff --git a/docs/docs/installation/running-on-kubernetes.mdx b/docs/docs/installation/running-on-kubernetes.mdx
index 17b884aeeab22..2a63de9b46ac5 100644
--- a/docs/docs/installation/running-on-kubernetes.mdx
+++ b/docs/docs/installation/running-on-kubernetes.mdx
@@ -121,6 +121,10 @@ init:
   . {{ .Values.configMountPath }}/superset_init.sh
 ```
+:::note
+Superset uses [Scarf Gateway](https://about.scarf.sh/scarf-gateway) to collect telemetry data to better understand and support the need for patch versions of Superset. Scarf purges PII and provides aggregated statistics. Superset users can easily opt out of analytics in various ways documented [here](https://docs.scarf.sh/gateway/#do-not-track).
However, if you wish to opt-out of this in your Helm-based installation, you can simply edit your `helm/superset/values.yaml` file and remove `apachesuperset.docker.scarf.sh/` from the `repository` field, so that it's simply pulling `apache/superset` +::: + #### Dependencies Install additional packages and do any other bootstrap configuration in the bootstrap script. diff --git a/helm/superset/Chart.yaml b/helm/superset/Chart.yaml index 3198e648ac3f7..7582c560c4805 100644 --- a/helm/superset/Chart.yaml +++ b/helm/superset/Chart.yaml @@ -29,7 +29,7 @@ maintainers: - name: craig-rueda email: craig@craigrueda.com url: https://github.com/craig-rueda -version: 0.10.4 +version: 0.10.5 dependencies: - name: postgresql version: 12.1.6 diff --git a/helm/superset/README.md b/helm/superset/README.md index 217b27b67092b..9ee8b9d12cfed 100644 --- a/helm/superset/README.md +++ b/helm/superset/README.md @@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs # superset -![Version: 0.10.4](https://img.shields.io/badge/Version-0.10.4-informational?style=flat-square) +![Version: 0.10.5](https://img.shields.io/badge/Version-0.10.5-informational?style=flat-square) Apache Superset is a modern, enterprise-ready business intelligence web application @@ -70,7 +70,7 @@ helm install my-superset superset/superset | fullnameOverride | string | `nil` | Provide a name to override the full names of resources | | hostAliases | list | `[]` | Custom hostAliases for all superset pods # https://kubernetes.io/docs/tasks/network/customize-hosts-file-for-pods/ | | image.pullPolicy | string | `"IfNotPresent"` | | -| image.repository | string | `"apache/superset"` | | +| image.repository | string | `"apachesuperset.docker.scarf.sh/apache/superset"` | | | image.tag | string | `""` | | | imagePullSecrets | list | `[]` | | | ingress.annotations | object | `{}` | | diff --git a/helm/superset/values.yaml b/helm/superset/values.yaml index bff8b30efd481..660aac8af5329 100644 --- a/helm/superset/values.yaml +++ b/helm/superset/values.yaml @@ -176,7 +176,7 @@ configMountPath: "/app/pythonpath" extraConfigMountPath: "/app/configs" image: - repository: apache/superset + repository: apachesuperset.docker.scarf.sh/apache/superset tag: "" pullPolicy: IfNotPresent From 4c5ada421c44054c56a3c6ccb5551352e4fd94c8 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Wed, 19 Jul 2023 07:21:43 -0700 Subject: [PATCH 09/43] fix(druid): Delete obsolete Druid NoSQL slice parameters (#24737) --- ...e_obsolete_druid_nosql_slice_parameters.py | 103 ++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py diff --git a/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py b/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py new file mode 100644 index 0000000000000..ce25bd85ef058 --- /dev/null +++ b/superset/migrations/versions/2023-07-18_15-30_863adcf72773_delete_obsolete_druid_nosql_slice_parameters.py @@ -0,0 +1,103 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""delete obsolete Druid NoSQL slice parameters + +Revision ID: 863adcf72773 +Revises: 6d05b0a70c89 +Create Date: 2023-07-18 15:30:43.695135 + +""" + +# revision identifiers, used by Alembic. +revision = "863adcf72773" +down_revision = "6d05b0a70c89" + +import json +import logging + +from alembic import op +from sqlalchemy import Column, Integer, Text +from sqlalchemy.ext.declarative import declarative_base + +from superset import db + +Base = declarative_base() + + +class Slice(Base): + __tablename__ = "slices" + + id = Column(Integer, primary_key=True) + params = Column(Text) + query_context = Column(Text) + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + + for slc in session.query(Slice).all(): + if slc.params: + updated = False + + try: + params = json.loads(slc.params) + + for key in ["druid_time_origin", "granularity"]: + if key in params: + del params[key] + updated = True + + if updated: + slc.params = json.dumps(params) + except Exception: + logging.exception(f"Unable to parse params for slice {slc.id}") + + if slc.query_context: + updated = False + + try: + query_context = json.loads(slc.query_context) + + if form_data := query_context.get("form_data"): + for key in ["druid_time_origin", "granularity"]: + if key in form_data: + del form_data[key] + updated = True + + for query in query_context.get("queries", []): + for key in ["druid_time_origin", "granularity"]: + if key in query: + del query[key] + updated = True + + if extras := query.get("extras"): + if "having_druid" in extras: + del extras["having_druid"] + updated = True + + if updated: + slc.query_context = json.dumps(query_context) + except Exception: + logging.exception(f"Unable to parse query context for slice {slc.id}") + + session.commit() + session.close() + + +def downgrade(): + pass From 837e3c55ca4c00bdbe34b921516f6ceb8fc56720 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 19 Jul 2023 10:44:22 -0600 Subject: [PATCH 10/43] build(deps-dev): bump word-wrap from 1.2.3 to 1.2.4 in /superset-websocket (#24732) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- superset-websocket/package-lock.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json index a709c8aadc030..cf4f089a853f3 100644 --- a/superset-websocket/package-lock.json +++ b/superset-websocket/package-lock.json @@ -5868,9 +5868,9 @@ } }, "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", + "integrity": 
"sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", "dev": true, "engines": { "node": ">=0.10.0" @@ -10431,9 +10431,9 @@ } }, "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.4.tgz", + "integrity": "sha512-2V81OA4ugVo5pRo46hAoD2ivUJx8jXmWXfUkY4KFNw0hEptvN0QfH3K4nHiwzGeKl5rFKedV48QVoqYavy4YpA==", "dev": true }, "wrap-ansi": { From 5f49e0fdd06b558a9837d6fe07739d3989de9f61 Mon Sep 17 00:00:00 2001 From: John Bodley <4567245+john-bodley@users.noreply.github.com> Date: Wed, 19 Jul 2023 11:12:36 -0700 Subject: [PATCH 11/43] fix(cache): Add cache warmup for non-legacy charts (#24671) --- superset/charts/commands/export.py | 2 +- superset/charts/commands/warm_up_cache.py | 73 ++++++++---- superset/views/core.py | 59 ++++------ tests/integration_tests/charts/api_tests.py | 105 ++++++++++++++++-- .../charts/commands_tests.py | 48 +------- tests/integration_tests/core_tests.py | 89 +++++++++------ 6 files changed, 221 insertions(+), 155 deletions(-) diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py index d1183a999c813..c942aa96c9a69 100644 --- a/superset/charts/commands/export.py +++ b/superset/charts/commands/export.py @@ -34,7 +34,7 @@ # keys present in the standard export that are not needed -REMOVE_KEYS = ["datasource_type", "datasource_name", "query_context", "url_params"] +REMOVE_KEYS = ["datasource_type", "datasource_name", "url_params"] class ExportChartsCommand(ExportModelsCommand): diff --git a/superset/charts/commands/warm_up_cache.py b/superset/charts/commands/warm_up_cache.py index 97d5c4fe1e62a..3ef05d50a5be8 100644 --- a/superset/charts/commands/warm_up_cache.py +++ b/superset/charts/commands/warm_up_cache.py @@ -21,12 +21,17 @@ import simplejson as json from flask import g -from superset.charts.commands.exceptions import WarmUpCacheChartNotFoundError +from superset.charts.commands.exceptions import ( + ChartInvalidError, + WarmUpCacheChartNotFoundError, +) +from superset.charts.data.commands.get_data_command import ChartDataCommand from superset.commands.base import BaseCommand from superset.extensions import db from superset.models.slice import Slice from superset.utils.core import error_msg_from_exception from superset.views.utils import get_dashboard_extra_filters, get_form_data, get_viz +from superset.viz import viz_types class ChartWarmUpCacheCommand(BaseCommand): @@ -43,31 +48,51 @@ def __init__( def run(self) -> dict[str, Any]: self.validate() chart: Slice = self._chart_or_id # type: ignore + try: form_data = get_form_data(chart.id, use_slice_data=True)[0] - if self._dashboard_id: - form_data["extra_filters"] = ( - json.loads(self._extra_filters) - if self._extra_filters - else get_dashboard_extra_filters(chart.id, self._dashboard_id) - ) - - if not chart.datasource: - raise Exception("Chart's datasource does not exist") - - obj = get_viz( - datasource_type=chart.datasource.type, - datasource_id=chart.datasource.id, - form_data=form_data, - force=True, - ) - - # pylint: disable=assigning-non-slot - g.form_data = form_data - payload = obj.get_payload() - delattr(g, "form_data") - error = payload["errors"] or None - status = payload["status"] + + if form_data.get("viz_type") in viz_types: + # Legacy visualizations. 
+ if not chart.datasource: + raise ChartInvalidError("Chart's datasource does not exist") + + if self._dashboard_id: + form_data["extra_filters"] = ( + json.loads(self._extra_filters) + if self._extra_filters + else get_dashboard_extra_filters(chart.id, self._dashboard_id) + ) + + g.form_data = form_data # pylint: disable=assigning-non-slot + payload = get_viz( + datasource_type=chart.datasource.type, + datasource_id=chart.datasource.id, + form_data=form_data, + force=True, + ).get_payload() + delattr(g, "form_data") + error = payload["errors"] or None + status = payload["status"] + else: + # Non-legacy visualizations. + query_context = chart.get_query_context() + + if not query_context: + raise ChartInvalidError("Chart's query context does not exist") + + query_context.force = True + command = ChartDataCommand(query_context) + command.validate() + payload = command.run() + + # Report the first error. + for query in payload["queries"]: + error = query["error"] + status = query["status"] + + if error is not None: + break except Exception as ex: # pylint: disable=broad-except error = error_msg_from_exception(ex) status = None diff --git a/superset/views/core.py b/superset/views/core.py index 33b794a0bb2b1..f9bf51d54c905 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -43,6 +43,7 @@ security_manager, ) from superset.charts.commands.exceptions import ChartNotFoundError +from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType from superset.connectors.base.models import BaseDatasource from superset.connectors.sqla.models import SqlaTable @@ -72,6 +73,7 @@ from superset.utils.async_query_manager import AsyncQueryTokenException from superset.utils.cache import etag_cache from superset.utils.core import ( + base_json_conv, DatasourceType, get_user_id, get_username, @@ -95,7 +97,6 @@ check_datasource_perms, check_explore_cache_perms, check_resource_permissions, - get_dashboard_extra_filters, get_datasource_info, get_form_data, get_viz, @@ -769,7 +770,8 @@ def save_or_overwrite_slice( @api @has_access_api @expose("/warm_up_cache/", methods=("GET",)) - def warm_up_cache( # pylint: disable=too-many-locals,no-self-use + @deprecated(new_target="api/v1/chart/warm_up_cache/") + def warm_up_cache( # pylint: disable=no-self-use self, ) -> FlaskResponse: """Warms up the cache for the slice or table. 
@@ -825,43 +827,22 @@ def warm_up_cache( # pylint: disable=too-many-locals,no-self-use .all() ) - result = [] - - for slc in slices: - try: - form_data = get_form_data(slc.id, use_slice_data=True)[0] - if dashboard_id: - form_data["extra_filters"] = ( - json.loads(extra_filters) - if extra_filters - else get_dashboard_extra_filters(slc.id, dashboard_id) - ) - - if not slc.datasource: - raise Exception("Slice's datasource does not exist") - - obj = get_viz( - datasource_type=slc.datasource.type, - datasource_id=slc.datasource.id, - form_data=form_data, - force=True, - ) - - # pylint: disable=assigning-non-slot - g.form_data = form_data - payload = obj.get_payload() - delattr(g, "form_data") - error = payload["errors"] or None - status = payload["status"] - except Exception as ex: # pylint: disable=broad-except - error = utils.error_msg_from_exception(ex) - status = None - - result.append( - {"slice_id": slc.id, "viz_error": error, "viz_status": status} - ) - - return json_success(json.dumps(result)) + return json_success( + json.dumps( + [ + { + "slice_id" if key == "chart_id" else key: value + for key, value in ChartWarmUpCacheCommand( + slc, dashboard_id, extra_filters + ) + .run() + .items() + } + for slc in slices + ], + default=base_json_conv, + ), + ) @has_access @expose("/dashboard//") diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py index 734990a1eecc3..2db0cc7de301a 100644 --- a/tests/integration_tests/charts/api_tests.py +++ b/tests/integration_tests/charts/api_tests.py @@ -14,37 +14,41 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# isort:skip_file """Unit tests for Superset""" import json from io import BytesIO +from unittest import mock from zipfile import is_zipfile, ZipFile import prison import pytest import yaml +from flask_babel import lazy_gettext as _ +from parameterized import parameterized from sqlalchemy import and_ from sqlalchemy.sql import func +from superset.charts.commands.exceptions import ChartDataQueryFailedError +from superset.charts.data.commands.get_data_command import ChartDataCommand from superset.connectors.sqla.models import SqlaTable from superset.extensions import cache_manager, db, security_manager from superset.models.core import Database, FavStar, FavStarClassName from superset.models.dashboard import Dashboard -from superset.reports.models import ReportSchedule, ReportScheduleType from superset.models.slice import Slice +from superset.reports.models import ReportSchedule, ReportScheduleType from superset.utils.core import get_example_default_schema from superset.utils.database import get_example_database - -from tests.integration_tests.conftest import with_feature_flags +from superset.viz import viz_types from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin from tests.integration_tests.base_tests import SupersetTestCase +from tests.integration_tests.conftest import with_feature_flags from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, load_birth_names_data, ) from tests.integration_tests.fixtures.energy_dashboard import ( - load_energy_table_with_slice, load_energy_table_data, + load_energy_table_with_slice, ) from tests.integration_tests.fixtures.importexport import ( chart_config, @@ -1710,12 +1714,16 @@ def test_gets_owned_created_favorited_by_me_filter(self): assert data["result"][0]["slice_name"] == "name0" assert 
data["result"][0]["datasource_id"] == 1 - @pytest.mark.usefixtures( - "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + @parameterized.expand( + [ + "Top 10 Girl Name Share", # Legacy chart + "Pivot Table v2", # Non-legacy chart + ], ) - def test_warm_up_cache(self): + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_warm_up_cache(self, slice_name): self.login() - slc = self.get_slice("Top 10 Girl Name Share", db.session) + slc = self.get_slice(slice_name, db.session) rv = self.client.put("/api/v1/chart/warm_up_cache", json={"chart_id": slc.id}) self.assertEqual(rv.status_code, 200) data = json.loads(rv.data.decode("utf-8")) @@ -1780,7 +1788,6 @@ def test_warm_up_cache_payload_validation(self): ) self.assertEqual(rv.status_code, 400) data = json.loads(rv.data.decode("utf-8")) - print(data) self.assertEqual( data, { @@ -1791,3 +1798,81 @@ def test_warm_up_cache_payload_validation(self): } }, ) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_warm_up_cache_error(self) -> None: + self.login() + slc = self.get_slice("Pivot Table v2", db.session) + + with mock.patch.object(ChartDataCommand, "run") as mock_run: + mock_run.side_effect = ChartDataQueryFailedError( + _( + "Error: %(error)s", + error=_("Empty query?"), + ) + ) + + assert json.loads( + self.client.put( + "/api/v1/chart/warm_up_cache", + json={"chart_id": slc.id}, + ).data + ) == { + "result": [ + { + "chart_id": slc.id, + "viz_error": "Error: Empty query?", + "viz_status": None, + }, + ], + } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_warm_up_cache_no_query_context(self) -> None: + self.login() + slc = self.get_slice("Pivot Table v2", db.session) + + with mock.patch.object(Slice, "get_query_context") as mock_get_query_context: + mock_get_query_context.return_value = None + + assert json.loads( + self.client.put( + f"/api/v1/chart/warm_up_cache", + json={"chart_id": slc.id}, + ).data + ) == { + "result": [ + { + "chart_id": slc.id, + "viz_error": "Chart's query context does not exist", + "viz_status": None, + }, + ], + } + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_warm_up_cache_no_datasource(self) -> None: + self.login() + slc = self.get_slice("Top 10 Girl Name Share", db.session) + + with mock.patch.object( + Slice, + "datasource", + new_callable=mock.PropertyMock, + ) as mock_datasource: + mock_datasource.return_value = None + + assert json.loads( + self.client.put( + f"/api/v1/chart/warm_up_cache", + json={"chart_id": slc.id}, + ).data + ) == { + "result": [ + { + "chart_id": slc.id, + "viz_error": "Chart's datasource does not exist", + "viz_status": None, + }, + ], + } diff --git a/tests/integration_tests/charts/commands_tests.py b/tests/integration_tests/charts/commands_tests.py index 9580c2bf33e6b..f9785a4dd6c20 100644 --- a/tests/integration_tests/charts/commands_tests.py +++ b/tests/integration_tests/charts/commands_tests.py @@ -96,52 +96,7 @@ def test_export_chart_command(self, mock_g): "dataset_uuid": str(example_chart.table.uuid), "uuid": str(example_chart.uuid), "version": "1.0.0", - } - - @patch("superset.security.manager.g") - @pytest.mark.usefixtures("load_energy_table_with_slice") - def test_export_chart_with_query_context(self, mock_g): - """Test that charts that have a query_context are exported correctly""" - - mock_g.user = security_manager.find_user("alpha") - example_chart = db.session.query(Slice).filter_by(slice_name="Heatmap").one() - command = 
ExportChartsCommand([example_chart.id]) - - contents = dict(command.run()) - - expected = [ - "metadata.yaml", - f"charts/Heatmap_{example_chart.id}.yaml", - "datasets/examples/energy_usage.yaml", - "databases/examples.yaml", - ] - assert expected == list(contents.keys()) - - metadata = yaml.safe_load(contents[f"charts/Heatmap_{example_chart.id}.yaml"]) - - assert metadata == { - "slice_name": "Heatmap", - "description": None, - "certified_by": None, - "certification_details": None, - "viz_type": "heatmap", - "params": { - "all_columns_x": "source", - "all_columns_y": "target", - "canvas_image_rendering": "pixelated", - "collapsed_fieldsets": "", - "linear_color_scheme": "blue_white_yellow", - "metric": "sum__value", - "normalize_across": "heatmap", - "slice_name": "Heatmap", - "viz_type": "heatmap", - "xscale_interval": "1", - "yscale_interval": "1", - }, - "cache_timeout": None, - "dataset_uuid": str(example_chart.table.uuid), - "uuid": str(example_chart.uuid), - "version": "1.0.0", + "query_context": None, } @patch("superset.security.manager.g") @@ -187,6 +142,7 @@ def test_export_chart_command_key_order(self, mock_g): "certification_details", "viz_type", "params", + "query_context", "cache_timeout", "uuid", "version", diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 0acb19969a23d..e036602d0f973 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -14,72 +14,67 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# isort:skip_file """Unit tests for Superset""" import datetime import doctest import html import json import logging -from urllib.parse import quote - -import prison -import superset.utils.database -from superset.utils.core import backend -from tests.integration_tests.fixtures.public_role import public_role_like_gamma -from tests.integration_tests.fixtures.birth_names_dashboard import ( - load_birth_names_dashboard_with_slices, - load_birth_names_data, -) -from sqlalchemy import Table - -import pytest -import pytz import random import unittest from unittest import mock +from urllib.parse import quote import pandas as pd +import prison +import pytest +import pytz import sqlalchemy as sqla +from flask_babel import lazy_gettext as _ +from sqlalchemy import Table from sqlalchemy.exc import SQLAlchemyError -from superset.models.cache import CacheKey -from superset.utils.database import get_example_database -from tests.integration_tests.conftest import with_feature_flags -from tests.integration_tests.fixtures.energy_dashboard import ( - load_energy_table_with_slice, - load_energy_table_data, -) -from tests.integration_tests.insert_chart_mixin import InsertChartMixin -from tests.integration_tests.test_app import app + +import superset.utils.database import superset.views.utils -from superset import ( - dataframe, - db, - security_manager, - sql_lab, -) +from superset import dataframe, db, security_manager, sql_lab +from superset.charts.commands.exceptions import ChartDataQueryFailedError +from superset.charts.data.commands.get_data_command import ChartDataCommand from superset.common.db_query_status import QueryStatus from superset.connectors.sqla.models import SqlaTable from superset.db_engine_specs.base import BaseEngineSpec from superset.db_engine_specs.mssql import MssqlEngineSpec -from superset.exceptions import SupersetException +from superset.exceptions import QueryObjectValidationError, 
SupersetException from superset.extensions import async_query_manager, cache_manager from superset.models import core as models from superset.models.annotations import Annotation, AnnotationLayer +from superset.models.cache import CacheKey from superset.models.dashboard import Dashboard from superset.models.slice import Slice from superset.models.sql_lab import Query from superset.result_set import SupersetResultSet from superset.utils import core as utils +from superset.utils.core import backend +from superset.utils.database import get_example_database from superset.views import core as views from superset.views.database.views import DatabaseView - -from .base_tests import SupersetTestCase +from tests.integration_tests.conftest import CTAS_SCHEMA_NAME, with_feature_flags +from tests.integration_tests.fixtures.birth_names_dashboard import ( + load_birth_names_dashboard_with_slices, + load_birth_names_data, +) +from tests.integration_tests.fixtures.energy_dashboard import ( + load_energy_table_data, + load_energy_table_with_slice, +) +from tests.integration_tests.fixtures.public_role import public_role_like_gamma from tests.integration_tests.fixtures.world_bank_dashboard import ( load_world_bank_dashboard_with_slices, load_world_bank_data, ) -from tests.integration_tests.conftest import CTAS_SCHEMA_NAME +from tests.integration_tests.insert_chart_mixin import InsertChartMixin +from tests.integration_tests.test_app import app + +from .base_tests import SupersetTestCase logger = logging.getLogger(__name__) @@ -389,7 +384,8 @@ def test_databaseview_edit(self, username="admin"): db.session.commit() @pytest.mark.usefixtures( - "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" + "load_birth_names_dashboard_with_slices", + "load_energy_table_with_slice", ) def test_warm_up_cache(self): self.login() @@ -415,6 +411,29 @@ def test_warm_up_cache(self): + quote(json.dumps([{"col": "name", "op": "in", "val": ["Jennifer"]}])) ) == [{"slice_id": slc.id, "viz_error": None, "viz_status": "success"}] + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_warm_up_cache_error(self) -> None: + self.login() + slc = self.get_slice("Pivot Table v2", db.session) + + with mock.patch.object( + ChartDataCommand, + "run", + side_effect=ChartDataQueryFailedError( + _( + "Error: %(error)s", + error=_("Empty query?"), + ) + ), + ): + assert self.get_json_resp(f"/superset/warm_up_cache?slice_id={slc.id}") == [ + { + "slice_id": slc.id, + "viz_error": "Error: Empty query?", + "viz_status": None, + } + ] + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_cache_logging(self): self.login("admin") From 2d58dddbdc4057ce854ac7869d8ef989350e0376 Mon Sep 17 00:00:00 2001 From: Stepan <66589759+Always-prog@users.noreply.github.com> Date: Wed, 19 Jul 2023 22:48:50 +0300 Subject: [PATCH 12/43] fix(range-slider): removed localization of metric key (#24716) --- superset-frontend/src/filters/components/Range/buildQuery.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/superset-frontend/src/filters/components/Range/buildQuery.ts b/superset-frontend/src/filters/components/Range/buildQuery.ts index 72e2fa549247c..8a76e5a28c167 100644 --- a/superset-frontend/src/filters/components/Range/buildQuery.ts +++ b/superset-frontend/src/filters/components/Range/buildQuery.ts @@ -20,7 +20,6 @@ import { buildQueryContext, GenericDataType, QueryFormData, - t, } from '@superset-ui/core'; /** @@ -55,7 +54,7 @@ export default function buildQuery(formData: 
QueryFormData) {
 },
         expressionType: 'SIMPLE',
         hasCustomLabel: true,
-        label: t('min'),
+        label: 'min',
       },
       {
         aggregate: 'MAX',
@@ -66,7 +65,7 @@ export default function buildQuery(formData: QueryFormData) {
 },
         expressionType: 'SIMPLE',
         hasCustomLabel: true,
-        label: t('max'),
+        label: 'max',
       },
     ],
   },

From 5878c117f20b6a5abb8f624defa6500aaadbb5e8 Mon Sep 17 00:00:00 2001
From: Arjun Devarajan
Date: Wed, 19 Jul 2023 17:00:45 -0400
Subject: [PATCH 13/43] feat: use Scarf Gateway for Superset npm downloads (#24433)

Co-authored-by: Arjun Devarajan
Co-authored-by: Evan Rusackas
Co-authored-by: Evan Rusackas
---
 CONTRIBUTING.md                     | 12 ++++++++++++
 superset-frontend/package-lock.json | 12 ++++++++++++
 superset-frontend/package.json      |  4 ++++
 3 files changed, 28 insertions(+)

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8c5d60f236829..91f224bc03cf7 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -577,6 +577,18 @@ cd superset-frontend
 npm ci
 ```

+Note that Superset uses [Scarf](https://docs.scarf.sh) to capture telemetry/analytics about versions being installed, including the `scarf-js` npm package. As noted elsewhere in this documentation, Scarf gathers aggregated stats for the sake of security/release strategy, and does not capture/retain PII. [You can read here](https://docs.scarf.sh/package-analytics/) about the package, and various means to opt out of it, but one easy way to opt out is to add this setting in `superset-frontend/package.json`:
+```json
+// your-package/package.json
+{
+  // ...
+  "scarfSettings": {
+    "enabled": false
+  }
+  // ...
+}
+```
+
 #### Build assets

 There are three types of assets you can build:
diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json
index 82bb7c360785d..1e9eca6f27f09 100644
--- a/superset-frontend/package-lock.json
+++ b/superset-frontend/package-lock.json
@@ -21,6 +21,7 @@
         "@emotion/styled": "^11.3.0",
         "@fontsource/inter": "^4.0.0",
         "@reduxjs/toolkit": "^1.9.3",
+        "@scarf/scarf": "^1.1.1",
         "@superset-ui/chart-controls": "file:./packages/superset-ui-chart-controls",
         "@superset-ui/core": "file:./packages/superset-ui-core",
         "@superset-ui/legacy-plugin-chart-calendar": "file:./plugins/legacy-plugin-chart-calendar",
@@ -12317,6 +12318,12 @@
       }
     },
+    "node_modules/@scarf/scarf": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.1.1.tgz",
+      "integrity": "sha512-VGbKDbk1RFIaSmdVb0cNjjWJoRWRI/Weo23AjRCC2nryO0iAS8pzsToJfPVPtVs74WHw4L1UTADNdIYRLkirZQ==",
+      "hasInstallScript": true
+    },
     "node_modules/@sigstore/protobuf-specs": {
       "version": "0.1.0",
       "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.1.0.tgz",
@@ -72722,6 +72729,11 @@
         "any-observable": "^0.3.0"
       }
     },
+    "@scarf/scarf": {
+      "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.1.1.tgz",
+      "integrity": "sha512-VGbKDbk1RFIaSmdVb0cNjjWJoRWRI/Weo23AjRCC2nryO0iAS8pzsToJfPVPtVs74WHw4L1UTADNdIYRLkirZQ=="
+    },
     "@sigstore/protobuf-specs": {
       "version": "0.1.0",
       "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.1.0.tgz",
diff --git a/superset-frontend/package.json b/superset-frontend/package.json
index 5104ad87db2a1..1a6b202e4be12 100644
--- a/superset-frontend/package.json
+++ b/superset-frontend/package.json
@@ -86,6 +86,7 @@
     "@emotion/styled": "^11.3.0",
     "@fontsource/inter": "^4.0.0",
     "@reduxjs/toolkit": "^1.9.3",
+    "@scarf/scarf": "^1.1.1",
     "@superset-ui/chart-controls": "file:./packages/superset-ui-chart-controls",
     "@superset-ui/core":
"file:./packages/superset-ui-core", "@superset-ui/legacy-plugin-chart-calendar": "file:./plugins/legacy-plugin-chart-calendar", @@ -357,5 +358,8 @@ } }, "readme": "ERROR: No README data found!", + "scarfSettings": { + "allowTopLevel": true + }, "_id": "superset@0.0.0-dev" } From 1a9724582f7bd292db562404f2c0b465342e7513 Mon Sep 17 00:00:00 2001 From: Elizabeth Thompson Date: Wed, 19 Jul 2023 14:22:32 -0700 Subject: [PATCH 14/43] chore: turn off talisman for ephemeral environments in ci (#24627) --- .github/workflows/ecs-task-definition.json | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ecs-task-definition.json b/.github/workflows/ecs-task-definition.json index 42fc0eb4caa09..7ef503681d602 100644 --- a/.github/workflows/ecs-task-definition.json +++ b/.github/workflows/ecs-task-definition.json @@ -25,8 +25,12 @@ "value": "8080" }, { - "name": "SUPERSET_SECRET_KEY", - "value": "super-secret-for-ephemerals" + "name": "SUPERSET_SECRET_KEY", + "value": "super-secret-for-ephemerals" + }, + { + "name": "TALISMAN_ENABLED", + "value": "False" } ], "mountPoints": [], From cb9b865a5398c479d221308ac6be1bd0a442f778 Mon Sep 17 00:00:00 2001 From: Daniel Vaz Gaspar Date: Thu, 20 Jul 2023 13:08:07 +0100 Subject: [PATCH 15/43] fix: import database engine validation (#24697) --- .../databases/commands/importers/v1/utils.py | 12 ++++++-- .../databases/commands_tests.py | 8 +++--- .../fixtures/importexport.py | 21 ++++++++++++-- .../commands/importers/v1/import_test.py | 28 ++++++++++++++++++- 4 files changed, 59 insertions(+), 10 deletions(-) diff --git a/superset/databases/commands/importers/v1/utils.py b/superset/databases/commands/importers/v1/utils.py index 8881f78a9c39c..c8c2847b9f673 100644 --- a/superset/databases/commands/importers/v1/utils.py +++ b/superset/databases/commands/importers/v1/utils.py @@ -20,10 +20,13 @@ from sqlalchemy.orm import Session -from superset import security_manager +from superset import app, security_manager from superset.commands.exceptions import ImportFailedError from superset.databases.ssh_tunnel.models import SSHTunnel +from superset.databases.utils import make_url_safe +from superset.exceptions import SupersetSecurityException from superset.models.core import Database +from superset.security.analytics_db_safety import check_sqlalchemy_uri def import_database( @@ -45,7 +48,12 @@ def import_database( raise ImportFailedError( "Database doesn't exist and user doesn't have permission to create databases" ) - + # Check if this URI is allowed + if app.config["PREVENT_UNSAFE_DB_CONNECTIONS"]: + try: + check_sqlalchemy_uri(make_url_safe(config["sqlalchemy_uri"])) + except SupersetSecurityException as exc: + raise ImportFailedError(exc.message) from exc # https://github.com/apache/superset/pull/16756 renamed ``csv`` to ``file``. 
config["allow_file_upload"] = config.pop("allow_csv_upload") if "schemas_allowed_for_csv_upload" in config["extra"]: diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py index 8ffb31b78215f..d5946d8b6d105 100644 --- a/tests/integration_tests/databases/commands_tests.py +++ b/tests/integration_tests/databases/commands_tests.py @@ -421,7 +421,7 @@ def test_import_v1_database(self, mock_g): assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == "{}" - assert database.sqlalchemy_uri == "sqlite:///test.db" + assert database.sqlalchemy_uri == "someengine://user:pass@host1" db.session.delete(database) db.session.commit() @@ -460,7 +460,7 @@ def test_import_v1_database_broken_csv_fields(self, mock_g): assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == '{"schemas_allowed_for_file_upload": ["upload"]}' - assert database.sqlalchemy_uri == "sqlite:///test.db" + assert database.sqlalchemy_uri == "someengine://user:pass@host1" db.session.delete(database) db.session.commit() @@ -716,7 +716,7 @@ def test_import_v1_database_with_ssh_tunnel_password( assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == "{}" - assert database.sqlalchemy_uri == "sqlite:///test.db" + assert database.sqlalchemy_uri == "someengine://user:pass@host1" model_ssh_tunnel = ( db.session.query(SSHTunnel) @@ -761,7 +761,7 @@ def test_import_v1_database_with_ssh_tunnel_private_key_and_password( assert database.database_name == "imported_database" assert database.expose_in_sqllab assert database.extra == "{}" - assert database.sqlalchemy_uri == "sqlite:///test.db" + assert database.sqlalchemy_uri == "someengine://user:pass@host1" model_ssh_tunnel = ( db.session.query(SSHTunnel) diff --git a/tests/integration_tests/fixtures/importexport.py b/tests/integration_tests/fixtures/importexport.py index 74237f4a82cfb..d0fa04e97dfc7 100644 --- a/tests/integration_tests/fixtures/importexport.py +++ b/tests/integration_tests/fixtures/importexport.py @@ -346,7 +346,7 @@ "type": "SavedQuery", "timestamp": "2021-03-30T20:37:54.791187+00:00", } -database_config: dict[str, Any] = { +database_config_sqlite: dict[str, Any] = { "allow_csv_upload": True, "allow_ctas": True, "allow_cvas": True, @@ -361,6 +361,21 @@ "version": "1.0.0", } +database_config: dict[str, Any] = { + "allow_csv_upload": True, + "allow_ctas": True, + "allow_cvas": True, + "allow_dml": True, + "allow_run_async": False, + "cache_timeout": None, + "database_name": "imported_database", + "expose_in_sqllab": True, + "extra": {}, + "sqlalchemy_uri": "someengine://user:pass@host1", + "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", + "version": "1.0.0", +} + database_with_ssh_tunnel_config_private_key: dict[str, Any] = { "allow_csv_upload": True, "allow_ctas": True, @@ -371,7 +386,7 @@ "database_name": "imported_database", "expose_in_sqllab": True, "extra": {}, - "sqlalchemy_uri": "sqlite:///test.db", + "sqlalchemy_uri": "someengine://user:pass@host1", "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", "ssh_tunnel": { "server_address": "localhost", @@ -393,7 +408,7 @@ "database_name": "imported_database", "expose_in_sqllab": True, "extra": {}, - "sqlalchemy_uri": "sqlite:///test.db", + "sqlalchemy_uri": "someengine://user:pass@host1", "uuid": "b8a1ccd3-779d-4ab7-8ad8-9ab119d7fe89", "ssh_tunnel": { "server_address": "localhost", diff --git 
a/tests/unit_tests/databases/commands/importers/v1/import_test.py b/tests/unit_tests/databases/commands/importers/v1/import_test.py index f9d2695f26072..b8bd24d94d187 100644 --- a/tests/unit_tests/databases/commands/importers/v1/import_test.py +++ b/tests/unit_tests/databases/commands/importers/v1/import_test.py @@ -42,7 +42,7 @@ def test_import_database(mocker: MockFixture, session: Session) -> None: config = copy.deepcopy(database_config) database = import_database(session, config) assert database.database_name == "imported_database" - assert database.sqlalchemy_uri == "sqlite:///test.db" + assert database.sqlalchemy_uri == "someengine://user:pass@host1" assert database.cache_timeout is None assert database.expose_in_sqllab is True assert database.allow_run_async is False @@ -65,6 +65,32 @@ def test_import_database(mocker: MockFixture, session: Session) -> None: assert database.allow_dml is False +def test_import_database_sqlite_invalid(mocker: MockFixture, session: Session) -> None: + """ + Test importing a database. + """ + from superset import app, security_manager + from superset.databases.commands.importers.v1.utils import import_database + from superset.models.core import Database + from tests.integration_tests.fixtures.importexport import database_config_sqlite + + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True + mocker.patch.object(security_manager, "can_access", return_value=True) + + engine = session.get_bind() + Database.metadata.create_all(engine) # pylint: disable=no-member + + config = copy.deepcopy(database_config_sqlite) + with pytest.raises(ImportFailedError) as excinfo: + _ = import_database(session, config) + assert ( + str(excinfo.value) + == "SQLiteDialect_pysqlite cannot be used as a data source for security reasons." + ) + # restore app config + app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True + + def test_import_database_managed_externally( mocker: MockFixture, session: Session, From d1d5ff6f9fe7c09047fec13bf5992357ef981b4f Mon Sep 17 00:00:00 2001 From: Maxime Beauchemin Date: Thu, 20 Jul 2023 10:00:15 -0700 Subject: [PATCH 16/43] chore: make antd table font size same as data table (#24741) --- superset-frontend/src/components/Table/VirtualTable.tsx | 5 ++++- superset-frontend/src/components/Table/index.tsx | 4 ++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/superset-frontend/src/components/Table/VirtualTable.tsx b/superset-frontend/src/components/Table/VirtualTable.tsx index d8658dde60997..4d4dc2a00423c 100644 --- a/superset-frontend/src/components/Table/VirtualTable.tsx +++ b/superset-frontend/src/components/Table/VirtualTable.tsx @@ -61,8 +61,11 @@ const StyledTable = styled(AntTable)<{ height?: number }>( .ant-pagination-item-active { border-color: ${theme.colors.primary.base}; + } + } + .ant-table.ant-table-small { + font-size: ${theme.typography.sizes.s}px; } - } `, ); diff --git a/superset-frontend/src/components/Table/index.tsx b/superset-frontend/src/components/Table/index.tsx index ef9830f49c61b..84ca7883f5757 100644 --- a/superset-frontend/src/components/Table/index.tsx +++ b/superset-frontend/src/components/Table/index.tsx @@ -184,6 +184,10 @@ const StyledTable = styled(AntTable)<{ height?: number }>( .ant-pagination-item-active { border-color: ${theme.colors.primary.base}; } + + .ant-table.ant-table-small { + font-size: ${theme.typography.sizes.s}px; + } `, ); const StyledVirtualTable = styled(VirtualTable)( From 4086514fa576b0ad39afcf9e983c67eb8bcb2ce5 Mon Sep 17 00:00:00 2001 From: "JUST.in DO IT" Date: Thu, 20 Jul 2023 
10:00:44 -0700
Subject: [PATCH 17/43] fix(native filter): clean deleted parent filter ids (#24749)

Co-authored-by: John Bodley <4567245+john-bodley@users.noreply.github.com>
---
 .../FiltersConfigModal.test.tsx               | 83 ++++++++++++++++++-
 .../FiltersConfigModal/FiltersConfigModal.tsx | 37 +++++----
 ...280_cleanup_erroneous_parent_filter_ids.py | 82 ++++++++++++++++++
 3 files changed, 185 insertions(+), 17 deletions(-)
 create mode 100644 superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py

diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.test.tsx
index 58ebcea548bc4..3070e6d0bceda 100644
--- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.test.tsx
+++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.test.tsx
@@ -353,10 +353,91 @@ test('filters are draggable', async () => {
   adds a new time grain filter type with all fields filled
   collapsible controls opens by default when it is checked
   advanced section opens by default when it has an option checked
-  deletes a filter
   disables the default value when default to first item is checked
   changes the default value options when the column changes
   switches to configuration tab when validation fails
   displays cancel message when there are pending operations
   do not displays cancel message when there are no pending operations
   */
+
+test('deletes a filter', async () => {
+  const nativeFilterState = [
+    buildNativeFilter('NATIVE_FILTER-1', 'state', ['NATIVE_FILTER-2']),
+    buildNativeFilter('NATIVE_FILTER-2', 'country', []),
+    buildNativeFilter('NATIVE_FILTER-3', 'product', []),
+  ];
+  const state = {
+    ...defaultState(),
+    dashboardInfo: {
+      metadata: { native_filter_configuration: nativeFilterState },
+    },
+    dashboardLayout,
+  };
+  const onSave = jest.fn();
+  defaultRender(state, {
+    ...props,
+    createNewOnOpen: false,
+    onSave,
+  });
+  const removeButtons = screen.getAllByRole('img', { name: 'trash' });
+  // remove NATIVE_FILTER-3 which isn't a dependency of any other filter
+  userEvent.click(removeButtons[2]);
+  userEvent.click(screen.getByRole('button', { name: SAVE_REGEX }));
+  await waitFor(() =>
+    expect(onSave).toHaveBeenCalledWith(
+      expect.arrayContaining([
+        expect.objectContaining({
+          type: 'NATIVE_FILTER',
+          id: 'NATIVE_FILTER-1',
+          cascadeParentIds: ['NATIVE_FILTER-2'],
+        }),
+        expect.objectContaining({
+          type: 'NATIVE_FILTER',
+          id: 'NATIVE_FILTER-2',
+          cascadeParentIds: [],
+        }),
+      ]),
+    ),
+  );
+});
+
+test('deletes a filter including dependencies', async () => {
+  const nativeFilterState = [
+    buildNativeFilter('NATIVE_FILTER-1', 'state', ['NATIVE_FILTER-2']),
+    buildNativeFilter('NATIVE_FILTER-2', 'country', []),
+    buildNativeFilter('NATIVE_FILTER-3', 'product', []),
+  ];
+  const state = {
+    ...defaultState(),
+    dashboardInfo: {
+      metadata: { native_filter_configuration: nativeFilterState },
+    },
+    dashboardLayout,
+  };
+  const onSave = jest.fn();
+  defaultRender(state, {
+    ...props,
+    createNewOnOpen: false,
+    onSave,
+  });
+  const removeButtons = screen.getAllByRole('img', { name: 'trash' });
+  // remove NATIVE_FILTER-2 which is a dependency of NATIVE_FILTER-1
+  userEvent.click(removeButtons[1]);
+  userEvent.click(screen.getByRole('button', { name: SAVE_REGEX }));
+  await waitFor(() =>
+    expect(onSave).toHaveBeenCalledWith(
+      expect.arrayContaining([
+        expect.objectContaining({
+          type:
'NATIVE_FILTER', + id: 'NATIVE_FILTER-1', + cascadeParentIds: [], + }), + expect.objectContaining({ + type: 'NATIVE_FILTER', + id: 'NATIVE_FILTER-3', + cascadeParentIds: [], + }), + ]), + ), + ); +}); diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx index 0abca30a303f2..29833af580eca 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx @@ -306,21 +306,25 @@ function FiltersConfigModal({ ); const cleanDeletedParents = (values: NativeFiltersForm | null) => { - Object.keys(filterConfigMap).forEach(key => { - const filter = filterConfigMap[key]; - if (!('cascadeParentIds' in filter)) { - return; - } - const { cascadeParentIds } = filter; - if (cascadeParentIds) { - dispatch( - updateCascadeParentIds( - key, - cascadeParentIds.filter(id => canBeUsedAsDependency(id)), - ), + const updatedFilterConfigMap = Object.keys(filterConfigMap).reduce( + (acc, key) => { + const filter = filterConfigMap[key]; + const cascadeParentIds = filter.cascadeParentIds?.filter(id => + canBeUsedAsDependency(id), ); - } - }); + if (cascadeParentIds) { + dispatch(updateCascadeParentIds(key, cascadeParentIds)); + } + return { + ...acc, + [key]: { + ...filter, + cascadeParentIds, + }, + }; + }, + {}, + ); const filters = values?.filters; if (filters) { @@ -337,6 +341,7 @@ function FiltersConfigModal({ } }); } + return updatedFilterConfigMap; }; const handleErroredFilters = useCallback(() => { @@ -375,9 +380,9 @@ function FiltersConfigModal({ handleErroredFilters(); if (values) { - cleanDeletedParents(values); + const updatedFilterConfigMap = cleanDeletedParents(values); createHandleSave( - filterConfigMap, + updatedFilterConfigMap, orderedFilters, removedFilters, onSave, diff --git a/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py b/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py new file mode 100644 index 0000000000000..17ad592b22fdc --- /dev/null +++ b/superset/migrations/versions/2023-07-19_16-48_a23c6f8b1280_cleanup_erroneous_parent_filter_ids.py @@ -0,0 +1,82 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""cleanup erroneous parent filter IDs + +Revision ID: a23c6f8b1280 +Revises: 863adcf72773 +Create Date: 2023-07-19 16:48:05.571149 + +""" + +# revision identifiers, used by Alembic. 
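+# "revision" names this migration and "down_revision" points at its parent,
+# chaining it into Alembic's upgrade/downgrade history.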
+revision = "a23c6f8b1280" +down_revision = "863adcf72773" + + +import json +import logging + +from alembic import op +from sqlalchemy import Column, Integer, Text +from sqlalchemy.ext.declarative import declarative_base + +from superset import db + +Base = declarative_base() + + +class Dashboard(Base): + __tablename__ = "dashboards" + + id = Column(Integer, primary_key=True) + json_metadata = Column(Text) + + +def upgrade(): + bind = op.get_bind() + session = db.Session(bind=bind) + + for dashboard in session.query(Dashboard).all(): + if dashboard.json_metadata: + updated = False + + try: + json_metadata = json.loads(dashboard.json_metadata) + + if filters := json_metadata.get("native_filter_configuration"): + filter_ids = {fltr["id"] for fltr in filters} + + for fltr in filters: + for parent_id in fltr["cascadeParentIds"][:]: + if parent_id not in filter_ids: + fltr["cascadeParentIds"].remove(parent_id) + updated = True + + if updated: + dashboard.json_metadata = json.dumps(json_metadata) + + except Exception: + logging.exception( + f"Unable to parse JSON metadata for dashboard {dashboard.id}" + ) + + session.commit() + session.close() + + +def downgrade(): + pass From 317aa989c233160fcf4fe9ce3e5c1953634c5524 Mon Sep 17 00:00:00 2001 From: "Michael S. Molina" <70410625+michael-s-molina@users.noreply.github.com> Date: Thu, 20 Jul 2023 14:02:52 -0300 Subject: [PATCH 18/43] fix: Dashboard time grain in Table (#24746) --- .../plugin-chart-table/src/buildQuery.ts | 13 ++++-- .../test/buildQuery.test.ts | 42 ++++++++++++++++++- 2 files changed, 50 insertions(+), 5 deletions(-) diff --git a/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts b/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts index 211070f8bcc4a..13bf2e9c2d4c8 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts +++ b/superset-frontend/plugins/plugin-chart-table/src/buildQuery.ts @@ -52,10 +52,15 @@ const buildQuery: BuildQuery = ( formData: TableChartFormData, options, ) => { - const { percent_metrics: percentMetrics, order_desc: orderDesc = false } = - formData; + const { + percent_metrics: percentMetrics, + order_desc: orderDesc = false, + extra_form_data, + } = formData; const queryMode = getQueryMode(formData); const sortByMetric = ensureIsArray(formData.timeseries_limit_metric)[0]; + const time_grain_sqla = + extra_form_data?.time_grain_sqla || formData.time_grain_sqla; let formDataCopy = formData; // never include time in raw records mode if (queryMode === QueryMode.raw) { @@ -102,12 +107,12 @@ const buildQuery: BuildQuery = ( columns = columns.map(col => { if ( isPhysicalColumn(col) && - formData.time_grain_sqla && + time_grain_sqla && hasGenericChartAxes && formData?.temporal_columns_lookup?.[col] ) { return { - timeGrain: formData.time_grain_sqla, + timeGrain: time_grain_sqla, columnType: 'BASE_AXIS', sqlExpression: col, label: col, diff --git a/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts b/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts index 4578dd139ddc1..164f31aa05492 100644 --- a/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts +++ b/superset-frontend/plugins/plugin-chart-table/test/buildQuery.test.ts @@ -16,7 +16,8 @@ * specific language governing permissions and limitations * under the License. 
*/ -import { QueryMode } from '@superset-ui/core'; +import { QueryMode, TimeGranularity } from '@superset-ui/core'; +import * as supersetCoreModule from '@superset-ui/core'; import buildQuery from '../src/buildQuery'; import { TableChartFormData } from '../src/types'; @@ -81,5 +82,44 @@ describe('plugin-chart-table', () => { expect(query.columns).toEqual(['rawcol']); expect(query.post_processing).toEqual([]); }); + it('should prefer extra_form_data.time_grain_sqla over formData.time_grain_sqla', () => { + Object.defineProperty(supersetCoreModule, 'hasGenericChartAxes', { + value: true, + }); + const query = buildQuery({ + ...basicFormData, + groupby: ['col1'], + query_mode: QueryMode.aggregate, + time_grain_sqla: TimeGranularity.MONTH, + extra_form_data: { time_grain_sqla: TimeGranularity.QUARTER }, + temporal_columns_lookup: { col1: true }, + }).queries[0]; + expect(query.columns?.[0]).toEqual({ + timeGrain: TimeGranularity.QUARTER, + columnType: 'BASE_AXIS', + sqlExpression: 'col1', + label: 'col1', + expressionType: 'SQL', + }); + }); + it('should fallback to formData.time_grain_sqla if extra_form_data.time_grain_sqla is not set', () => { + Object.defineProperty(supersetCoreModule, 'hasGenericChartAxes', { + value: true, + }); + const query = buildQuery({ + ...basicFormData, + time_grain_sqla: TimeGranularity.MONTH, + groupby: ['col1'], + query_mode: QueryMode.aggregate, + temporal_columns_lookup: { col1: true }, + }).queries[0]; + expect(query.columns?.[0]).toEqual({ + timeGrain: TimeGranularity.MONTH, + columnType: 'BASE_AXIS', + sqlExpression: 'col1', + label: 'col1', + expressionType: 'SQL', + }); + }); }); }); From 05e724f3d7ce0b74b264527eab0e04dafcb558a5 Mon Sep 17 00:00:00 2001 From: "JUST.in DO IT" Date: Thu, 20 Jul 2023 10:46:13 -0700 Subject: [PATCH 19/43] chore(native filters): Expandable filter config modal (#24559) Co-authored-by: Justin Park --- .../FilterCard/FilterCard.test.tsx | 7 -- .../FilterTitleContainer.tsx | 8 +- .../FiltersConfigModal/FiltersConfigModal.tsx | 83 +++++++++++++++---- 3 files changed, 76 insertions(+), 22 deletions(-) diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx index 93c12a53daf53..f75612baeb10f 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterCard/FilterCard.test.tsx @@ -205,13 +205,6 @@ jest.mock('@superset-ui/core', () => ({ }), })); -jest.mock( - 'src/components/Icons/Icon', - () => - ({ fileName }: { fileName: string }) => - , -); - // extract text from embedded html tags // source: https://polvara.me/posts/five-things-you-didnt-know-about-testing-library const getTextInHTMLTags = diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx index bfae2ce016887..5a65fd1d60fa6 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FilterTitleContainer.tsx @@ -112,7 +112,13 @@ const FilterTitleContainer = forwardRef( className={classNames.join(' ')} >
-
+
{isRemoved ? t('(Removed)') : getFilterTitle(id)}
{!removedFilters[id] && isErrored && ( diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx index 29833af580eca..26db1c9a97357 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FiltersConfigModal/FiltersConfigModal.tsx @@ -32,13 +32,17 @@ import { styled, SLOW_DEBOUNCE, t, + css, + useTheme, } from '@superset-ui/core'; import { useDispatch } from 'react-redux'; import { AntdForm } from 'src/components'; +import Icons from 'src/components/Icons'; import ErrorBoundary from 'src/components/ErrorBoundary'; import { StyledModal } from 'src/components/Modal'; import { testWithId } from 'src/utils/testUtils'; import { updateCascadeParentIds } from 'src/dashboard/actions/nativeFilters'; +import useEffectEvent from 'src/hooks/useEffectEvent'; import { useFilterConfigMap, useFilterConfiguration } from '../state'; import FilterConfigurePane from './FilterConfigurePane'; import FiltersConfigForm, { @@ -58,17 +62,40 @@ import { } from './utils'; import DividerConfigForm from './DividerConfigForm'; -const StyledModalWrapper = styled(StyledModal)` - min-width: 700px; +const MODAL_MARGIN = 16; + +const StyledModalWrapper = styled(StyledModal)<{ expanded: boolean }>` + min-width: 880px; + width: ${({ expanded }) => (expanded ? '100%' : '70%')} !important; + + @media (max-width: ${880 + MODAL_MARGIN * 2}px) { + width: 100% !important; + min-width: auto; + } + .ant-modal-body { padding: 0px; } + + ${({ expanded }) => + expanded && + css` + height: 100%; + + .ant-modal-body { + flex: 1 1 auto; + } + .ant-modal-content { + height: 100%; + } + `} `; -export const StyledModalBody = styled.div` +export const StyledModalBody = styled.div<{ expanded: boolean }>` display: flex; - height: 700px; + height: ${({ expanded }) => (expanded ? '100%' : '700px')}; flex-direction: row; + flex: 1; .filters-list { width: ${({ theme }) => theme.gridUnit * 50}px; overflow: auto; @@ -79,6 +106,10 @@ export const StyledForm = styled(AntdForm)` width: 100%; `; +export const StyledExpandButtonWrapper = styled.div` + margin-left: ${({ theme }) => theme.gridUnit * 4}px; +`; + export const FILTERS_CONFIG_MODAL_TEST_ID = 'filters-config-modal'; export const getFiltersConfigModalTestId = testWithId( FILTERS_CONFIG_MODAL_TEST_ID, @@ -119,6 +150,7 @@ function FiltersConfigModal({ onCancel, }: FiltersConfigModalProps) { const dispatch = useDispatch(); + const theme = useTheme(); const [form] = AntdForm.useForm(); @@ -482,6 +514,14 @@ function FiltersConfigModal({ [buildDependencyMap, canBeUsedAsDependency, orderedFilters], ); + const [expanded, setExpanded] = useState(false); + const toggleExpand = useEffectEvent(() => { + setExpanded(!expanded); + }); + const ToggleIcon = expanded + ? Icons.FullscreenExitOutlined + : Icons.FullscreenOutlined; + const handleValuesChange = useMemo( () => debounce((changes: any, values: NativeFiltersForm) => { @@ -586,25 +626,40 @@ function FiltersConfigModal({ visible={isOpen} maskClosable={false} title={t('Add and edit filters')} - width="50%" + expanded={expanded} destroyOnClose onCancel={handleCancel} onOk={handleSave} centered data-test="filter-modal" footer={ -