diff --git a/superset/config.py b/superset/config.py
index 75fda6eb37a1c..0b70328e0b4c0 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -266,6 +266,20 @@ def _try_json_readsha(filepath: str, length: int) -> str | None:
 # Configuration for scheduling queries from SQL Lab.
 SCHEDULED_QUERIES: dict[str, Any] = {}
 
+# FAB Rate limiting: this is a security feature for preventing DDOS attacks. The
+# feature is on by default to make Superset secure by default, but you should
+# fine tune the limits to your needs. You can read more about the different
+# parameters here: https://flask-limiter.readthedocs.io/en/stable/configuration.html
+RATELIMIT_ENABLED = True
+RATELIMIT_APPLICATION = "50 per second"
+AUTH_RATE_LIMITED = True
+AUTH_RATE_LIMIT = "5 per second"
+# A storage location conforming to the scheme in storage-scheme. See the limits
+# library for allowed values: https://limits.readthedocs.io/en/stable/storage.html
+# RATELIMIT_STORAGE_URI = "redis://host:port"
+# A callable that returns the unique identity of the current request.
+# RATELIMIT_REQUEST_IDENTIFIER = flask.Request.endpoint
+
 # ------------------------------
 # GLOBALS FOR APP Builder
 # ------------------------------
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index 1602c8e2f961c..b2aa43b0ee41b 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -817,7 +817,7 @@ def export(self, **kwargs: Any) -> Response:  # pylint: disable=too-many-locals
             Dashboard.id.in_(requested_ids)
         )
         query = self._base_filters.apply_all(query)
-        ids = [item.id for item in query.all()]
+        ids = {item.id for item in query.all()}
         if not ids:
             return self.response_404()
         export = Dashboard.export_dashboards(ids)
diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py
index 719a6df8e4b10..f837c76610cca 100644
--- a/superset/models/dashboard.py
+++ b/superset/models/dashboard.py
@@ -373,7 +373,8 @@ def clear_cache_for_datasource(cls, datasource_id: int) -> None:
 
     @classmethod
     def export_dashboards(  # pylint: disable=too-many-locals
-        cls, dashboard_ids: list[int]
+        cls,
+        dashboard_ids: set[int],
     ) -> str:
         copied_dashboards = []
         datasource_ids = set()
diff --git a/superset/utils/dashboard_import_export.py b/superset/utils/dashboard_import_export.py
index fc61d0a422d0c..eef8cbe6df1cd 100644
--- a/superset/utils/dashboard_import_export.py
+++ b/superset/utils/dashboard_import_export.py
@@ -27,8 +27,8 @@ def export_dashboards(session: Session) -> str:
     """Returns all dashboards metadata as a json dump"""
     logger.info("Starting export")
     dashboards = session.query(Dashboard)
-    dashboard_ids = []
+    dashboard_ids = set()
     for dashboard in dashboards:
-        dashboard_ids.append(dashboard.id)
+        dashboard_ids.add(dashboard.id)
     data = Dashboard.export_dashboards(dashboard_ids)
     return data
diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py
index a96d56fc14bc5..ce5e8f1e07507 100644
--- a/superset/views/dashboard/views.py
+++ b/superset/views/dashboard/views.py
@@ -78,7 +78,7 @@ def mulexport(
     @expose("/export_dashboards_form")
    def download_dashboards(self) -> FlaskResponse:
         if request.args.get("action") == "go":
-            ids = request.args.getlist("id")
+            ids = set(request.args.getlist("id"))
             return Response(
                 DashboardModel.export_dashboards(ids),
                 headers=generate_download_headers("json"),
diff --git a/tests/integration_tests/superset_test_config.py b/tests/integration_tests/superset_test_config.py
index 77e007a2ddbd8..bcc3146083bf7 100644
--- a/tests/integration_tests/superset_test_config.py
+++ b/tests/integration_tests/superset_test_config.py
@@ -97,6 +97,8 @@ def GET_FEATURE_FLAGS_FUNC(ff):
 REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3)
 REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4)
 
+RATELIMIT_ENABLED = False
+
 CACHE_CONFIG = {
     "CACHE_TYPE": "RedisCache",
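
For deployments that want to tune the new defaults introduced in the superset/config.py hunk above, a minimal sketch of a superset_config.py override follows. The setting names are the ones added by this patch; the specific limit strings and the Redis address are illustrative placeholders, not values taken from the patch.

# superset_config.py -- a minimal sketch, assuming a Redis instance is reachable
# at the placeholder address below. Setting names come from superset/config.py;
# the limit values here are illustrative, not recommendations.

# Keep rate limiting on, but relax the global application-wide limit.
RATELIMIT_ENABLED = True
RATELIMIT_APPLICATION = "100 per second"

# Keep the dedicated login-endpoint limit, expressed as a per-minute budget.
AUTH_RATE_LIMITED = True
AUTH_RATE_LIMIT = "10 per minute"

# Store counters in Redis so limits are shared across worker processes
# instead of being tracked per-process in memory.
RATELIMIT_STORAGE_URI = "redis://localhost:6379/1"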