diff --git a/superset/config.py b/superset/config.py index f2d9fa5adf589..39ce66e875ab4 100644 --- a/superset/config.py +++ b/superset/config.py @@ -278,6 +278,20 @@ def _try_json_readsha(filepath: str, length: int) -> Optional[str]: # Configuration for scheduling queries from SQL Lab. SCHEDULED_QUERIES: Dict[str, Any] = {} +# FAB Rate limiting: this is a security feature for preventing DDoS attacks. The +# feature is on by default to make Superset secure by default, but you should +# fine-tune the limits to your needs. You can read more about the different +# parameters here: https://flask-limiter.readthedocs.io/en/stable/configuration.html +RATELIMIT_ENABLED = True +RATELIMIT_APPLICATION = "50 per second" +AUTH_RATE_LIMITED = True +AUTH_RATE_LIMIT = "5 per second" +# A storage location conforming to the storage scheme. See the limits +# library for allowed values: https://limits.readthedocs.io/en/stable/storage.html +# RATELIMIT_STORAGE_URI = "redis://host:port" +# A callable that returns the unique identity of the current request. 
+# RATELIMIT_REQUEST_IDENTIFIER = flask.Request.endpoint + # ------------------------------ # GLOBALS FOR APP Builder # ------------------------------ diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index 1a476a0a978b5..6f478e22ccc65 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -813,7 +813,7 @@ def export(self, **kwargs: Any) -> Response: Dashboard.id.in_(requested_ids) ) query = self._base_filters.apply_all(query) - ids = [item.id for item in query.all()] + ids = {item.id for item in query.all()} if not ids: return self.response_404() export = Dashboard.export_dashboards(ids) diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index 60a8ea0e30138..e2192ae2f2ccd 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -372,7 +372,8 @@ def clear_cache_for_datasource(cls, datasource_id: int) -> None: @classmethod def export_dashboards( # pylint: disable=too-many-locals - cls, dashboard_ids: List[int] + cls, + dashboard_ids: Set[int], ) -> str: copied_dashboards = [] datasource_ids = set() diff --git a/superset/utils/dashboard_import_export.py b/superset/utils/dashboard_import_export.py index fc61d0a422d0c..eef8cbe6df1cd 100644 --- a/superset/utils/dashboard_import_export.py +++ b/superset/utils/dashboard_import_export.py @@ -27,8 +27,8 @@ def export_dashboards(session: Session) -> str: """Returns all dashboards metadata as a json dump""" logger.info("Starting export") dashboards = session.query(Dashboard) - dashboard_ids = [] + dashboard_ids = set() for dashboard in dashboards: - dashboard_ids.append(dashboard.id) + dashboard_ids.add(dashboard.id) data = Dashboard.export_dashboards(dashboard_ids) return data diff --git a/superset/views/dashboard/views.py b/superset/views/dashboard/views.py index 52cb2da82e911..e476a88f31b08 100644 --- a/superset/views/dashboard/views.py +++ b/superset/views/dashboard/views.py @@ -76,7 +76,7 @@ def mulexport( # pylint: disable=no-self-use 
@expose("/export_dashboards_form") def download_dashboards(self) -> FlaskResponse: if request.args.get("action") == "go": - ids = request.args.getlist("id") + ids = set(request.args.getlist("id")) return Response( DashboardModel.export_dashboards(ids), headers=generate_download_headers("json"), diff --git a/tests/integration_tests/superset_test_config.py b/tests/integration_tests/superset_test_config.py index 19c2cc000f545..76b83fb46584a 100644 --- a/tests/integration_tests/superset_test_config.py +++ b/tests/integration_tests/superset_test_config.py @@ -96,6 +96,8 @@ def GET_FEATURE_FLAGS_FUNC(ff): REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3) REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4) +RATELIMIT_ENABLED = False + CACHE_CONFIG = { "CACHE_TYPE": "RedisCache",