# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""The main config file for Superset
All configuration in this file can be overridden by providing a superset_config
module in your PYTHONPATH, as there is a ``from superset_config import *``
at the end of this file.
"""
# mypy: ignore-errors
# pylint: disable=too-many-lines
from __future__ import annotations
import importlib.util
import json
import logging
import os
import re
import sys
from collections import OrderedDict
from contextlib import contextmanager
from datetime import timedelta
from email.mime.multipart import MIMEMultipart
from importlib.resources import files
from typing import Any, Callable, Iterator, Literal, TYPE_CHECKING, TypedDict
import click
import pkg_resources
from celery.schedules import crontab
from flask import Blueprint
from flask_appbuilder.security.manager import AUTH_DB
from flask_caching.backends.base import BaseCache
from pandas import Series
from pandas._libs.parsers import STR_NA_VALUES
from sqlalchemy.engine.url import URL
from sqlalchemy.orm.query import Query
from superset.advanced_data_type.plugins.internet_address import internet_address
from superset.advanced_data_type.plugins.internet_port import internet_port
from superset.advanced_data_type.types import AdvancedDataType
from superset.constants import CHANGE_ME_SECRET_KEY
from superset.jinja_context import BaseTemplateProcessor
from superset.key_value.types import JsonKeyValueCodec
from superset.stats_logger import DummyStatsLogger
from superset.superset_typing import CacheConfig
from superset.tasks.types import ExecutorType
from superset.utils import core as utils
from superset.utils.core import is_test, NO_TIME_RANGE, parse_boolean_string
from superset.utils.encrypt import SQLAlchemyUtilsAdapter
from superset.utils.log import DBEventLogger
from superset.utils.logging_configurator import DefaultLoggingConfigurator
logger = logging.getLogger(__name__)
if TYPE_CHECKING:
from flask_appbuilder.security.sqla import models
from sqlglot import Dialect, Dialects
from superset.connectors.sqla.models import SqlaTable
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
# Realtime stats logger, a StatsD implementation exists
STATS_LOGGER = DummyStatsLogger()
# By default will log events to the metadata database with `DBEventLogger`
# Note that you can use `StdOutEventLogger` for debugging
# Note that you can write your own event logger by extending `AbstractEventLogger`
# https://github.com/apache/superset/blob/master/superset/utils/log.py
EVENT_LOGGER = DBEventLogger()
SUPERSET_LOG_VIEW = True
BASE_DIR = pkg_resources.resource_filename("superset", "")
if "SUPERSET_HOME" in os.environ:
DATA_DIR = os.environ["SUPERSET_HOME"]
else:
DATA_DIR = os.path.expanduser("~/.superset")
# ---------------------------------------------------------
# Superset specific config
# ---------------------------------------------------------
VERSION_INFO_FILE = str(files("superset") / "static/version_info.json")
PACKAGE_JSON_FILE = str(files("superset") / "static/assets/package.json")
# Multiple favicons can be specified here. The "href" property
# is mandatory, but "sizes", "type", and "rel" are optional.
# For example:
# {
# "href":path/to/image.png",
# "sizes": "16x16",
# "type": "image/png"
# "rel": "icon"
# },
FAVICONS = [{"href": "/static/assets/images/favicon.png"}]
def _try_json_readversion(filepath: str) -> str | None:
try:
with open(filepath) as f:
return json.load(f).get("version")
except Exception: # pylint: disable=broad-except
return None
def _try_json_readsha(filepath: str, length: int) -> str | None:
try:
with open(filepath) as f:
return json.load(f).get("GIT_SHA")[:length]
except Exception: # pylint: disable=broad-except
return None
#
# If True, we will skip the call to load the logger config found in alembic.ini
#
ALEMBIC_SKIP_LOG_CONFIG = False
# Depending on the context in which this config is loaded, the
# version_info.json file may or may not be available, as it is
# generated on install via setup.py. In the event that we're
# actually running Superset, we will have already installed,
# therefore it WILL exist. When unit tests are running, however,
# it WILL NOT exist, so we fall back to reading package.json
VERSION_STRING = _try_json_readversion(VERSION_INFO_FILE) or _try_json_readversion(
PACKAGE_JSON_FILE
)
VERSION_SHA_LENGTH = 8
VERSION_SHA = _try_json_readsha(VERSION_INFO_FILE, VERSION_SHA_LENGTH)
# Build number is shown in the About section if available. This
# can be replaced at build time to expose build information.
BUILD_NUMBER = None
# default viz used in chart explorer & SQL Lab explore
DEFAULT_VIZ_TYPE = "table"
# default row limit when requesting chart data
ROW_LIMIT = 50000
# default row limit when requesting samples from datasource in explore view
SAMPLES_ROW_LIMIT = 1000
# default row limit for native filters
NATIVE_FILTER_DEFAULT_ROW_LIMIT = 1000
# max rows retrieved by filter select auto complete
FILTER_SELECT_ROW_LIMIT = 10000
# default time filter in explore
# values may be "Last day", "Last week", "<ISO date> : now", etc.
DEFAULT_TIME_FILTER = NO_TIME_RANGE
# This is an important setting, and should be lower than your
# [load balancer / proxy / envoy / kong / ...] timeout settings.
# You should also make sure to configure your WSGI server
# (gunicorn, nginx, apache, ...) timeout setting to be <= this setting
SUPERSET_WEBSERVER_TIMEOUT = int(timedelta(minutes=1).total_seconds())
# These two settings are used by the dashboard periodical force refresh feature.
# When users choose an auto force refresh frequency
# < SUPERSET_DASHBOARD_PERIODICAL_REFRESH_LIMIT
# they will see a warning message in the Refresh Interval Modal.
# Please check PR #9886
SUPERSET_DASHBOARD_PERIODICAL_REFRESH_LIMIT = 0
SUPERSET_DASHBOARD_PERIODICAL_REFRESH_WARNING_MESSAGE = None
SUPERSET_DASHBOARD_POSITION_DATA_LIMIT = 65535
CUSTOM_SECURITY_MANAGER = None
SQLALCHEMY_TRACK_MODIFICATIONS = False
# ---------------------------------------------------------
# Your App secret key. Make sure you override it on superset_config.py
# or use `SUPERSET_SECRET_KEY` environment variable.
# Use a strong, complex alphanumeric string and use a tool to help you generate
# a sufficiently random sequence, e.g.: openssl rand -base64 42
SECRET_KEY = os.environ.get("SUPERSET_SECRET_KEY") or CHANGE_ME_SECRET_KEY
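# e.g., a minimal sketch of overriding this in superset_config.py (generate your
# own value; never ship the placeholder):
#
# SECRET_KEY = "<output of `openssl rand -base64 42`>"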
# The SQLAlchemy connection string.
SQLALCHEMY_DATABASE_URI = (
f"""sqlite:///{os.path.join(DATA_DIR, "superset.db")}?check_same_thread=false"""
)
# SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
# SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
# This config is exposed through flask-sqlalchemy, and can be used to set your metadata
# database connection settings. You can use this to set arbitrary connection settings
# that may be specific to the database engine you are using.
# Note that you can use this to set the isolation level of your database, as in
# `SQLALCHEMY_ENGINE_OPTIONS = {"isolation_level": "READ COMMITTED"}`
# Also note that we recommend READ COMMITTED for regular operation.
# Find out more here https://flask-sqlalchemy.palletsprojects.com/en/3.1.x/config/
SQLALCHEMY_ENGINE_OPTIONS = {}
# To hook up a custom password store for all SQLAlchemy connections,
# implement a function that takes a single argument of type 'sqla.engine.url',
# returns a password, and set it as SQLALCHEMY_CUSTOM_PASSWORD_STORE.
#
# e.g.:
# def lookup_password(url):
# return 'secret'
# SQLALCHEMY_CUSTOM_PASSWORD_STORE = lookup_password
SQLALCHEMY_CUSTOM_PASSWORD_STORE = None
#
# The EncryptedFieldTypeAdapter is used whenever we're building SqlAlchemy models
# which include sensitive fields that should be app-encrypted BEFORE sending
# to the DB.
#
# Note: the default impl leverages SqlAlchemyUtils' EncryptedType, which defaults
# to AesEngine that uses AES-128 under the covers using the app's SECRET_KEY
# as key material. Do note that AesEngine allows for queryability over the
# encrypted fields.
#
# To change the default engine you need to define your own adapter:
#
# e.g.:
#
# class AesGcmEncryptedAdapter(
# AbstractEncryptedFieldAdapter
# ):
# def create(
# self,
# app_config: Optional[Dict[str, Any]],
# *args: List[Any],
# **kwargs: Optional[Dict[str, Any]],
# ) -> TypeDecorator:
# if app_config:
# return EncryptedType(
# *args, app_config["SECRET_KEY"], engine=AesGcmEngine, **kwargs
# )
# raise Exception("Missing app_config kwarg")
#
#
# SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER = AesGcmEncryptedAdapter
SQLALCHEMY_ENCRYPTED_FIELD_TYPE_ADAPTER = ( # pylint: disable=invalid-name
SQLAlchemyUtilsAdapter
)
# Extends the default SQLGlot dialects with additional dialects
SQLGLOT_DIALECTS_EXTENSIONS: dict[str, Dialects | type[Dialect]] = {}
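# e.g., a sketch mapping a hypothetical engine name ("my_custom_engine") to an
# existing SQLGlot dialect class:
#
# from sqlglot.dialects.postgres import Postgres
# SQLGLOT_DIALECTS_EXTENSIONS = {"my_custom_engine": Postgres}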
# The limit of queries fetched for query search
QUERY_SEARCH_LIMIT = 1000
# Flask-WTF flag for CSRF
WTF_CSRF_ENABLED = True
# Add endpoints that need to be exempt from CSRF protection
WTF_CSRF_EXEMPT_LIST = [
"superset.views.core.log",
"superset.views.core.explore_json",
"superset.charts.data.api.data",
"superset.dashboards.api.cache_dashboard_screenshot",
]
# Whether to run the web server in debug mode or not
DEBUG = parse_boolean_string(os.environ.get("FLASK_DEBUG"))
FLASK_USE_RELOAD = True
# Enable profiling of Python calls. Turn this on and append ``?_instrument=1``
# to the page to see the call stack.
PROFILING = False
# Superset allows server-side python stacktraces to be surfaced to the
# user when this feature is on. This may have security implications
# and it's more secure to turn it off in production settings.
SHOW_STACKTRACE = False
# Use all X-Forwarded headers when ENABLE_PROXY_FIX is True.
# When proxying to a different port, set "x_port" to 0 to avoid downstream issues.
ENABLE_PROXY_FIX = False
PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 1, "x_prefix": 1}
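# e.g., a sketch for a single proxy hop that rewrites the port, per the note above:
#
# ENABLE_PROXY_FIX = True
# PROXY_FIX_CONFIG = {"x_for": 1, "x_proto": 1, "x_host": 1, "x_port": 0, "x_prefix": 1}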
# Configuration for scheduling queries from SQL Lab.
SCHEDULED_QUERIES: dict[str, Any] = {}
# FAB Rate limiting: this is a security feature for preventing DDOS attacks. The
# feature is on by default to make Superset secure by default, but you should
# fine tune the limits to your needs. You can read more about the different
# parameters here: https://flask-limiter.readthedocs.io/en/stable/configuration.html
RATELIMIT_ENABLED = os.environ.get("SUPERSET_ENV") == "production"
RATELIMIT_APPLICATION = "50 per second"
AUTH_RATE_LIMITED = True
AUTH_RATE_LIMIT = "5 per second"
# A storage location conforming to the scheme in storage-scheme. See the limits
# library for allowed values: https://limits.readthedocs.io/en/stable/storage.html
# RATELIMIT_STORAGE_URI = "redis://host:port"
# A callable that returns the unique identity of the current request.
# RATELIMIT_REQUEST_IDENTIFIER = flask.Request.endpoint
# ------------------------------
# GLOBALS FOR APP Builder
# ------------------------------
# Uncomment to set up your app name
APP_NAME = "Superset"
# Specify the App icon
APP_ICON = "/static/assets/images/superset-logo-horiz.png"
# Specify where clicking the logo would take the user.
# Default value of None will take you to '/superset/welcome'
# You can also specify a relative URL e.g. '/superset/welcome' or '/dashboards/list'
# or you can specify a full URL e.g. 'https://foo.bar'
LOGO_TARGET_PATH = None
# Specify tooltip that should appear when hovering over the App Icon/Logo
LOGO_TOOLTIP = ""
# Specify any text that should appear to the right of the logo
LOGO_RIGHT_TEXT: Callable[[], str] | str = ""
# Enables SWAGGER UI for superset openapi spec
# ex: http://localhost:8080/swagger/v1
FAB_API_SWAGGER_UI = True
# ----------------------------------------------------
# AUTHENTICATION CONFIG
# ----------------------------------------------------
# The authentication type
# AUTH_OID : Is for OpenID
# AUTH_DB : Is for database (username/password)
# AUTH_LDAP : Is for LDAP
# AUTH_REMOTE_USER : Is for using REMOTE_USER from web server
AUTH_TYPE = AUTH_DB
# Uncomment to set up the Full admin role name
# AUTH_ROLE_ADMIN = 'Admin'
# Uncomment to set up the Public role name (no authentication needed)
# AUTH_ROLE_PUBLIC = 'Public'
# Will allow user self-registration
# AUTH_USER_REGISTRATION = True
# The default user self-registration role
# AUTH_USER_REGISTRATION_ROLE = "Public"
# When using LDAP Auth, set up the LDAP server
# AUTH_LDAP_SERVER = "ldap://ldapserver.new"
# Uncomment to set up OpenID providers for OpenID authentication
# OPENID_PROVIDERS = [
# { 'name': 'Yahoo', 'url': 'https://open.login.yahoo.com/' },
# { 'name': 'Flickr', 'url': 'https://www.flickr.com/<username>' },
# ]
# ---------------------------------------------------
# Roles config
# ---------------------------------------------------
# Grant the public role the same set of permissions as a selected builtin role.
# This is useful if one wants to enable anonymous users to view
# dashboards. Explicit grants on specific datasets are still required.
PUBLIC_ROLE_LIKE: str | None = None
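# e.g., to let anonymous users browse dashboards with Gamma-level permissions:
#
# PUBLIC_ROLE_LIKE = "Gamma"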
# ---------------------------------------------------
# Babel config for translations
# ---------------------------------------------------
# Set up the default language
BABEL_DEFAULT_LOCALE = "en"
# Your application default translation path
BABEL_DEFAULT_FOLDER = "superset/translations"
# The allowed translation for your app
LANGUAGES = {
"en": {"flag": "us", "name": "English"},
"es": {"flag": "es", "name": "Spanish"},
"it": {"flag": "it", "name": "Italian"},
"fr": {"flag": "fr", "name": "French"},
"zh": {"flag": "cn", "name": "Chinese"},
"zh_TW": {"flag": "tw", "name": "Traditional Chinese"},
"ja": {"flag": "jp", "name": "Japanese"},
"de": {"flag": "de", "name": "German"},
"pt": {"flag": "pt", "name": "Portuguese"},
"pt_BR": {"flag": "br", "name": "Brazilian Portuguese"},
"ru": {"flag": "ru", "name": "Russian"},
"ko": {"flag": "kr", "name": "Korean"},
"sk": {"flag": "sk", "name": "Slovak"},
"sl": {"flag": "si", "name": "Slovenian"},
"nl": {"flag": "nl", "name": "Dutch"},
"uk": {"flag": "uk", "name": "Ukranian"},
}
# Turning off i18n by default, as translations in most languages are
# incomplete and not well maintained.
LANGUAGES = {}
# Override the default d3 locale format
# Default values are equivalent to
# D3_FORMAT = {
# "decimal": ".", # - decimal place string (e.g., ".").
# "thousands": ",", # - group separator string (e.g., ",").
# "grouping": [3], # - array of group sizes (e.g., [3]), cycled as needed.
# "currency": ["$", ""] # - currency prefix/suffix strings (e.g., ["$", ""])
# }
# https://github.com/d3/d3-format/blob/main/README.md#formatLocale
class D3Format(TypedDict, total=False):
decimal: str
thousands: str
grouping: list[int]
currency: list[str]
D3_FORMAT: D3Format = {}
# Override the default d3 locale for time format
# Default values are equivalent to
# D3_TIME_FORMAT = {
# "dateTime": "%x, %X",
# "date": "%-m/%-d/%Y",
# "time": "%-I:%M:%S %p",
# "periods": ["AM", "PM"],
# "days": ["Sunday", "Monday", "Tuesday", "Wednesday",
# "Thursday", "Friday", "Saturday"],
# "shortDays": ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"],
# "months": ["January", "February", "March", "April",
# "May", "June", "July", "August",
# "September", "October", "November", "December"],
# "shortMonths": ["Jan", "Feb", "Mar", "Apr",
# "May", "Jun", "Jul", "Aug",
# "Sep", "Oct", "Nov", "Dec"]
# }
# https://github.com/d3/d3-time-format/tree/main#locales
class D3TimeFormat(TypedDict, total=False):
date: str
dateTime: str
time: str
periods: list[str]
days: list[str]
shortDays: list[str]
months: list[str]
shortMonths: list[str]
D3_TIME_FORMAT: D3TimeFormat = {}
CURRENCIES = ["USD", "EUR", "GBP", "INR", "MXN", "JPY", "CNY"]
# ---------------------------------------------------
# Feature flags
# ---------------------------------------------------
# Feature flags that are set by default go here. Their values can be
# overwritten by those specified under FEATURE_FLAGS in superset_config.py
# For example, DEFAULT_FEATURE_FLAGS = { 'FOO': True, 'BAR': False } here
# and FEATURE_FLAGS = { 'BAR': True, 'BAZ': True } in superset_config.py
# will result in combined feature flags of { 'FOO': True, 'BAR': True, 'BAZ': True }
DEFAULT_FEATURE_FLAGS: dict[str, bool] = {
# When using a recent version of Druid that supports JOINs turn this on
"DRUID_JOINS": False,
"DYNAMIC_PLUGINS": False,
# With Superset 2.0, we are updating the default so that the legacy datasource
# editor no longer shows. Currently this is set to true so that the editor
# option does not show, as we are deprecating it.
"DISABLE_LEGACY_DATASOURCE_EDITOR": True,
"ENABLE_TEMPLATE_PROCESSING": False,
# Allow for javascript controls components
# this enables programmers to customize certain charts (like the
# geospatial ones) by inputting javascript in controls. This exposes
# an XSS security vulnerability
"ENABLE_JAVASCRIPT_CONTROLS": False, # deprecated
"KV_STORE": False, # deprecated
# When this feature is enabled, nested types in Presto will be
# expanded into extra columns and/or arrays. This is experimental,
# and doesn't work with all nested types.
"PRESTO_EXPAND_DATA": False,
# Exposes API endpoint to compute thumbnails
"THUMBNAILS": False,
# Enable the endpoints to cache and retrieve dashboard screenshots via webdriver.
# Requires configuring Celery and a cache using THUMBNAIL_CACHE_CONFIG.
"ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS": False,
# Generate screenshots (PDF or JPG) of dashboards using the web driver.
# When disabled, screenshots are generated on the fly by the browser.
# This feature flag is used by the download feature in the dashboard view.
# It is dependent on ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS being enabled.
"ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT": False,
"SHARE_QUERIES_VIA_KV_STORE": False,
"TAGGING_SYSTEM": False,
"SQLLAB_BACKEND_PERSISTENCE": True,
"LISTVIEWS_DEFAULT_CARD_VIEW": False,
# When True, this escapes HTML (rather than rendering it) in Markdown components
"ESCAPE_MARKDOWN_HTML": False,
"DASHBOARD_CROSS_FILTERS": True, # deprecated
"DASHBOARD_VIRTUALIZATION": True,
# This feature flag is still in beta and is not recommended for production use.
"GLOBAL_ASYNC_QUERIES": False,
"EMBEDDED_SUPERSET": False,
# Enables Alerts and reports new implementation
"ALERT_REPORTS": False,
"ALERT_REPORT_TABS": False,
"ALERT_REPORT_SLACK_V2": False,
"DASHBOARD_RBAC": False,
"ENABLE_ADVANCED_DATA_TYPES": False,
# When ALERTS_ATTACH_REPORTS is enabled, the system sends email and Slack
# messages with a screenshot and link.
# When ALERTS_ATTACH_REPORTS is disabled, the system DOES NOT generate screenshots
# for reports with type 'alert', and sends email and Slack messages with only a
# link; reports with type 'report' are still sent with a screenshot and link in
# the email and Slack message.
"ALERTS_ATTACH_REPORTS": True,
# Allow users to export full CSV of table viz type.
# This could cause the server to run out of memory or compute.
"ALLOW_FULL_CSV_EXPORT": False,
"ALLOW_ADHOC_SUBQUERY": False,
"USE_ANALAGOUS_COLORS": False,
# Apply RLS rules to SQL Lab queries. This requires parsing and manipulating the
# query, and might break queries and/or allow users to bypass RLS. Use with care!
"RLS_IN_SQLLAB": False,
# When impersonating a user, use the email prefix instead of the username
"IMPERSONATE_WITH_EMAIL_PREFIX": False,
# Enable caching per impersonation key (e.g. username) in a datasource where user
# impersonation is enabled
"CACHE_IMPERSONATION": False,
# Enable caching per user key for Superset cache (not database cache impersonation)
"CACHE_QUERY_BY_USER": False,
# Enable sharing charts with embedding
"EMBEDDABLE_CHARTS": True,
"DRILL_TO_DETAIL": True,
"DRILL_BY": True,
"DATAPANEL_CLOSED_BY_DEFAULT": False,
"HORIZONTAL_FILTER_BAR": False,
# The feature is off by default, and currently only supported in Presto, Postgres,
# and BigQuery.
# It also needs to be enabled on a per-database basis, by adding the key/value pair
# `cost_estimate_enabled: true` to the database `extra` attribute.
"ESTIMATE_QUERY_COST": False,
# Allow users to enable SSH tunneling when creating a DB.
# Users must check whether the DB engine supports SSH tunnels;
# otherwise enabling this flag won't have any effect on the DB.
"SSH_TUNNELING": False,
"AVOID_COLORS_COLLISION": True,
# Do not show user info in the menu
"MENU_HIDE_USER_INFO": False,
# Allows users to add a ``superset://`` DB that can query across databases. This is
# an experimental feature with potential security and performance risks, so use with
# caution. If the feature is enabled you can also set a limit for how much data is
# returned from each database in the ``SUPERSET_META_DB_LIMIT`` configuration value
# in this file.
"ENABLE_SUPERSET_META_DB": False,
# Set to True to replace Selenium with Playwright to execute reports and thumbnails.
# Unlike Selenium, Playwright reports support deck.gl visualizations
# Enabling this feature flag requires installing the "playwright" pip package
"PLAYWRIGHT_REPORTS_AND_THUMBNAILS": False,
# Set to True to enable experimental chart plugins
"CHART_PLUGINS_EXPERIMENTAL": False,
# Regardless of database configuration settings, force SQL Lab to run async using Celery
"SQLLAB_FORCE_RUN_ASYNC": False,
# Set to True to enable the factory reset CLI command
"ENABLE_FACTORY_RESET_COMMAND": False,
# Whether Superset should use Slack avatars for users.
# If on, you'll want to add "https://avatars.slack-edge.com" to the list of allowed
# domains in your TALISMAN_CONFIG
"SLACK_ENABLE_AVATARS": False,
}
# ------------------------------
# SSH Tunnel
# ------------------------------
# Allow users to set the host used when connecting to the SSH Tunnel
# as localhost and any other alias (0.0.0.0)
# ----------------------------------------------------------------------
#                                     |
# -------------+                      |    +----------+
#     LOCAL    |                      |    |  REMOTE  | :22 SSH
#     CLIENT   | <== SSH ========>    |    |  SERVER  | :8080 web service
# -------------+                      |    +----------+
#                                     |
#                              FIREWALL (only port 22 is open)
# ----------------------------------------------------------------------
SSH_TUNNEL_MANAGER_CLASS = "superset.extensions.ssh.SSHManager"
SSH_TUNNEL_LOCAL_BIND_ADDRESS = "127.0.0.1"
#: Timeout (seconds) for tunnel connection (open_channel timeout)
SSH_TUNNEL_TIMEOUT_SEC = 10.0
#: Timeout (seconds) for transport socket (``socket.settimeout``)
SSH_TUNNEL_PACKET_TIMEOUT_SEC = 1.0
# Feature flags may also be set via 'SUPERSET_FEATURE_' prefixed environment vars.
DEFAULT_FEATURE_FLAGS.update(
{
k[len("SUPERSET_FEATURE_") :]: parse_boolean_string(v)
for k, v in os.environ.items()
if re.search(r"^SUPERSET_FEATURE_\w+", k)
}
)
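# e.g., `export SUPERSET_FEATURE_EMBEDDED_SUPERSET=true` in the environment has
# the same effect as the following entry:
#
# DEFAULT_FEATURE_FLAGS["EMBEDDED_SUPERSET"] = True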
# This is merely a default.
FEATURE_FLAGS: dict[str, bool] = {}
# A function that receives a dict of all feature flags
# (DEFAULT_FEATURE_FLAGS merged with FEATURE_FLAGS)
# can alter it, and returns a similar dict. Note the dict of feature
# flags passed to the function is a deepcopy of the dict in the config,
# and can therefore be mutated without side-effect
#
# GET_FEATURE_FLAGS_FUNC can be used to implement progressive rollouts,
# role-based features, or a full on A/B testing framework.
#
# from flask import g, request
# def GET_FEATURE_FLAGS_FUNC(feature_flags_dict: Dict[str, bool]) -> Dict[str, bool]:
# if hasattr(g, "user") and g.user.is_active:
# feature_flags_dict['some_feature'] = g.user and g.user.get_id() == 5
# return feature_flags_dict
GET_FEATURE_FLAGS_FUNC: Callable[[dict[str, bool]], dict[str, bool]] | None = None
# A function that receives a feature flag name and an optional default value.
# Has a similar utility to GET_FEATURE_FLAGS_FUNC but it's useful to not force the
# evaluation of all feature flags when just evaluating a single one.
#
# Note that the default `get_feature_flags` will evaluate each feature with this
# callable when the config key is set, so don't use both GET_FEATURE_FLAGS_FUNC
# and IS_FEATURE_ENABLED_FUNC in conjunction.
IS_FEATURE_ENABLED_FUNC: Callable[[str, bool | None], bool] | None = None
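# e.g., a minimal sketch that reads a single flag from the environment and falls
# back to the provided default (the `FLAG_` env var scheme is hypothetical):
#
# def IS_FEATURE_ENABLED_FUNC(feature: str, default: bool | None = None) -> bool:
#     value = os.environ.get(f"FLAG_{feature}")
#     if value is None:
#         return bool(default)
#     return parse_boolean_string(value)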
# A function that expands/overrides the frontend `bootstrap_data.common` object.
# Can be used to implement custom frontend functionality,
# or dynamically change certain configs.
#
# Values in `bootstrap_data.common` should have these characteristics:
# - They are not specific to a page the user is visiting
# - They do not contain secrets
#
# Takes as a parameter the common bootstrap payload before transformations.
# Returns a dict containing data that should be added or overridden to the payload.
COMMON_BOOTSTRAP_OVERRIDES_FUNC: Callable[ # noqa: E731
[dict[str, Any]], dict[str, Any]
] = lambda data: {}
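# e.g., a sketch that adds a non-secret, page-independent value to the payload
# ("support_email" is a hypothetical key read by a customized frontend):
#
# def COMMON_BOOTSTRAP_OVERRIDES_FUNC(data: dict[str, Any]) -> dict[str, Any]:
#     return {"support_email": "help@example.com"}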
# EXTRA_CATEGORICAL_COLOR_SCHEMES is used for adding custom categorical color schemes
# example code for "My custom warm to hot" color scheme
# EXTRA_CATEGORICAL_COLOR_SCHEMES = [
# {
# "id": 'myVisualizationColors',
# "description": '',
# "label": 'My Visualization Colors',
# "isDefault": True,
# "colors":
# ['#006699', '#009DD9', '#5AAA46', '#44AAAA', '#DDAA77', '#7799BB', '#88AA77',
# '#552288', '#5AAA46', '#CC7788', '#EEDD55', '#9977BB', '#BBAA44', '#DDCCDD']
# }]
# This is merely a default
EXTRA_CATEGORICAL_COLOR_SCHEMES: list[dict[str, Any]] = []
# THEME_OVERRIDES is used for adding a custom theme to Superset
# example code for "My theme" custom scheme
# THEME_OVERRIDES = {
# "borderRadius": 4,
# "colors": {
# "primary": {
# "base": 'red',
# },
# "secondary": {
# "base": 'green',
# },
# "grayscale": {
# "base": 'orange',
# }
# }
# }
THEME_OVERRIDES: dict[str, Any] = {}
# EXTRA_SEQUENTIAL_COLOR_SCHEMES is used for adding custom sequential color schemes
# EXTRA_SEQUENTIAL_COLOR_SCHEMES = [
# {
# "id": 'warmToHot',
# "description": '',
# "isDiverging": True,
# "label": 'My custom warm to hot',
# "isDefault": True,
# "colors":
# ['#552288', '#5AAA46', '#CC7788', '#EEDD55', '#9977BB', '#BBAA44', '#DDCCDD',
# '#006699', '#009DD9', '#5AAA46', '#44AAAA', '#DDAA77', '#7799BB', '#88AA77']
# }]
# This is merely a default
EXTRA_SEQUENTIAL_COLOR_SCHEMES: list[dict[str, Any]] = []
# ---------------------------------------------------
# Thumbnail config (behind feature flag)
# ---------------------------------------------------
# By default, thumbnails are rendered per user, and will fall back to the Selenium
# user for anonymous users. Similar to Alerts & Reports, thumbnails
# can be configured to always be rendered as a fixed user. See
# `superset.tasks.types.ExecutorType` for a full list of executor options.
# To always use a fixed user account, use the following configuration:
# THUMBNAIL_EXECUTE_AS = [ExecutorType.SELENIUM]
THUMBNAIL_SELENIUM_USER: str | None = "admin"
THUMBNAIL_EXECUTE_AS = [ExecutorType.CURRENT_USER, ExecutorType.SELENIUM]
# By default, thumbnail digests are calculated based on various parameters in the
# chart/dashboard metadata, and in the case of user-specific thumbnails, the
# username. To specify a custom digest function, use the following config parameters
# to define callbacks that receive
# 1. the model (dashboard or chart)
# 2. the executor type (e.g. ExecutorType.SELENIUM)
# 3. the executor's username (note, this is the executor as defined by
# `THUMBNAIL_EXECUTE_AS`; the executor is only equal to the currently logged in
# user if the executor type is equal to `ExecutorType.CURRENT_USER`)
# and return the final digest string:
THUMBNAIL_DASHBOARD_DIGEST_FUNC: (
None | (Callable[[Dashboard, ExecutorType, str], str])
) = None
THUMBNAIL_CHART_DIGEST_FUNC: Callable[[Slice, ExecutorType, str], str] | None = None
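# e.g., a sketch of a custom chart digest that keys on the chart id, its last
# modification timestamp, and the executor's username (assumes the audit
# `changed_on` attribute is populated on the Slice model):
#
# import hashlib
#
# def THUMBNAIL_CHART_DIGEST_FUNC(
#     chart: Slice, executor_type: ExecutorType, executor: str
# ) -> str:
#     raw = f"{chart.id}|{chart.changed_on}|{executor}"
#     return hashlib.md5(raw.encode("utf-8")).hexdigest()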
THUMBNAIL_CACHE_CONFIG: CacheConfig = {
"CACHE_TYPE": "NullCache",
"CACHE_NO_NULL_WARNING": True,
}
# Time before selenium times out after trying to locate an element on the page and
# waiting for that element to load for a screenshot.
SCREENSHOT_LOCATE_WAIT = int(timedelta(seconds=10).total_seconds())
# Time before selenium times out while waiting for all DOM elements with class
# "loading" to be gone.
SCREENSHOT_LOAD_WAIT = int(timedelta(minutes=1).total_seconds())
# Selenium destroy retries
SCREENSHOT_SELENIUM_RETRIES = 5
# Give selenium a head start, in seconds
SCREENSHOT_SELENIUM_HEADSTART = 3
# Wait for the chart animation, in seconds
SCREENSHOT_SELENIUM_ANIMATION_WAIT = 5
# Replace unexpected errors in screenshots with real error messages
SCREENSHOT_REPLACE_UNEXPECTED_ERRORS = False
# Max time to wait for error message modal to show up, in seconds
SCREENSHOT_WAIT_FOR_ERROR_MODAL_VISIBLE = 5
# Max time to wait for error message modal to close, in seconds
SCREENSHOT_WAIT_FOR_ERROR_MODAL_INVISIBLE = 5
# Event that Playwright waits for when loading a new page
# Possible values: "load", "commit", "domcontentloaded", "networkidle"
# Docs: https://playwright.dev/python/docs/api/class-page#page-goto-option-wait-until
SCREENSHOT_PLAYWRIGHT_WAIT_EVENT = "load"
# Default timeout for Playwright browser context for all operations
SCREENSHOT_PLAYWRIGHT_DEFAULT_TIMEOUT = int(
timedelta(seconds=30).total_seconds() * 1000
)
# ---------------------------------------------------
# Image and file configuration
# ---------------------------------------------------
# The file upload folder, when using models with files
UPLOAD_FOLDER = BASE_DIR + "/app/static/uploads/"
UPLOAD_CHUNK_SIZE = 4096
# The image upload folder, when using models with images
IMG_UPLOAD_FOLDER = BASE_DIR + "/app/static/uploads/"
# The image upload url, when using models with images
IMG_UPLOAD_URL = "/static/uploads/"
# Set up the image size. Default is (300, 200, True)
# IMG_SIZE = (300, 200, True)
# Default cache timeout, applies to all cache backends unless specifically overridden in
# each cache config.
CACHE_DEFAULT_TIMEOUT = int(timedelta(days=1).total_seconds())
# Default cache for Superset objects
CACHE_CONFIG: CacheConfig = {"CACHE_TYPE": "NullCache"}
# Cache for datasource metadata and query results
DATA_CACHE_CONFIG: CacheConfig = {"CACHE_TYPE": "NullCache"}
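# e.g., a sketch of a Redis-backed cache via Flask-Caching (adjust the URL and
# prefix to your deployment):
#
# CACHE_CONFIG = {
#     "CACHE_TYPE": "RedisCache",
#     "CACHE_DEFAULT_TIMEOUT": int(timedelta(hours=24).total_seconds()),
#     "CACHE_KEY_PREFIX": "superset_",
#     "CACHE_REDIS_URL": "redis://localhost:6379/0",
# }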
# Cache for dashboard filter state. `CACHE_TYPE` defaults to `SupersetMetastoreCache`
# that stores the values in the key-value table in the Superset metastore, as it's
# required for Superset to operate correctly, but can be replaced by any
# `Flask-Caching` backend.
FILTER_STATE_CACHE_CONFIG: CacheConfig = {
"CACHE_TYPE": "SupersetMetastoreCache",
"CACHE_DEFAULT_TIMEOUT": int(timedelta(days=90).total_seconds()),
# Should the timeout be reset when retrieving a cached value?
"REFRESH_TIMEOUT_ON_RETRIEVAL": True,
# The following parameter only applies to `MetastoreCache`:
# How should entries be serialized/deserialized?
"CODEC": JsonKeyValueCodec(),
}
# Cache for explore form data state. `CACHE_TYPE` defaults to `SupersetMetastoreCache`
# that stores the values in the key-value table in the Superset metastore, as it's
# required for Superset to operate correctly, but can be replaced by any
# `Flask-Caching` backend.
EXPLORE_FORM_DATA_CACHE_CONFIG: CacheConfig = {
"CACHE_TYPE": "SupersetMetastoreCache",
"CACHE_DEFAULT_TIMEOUT": int(timedelta(days=7).total_seconds()),
# Should the timeout be reset when retrieving a cached value?
"REFRESH_TIMEOUT_ON_RETRIEVAL": True,
# The following parameter only applies to `MetastoreCache`:
# How should entries be serialized/deserialized?
"CODEC": JsonKeyValueCodec(),
}
# store cache keys by datasource UID (via CacheKey) for custom processing/invalidation
STORE_CACHE_KEYS_IN_METADATA_DB = False
# CORS Options
ENABLE_CORS = False
CORS_OPTIONS: dict[Any, Any] = {}
# Sanitizes the HTML content used in markdowns to allow its rendering in a safe manner.
# Disabling this option is not recommended for security reasons. If you wish to allow
# valid safe elements that are not included in the default sanitization schema, use the
# HTML_SANITIZATION_SCHEMA_EXTENSIONS configuration.
HTML_SANITIZATION = True
# Use this configuration to extend the HTML sanitization schema.
# By default we use the GitHub schema defined in
# https://github.com/syntax-tree/hast-util-sanitize/blob/main/lib/schema.js
# For example, the following configuration would allow the rendering of the
# style attribute for div elements and the ftp protocol in hrefs:
# HTML_SANITIZATION_SCHEMA_EXTENSIONS = {
# "attributes": {
# "div": ["style"],
# },
# "protocols": {
# "href": ["ftp"],
# }
# }
# Be careful when extending the default schema to avoid XSS attacks.
HTML_SANITIZATION_SCHEMA_EXTENSIONS: dict[str, Any] = {}
# Chrome allows up to 6 open connections per domain at a time. When there are more
# than 6 slices in a dashboard, many fetch requests are queued up, waiting for the
# next available socket. PR #5039 allows domain sharding for Superset; this feature
# is enabled by configuration only (by default Superset doesn't allow cross-domain
# requests).
SUPERSET_WEBSERVER_DOMAINS = None
# Allowed format types for upload on Database view
EXCEL_EXTENSIONS = {"xlsx", "xls"}
CSV_EXTENSIONS = {"csv", "tsv", "txt"}
COLUMNAR_EXTENSIONS = {"parquet", "zip"}
ALLOWED_EXTENSIONS = {*EXCEL_EXTENSIONS, *CSV_EXTENSIONS, *COLUMNAR_EXTENSIONS}
# Optional maximum file size in bytes when uploading a CSV
CSV_UPLOAD_MAX_SIZE = None
# CSV Options: key/value pairs that will be passed as argument to DataFrame.to_csv
# method.
# note: index option should not be overridden
CSV_EXPORT = {"encoding": "utf-8"}
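# e.g., a sketch for semicolon-separated exports with a BOM for Excel compatibility
# (both are standard DataFrame.to_csv keyword arguments):
#
# CSV_EXPORT = {"encoding": "utf-8-sig", "sep": ";"}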
# Excel Options: key/value pairs that will be passed as argument to DataFrame.to_excel
# method.
# note: index option should not be overridden
EXCEL_EXPORT: dict[str, Any] = {}
# ---------------------------------------------------
# Time grain configurations
# ---------------------------------------------------
# List of time grains to disable in the application (see list of builtin
# time grains in superset/db_engine_specs/base.py).
# For example: to disable 1 second time grain:
# TIME_GRAIN_DENYLIST = ['PT1S']
TIME_GRAIN_DENYLIST: list[str] = []
# Additional time grains to be supported using similar definitions as in
# superset/db_engine_specs/base.py.
# For example: To add a new 2 second time grain:
# TIME_GRAIN_ADDONS = {'PT2S': '2 second'}
TIME_GRAIN_ADDONS: dict[str, str] = {}
# Implementation of additional time grains per engine.
# The column to be truncated is denoted `{col}` in the expression.
# For example: To implement 2 second time grain on clickhouse engine:
# TIME_GRAIN_ADDON_EXPRESSIONS = {
# 'clickhouse': {
# 'PT2S': 'toDateTime(intDiv(toUInt32(toDateTime({col})), 2)*2)'
# }
# }
TIME_GRAIN_ADDON_EXPRESSIONS: dict[str, dict[str, str]] = {}
# Map of custom time grains and artificial join column producers used
# when generating the join key between results and time shifts.
# See superset/common/query_context_processor.get_aggregated_join_column
#
# Example of a join column producer that aggregates by fiscal year
# def join_producer(row: Series, column_index: int) -> str:
# return row[column_index].strftime("%F")
#
# TIME_GRAIN_JOIN_COLUMN_PRODUCERS = {"P1F": join_producer}
TIME_GRAIN_JOIN_COLUMN_PRODUCERS: dict[str, Callable[[Series, int], str]] = {}
# ---------------------------------------------------
# List of viz_types not allowed in your environment
# For example: Disable pivot table and treemap:
# VIZ_TYPE_DENYLIST = ['pivot_table', 'treemap']
# ---------------------------------------------------
VIZ_TYPE_DENYLIST: list[str] = []
# --------------------------------------------------
# Modules, datasources and middleware to be registered
# --------------------------------------------------
DEFAULT_MODULE_DS_MAP = OrderedDict(
[
("superset.connectors.sqla.models", ["SqlaTable"]),
]
)
ADDITIONAL_MODULE_DS_MAP: dict[str, list[str]] = {}
ADDITIONAL_MIDDLEWARE: list[Callable[..., Any]] = []
# 1) https://docs.python-guide.org/writing/logging/
# 2) https://docs.python.org/2/library/logging.config.html
# Default configurator will consume the LOG_* settings below
LOGGING_CONFIGURATOR = DefaultLoggingConfigurator()
# Console Log Settings
LOG_FORMAT = "%(asctime)s:%(levelname)s:%(name)s:%(message)s"
LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
# ---------------------------------------------------
# Enable Time Rotate Log Handler
# ---------------------------------------------------
# LOG_LEVEL = DEBUG, INFO, WARNING, ERROR, CRITICAL
ENABLE_TIME_ROTATE = False
TIME_ROTATE_LOG_LEVEL = logging.DEBUG if DEBUG else logging.INFO
FILENAME = os.path.join(DATA_DIR, "superset.log")
ROLLOVER = "midnight"
INTERVAL = 1
BACKUP_COUNT = 30
# Custom logger for auditing queries. This can be used to send executed queries to a
# structured immutable store for auditing purposes. The function is called for
# every query executed, in both SQL Lab and charts/dashboards.
# def QUERY_LOGGER(
# database,
# query,
# schema=None,
# client=None,
# security_manager=None,
# log_params=None,
# ):
# pass
QUERY_LOGGER = None
# Set this API key to enable Mapbox visualizations
MAPBOX_API_KEY = os.environ.get("MAPBOX_API_KEY", "")
# Maximum number of rows returned for any analytical database query
SQL_MAX_ROW = 100000
# Maximum number of rows displayed in SQL Lab UI
# Is set to avoid out of memory/localstorage issues in browsers. Does not affect
# exported CSVs
DISPLAY_MAX_ROW = 10000
# Default row limit for SQL Lab queries. Is overridden by setting a new limit in
# the SQL Lab UI
DEFAULT_SQLLAB_LIMIT = 1000
# The limit for the Superset Meta DB when the feature flag ENABLE_SUPERSET_META_DB is on
SUPERSET_META_DB_LIMIT: int | None = 1000
# Adds a warning message on sqllab save query and schedule query modals.
SQLLAB_SAVE_WARNING_MESSAGE = None
SQLLAB_SCHEDULE_WARNING_MESSAGE = None
# Max payload size (MB) for SQL Lab to prevent browser hangs with large results.
SQLLAB_PAYLOAD_MAX_MB = None
# Whether dashboard auto-refresh should force-refresh charts or fetch cached data
DASHBOARD_AUTO_REFRESH_MODE: Literal["fetch", "force"] = "force"
# Dashboard auto refresh intervals
DASHBOARD_AUTO_REFRESH_INTERVALS = [
[0, "Don't refresh"],
[10, "10 seconds"],
[30, "30 seconds"],
[60, "1 minute"],
[300, "5 minutes"],
[1800, "30 minutes"],
[3600, "1 hour"],
[21600, "6 hours"],
[43200, "12 hours"],
[86400, "24 hours"],
]
# This is used as a workaround for the alerts & reports scheduler task to get the time
# celery beat triggered it, see https://github.com/celery/celery/issues/6974 for details
CELERY_BEAT_SCHEDULER_EXPIRES = timedelta(weeks=1)
# Default celery config is to use SQLA as a broker; in a production setting
# you'll want to use a proper broker as specified here:
# https://docs.celeryq.dev/en/stable/getting-started/backends-and-brokers/index.html
class CeleryConfig: # pylint: disable=too-few-public-methods