We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
A clear and concise description of what the bug is.
result of long query
Log details: superset_1 | superset_1 | During handling of the above exception, another exception occurred: superset_1 | superset_1 | Traceback (most recent call last): superset_1 | File "/usr/local/lib/python3.6/site-packages/sqlalchemy/pool/base.py", line 680, in _finalize_fairy superset_1 | fairy._reset(pool) superset_1 | File "/usr/local/lib/python3.6/site-packages/sqlalchemy/pool/base.py", line 867, in _reset superset_1 | pool._dialect.do_rollback(self) superset_1 | File "/usr/local/lib/python3.6/site-packages/sqlalchemy/dialects/mysql/base.py", line 2302, in do_rollback superset_1 | dbapi_connection.rollback() superset_1 | MySQLdb._exceptions.OperationalError: (2006, 'MySQL server has gone away') superset_1 | ERROR:root:Query SELECT DATE(ticket_issue_date) AS __timestamp, superset_1 | COUNT(*) AS count superset_1 | FROM pruva.ticket_test superset_1 | GROUP BY DATE(ticket_issue_date) superset_1 | ORDER BY count DESC superset_1 | LIMIT 50000 on schema pruva failed superset_1 | Traceback (most recent call last): superset_1 | File "/app/superset/connectors/sqla/models.py", line 1004, in query superset_1 | df = self.database.get_df(sql, self.schema, mutator) superset_1 | File "/app/superset/models/core.py", line 358, in get_df superset_1 | self.db_engine_spec.execute(cursor, sqls[-1]) superset_1 | File "/app/superset/db_engine_specs/base.py", line 759, in execute superset_1 | cursor.execute(query) superset_1 | File "/usr/local/lib/python3.6/site-packages/MySQLdb/cursors.py", line 206, in execute superset_1 | res = self._query(query) superset_1 | File "/usr/local/lib/python3.6/site-packages/MySQLdb/cursors.py", line 312, in _query superset_1 | db.query(q) superset_1 | File "/usr/local/lib/python3.6/site-packages/MySQLdb/connections.py", line 224, in query superset_1 | _mysql.connection.query(self, query) superset_1 | MySQLdb._exceptions.OperationalError: (2013, 'Lost connection to MySQL server during query')
what actually happens.
If applicable, add screenshots to help explain your problem.
(please complete the following information):
Make sure these boxes are checked before submitting your issue - thank you!
import logging import os
from werkzeug.contrib.cache import FileSystemCache
logger = logging.getLogger()
def get_env_variable(var_name, default=None): """Get the environment variable or raise exception.""" try: return os.environ[var_name] except KeyError: if default is not None: return default else: error_msg = "The environment variable {} was missing, abort...".format( var_name ) raise EnvironmentError(error_msg)
DATABASE_DIALECT = get_env_variable("DATABASE_DIALECT") DATABASE_USER = get_env_variable("DATABASE_USER") DATABASE_PASSWORD = get_env_variable("DATABASE_PASSWORD") DATABASE_HOST = get_env_variable("DATABASE_HOST") DATABASE_PORT = get_env_variable("DATABASE_PORT") DATABASE_DB = get_env_variable("DATABASE_DB")
SQLALCHEMY_DATABASE_URI = "%s://%s:%s@%s:%s/%s" % ( DATABASE_DIALECT, DATABASE_USER, DATABASE_PASSWORD, DATABASE_HOST, DATABASE_PORT, DATABASE_DB, )
REDIS_HOST = get_env_variable("REDIS_HOST") REDIS_PORT = get_env_variable("REDIS_PORT")
RESULTS_BACKEND = FileSystemCache('/app/superset_home/sqllab')
class CeleryConfig(object): BROKER_URL = 'redis://redis:6379/1' CELERY_IMPORTS = ( 'superset.sql_lab', 'superset.tasks', ) CELERY_RESULT_BACKEND = 'redis://redis:6379/1' CELERYD_LOG_LEVEL = 'INFO' CELERYD_PREFETCH_MULTIPLIER = 10 CELERY_ACKS_LATE = True CELERY_ANNOTATIONS = { "sql_lab.get_sql_results": {"rate_limit": "1000/s"}, #100 dü "email_reports.send": { "rate_limit": "1/s", "time_limit": 120, "soft_time_limit": 150, "ignore_result": True, }, } CELERYBEAT_SCHEDULE = { "email_reports.schedule_hourly": { "task": "email_reports.schedule_hourly", # "schedule": crontab(minute=1, hour="*"), } }
CELERY_CONFIG = CeleryConfig
MAPBOX_API_KEY = os.getenv('MAPBOX_API_KEY', '') CACHE_CONFIG = { 'CACHE_TYPE': 'redis', 'CACHE_DEFAULT_TIMEOUT': 606024*365, 'CACHE_KEY_PREFIX': 'superset_', 'CACHE_REDIS_HOST': 'redis', 'CACHE_REDIS_PORT': 6379, 'CACHE_REDIS_DB': 1, 'CACHE_REDIS_URL': 'redis://redis:6379/1' }
CSRF_ENABLED = True
ENABLE_CORS = True
CORS_OPTIONS = { 'supports_credentials': True, 'allow_headers': [ 'X-CSRFToken', 'Content-Type', 'Origin', 'X-Requested-With', 'Accept', ], 'resources': [ '/superset/csrf_token/' , '/api/v1/formData/', # sliceId => formData '/superset/explore_json/*', # legacy query API, formData => queryData '/api/v1/query/', # new query API, queryContext => queryData '/superset/fetch_datasource_metadata/' '/superset/explore_json/'
], 'origins': [ 'http://localhost:9001' ,'http://localhost:3000' ,'http://172.19.0.2:9001' ,'http://172.20.0.4:9001' ,'http://172.20.0.4:3000' ,'https://apache-superset.github.io' ,'localhost:9000' ,'http://localhost:9000' ,'http://gvarol.test' ,'http://localhost:8088' ,'http://172.17.0.1:8088' ,'http://172.17.0.1:9000' ,'*' ],
}
HTTP_HEADERS = {'X-Frame-Options': 'ALLOWALL'}
SESSION_COOKIE_SECURE = False SESSION_COOKIE_SAMESITE = None
PUBLIC_ROLE_LIKE_GAMMA = True AUTH_ROLE_PUBLIC = 'Alpha' #'Public' AUTH_USER_REGISTRATION_ROLE = "Public"
BABEL_DEFAULT_LOCALE = "tr"
BABEL_DEFAULT_FOLDER = "superset/translations"
LANGUAGES = { "en": {"flag": "us", "name": "English"}, "es": {"flag": "es", "name": "Spanish"}, "it": {"flag": "it", "name": "Italian"}, "fr": {"flag": "fr", "name": "French"}, "zh": {"flag": "cn", "name": "Chinese"}, "ja": {"flag": "jp", "name": "Japanese"}, "de": {"flag": "de", "name": "German"}, "pt": {"flag": "pt", "name": "Portuguese"}, "pt_BR": {"flag": "br", "name": "Brazilian Portuguese"}, "ru": {"flag": "ru", "name": "Russian"}, "ko": {"flag": "kr", "name": "Korean"}, "tr": {"flag": "tr", "name": "Türkçe"}, }
SQLALCHEMY_POOL_SIZE=20 SQLALCHEMY_POOL_TIMEOUT=300
SQLLAB_ASYNC_TIME_LIMIT_SEC = 60 * 60 * 6 SUPERSET_WEBSERVER_TIMEOUT = 600
The text was updated successfully, but these errors were encountered:
Hey @dpugulo I'm getting the exact same issue on Apache Superset (1.0.1, 1.1.0 and latest).
How did you manage to solve your issue?
Sorry, something went wrong.
No branches or pull requests
A clear and concise description of what the bug is.
Expected results
result of long query
Actual results
Log details:
superset_1 |
superset_1 | During handling of the above exception, another exception occurred:
superset_1 |
superset_1 | Traceback (most recent call last):
superset_1 | File "/usr/local/lib/python3.6/site-packages/sqlalchemy/pool/base.py", line 680, in _finalize_fairy
superset_1 | fairy._reset(pool)
superset_1 | File "/usr/local/lib/python3.6/site-packages/sqlalchemy/pool/base.py", line 867, in _reset
superset_1 | pool._dialect.do_rollback(self)
superset_1 | File "/usr/local/lib/python3.6/site-packages/sqlalchemy/dialects/mysql/base.py", line 2302, in do_rollback
superset_1 | dbapi_connection.rollback()
superset_1 | MySQLdb._exceptions.OperationalError: (2006, 'MySQL server has gone away')
superset_1 | ERROR:root:Query SELECT DATE(ticket_issue_date) AS __timestamp,
superset_1 | COUNT(*) AS count
superset_1 | FROM pruva.ticket_test
superset_1 | GROUP BY DATE(ticket_issue_date)
superset_1 | ORDER BY count DESC
superset_1 | LIMIT 50000 on schema pruva failed
superset_1 | Traceback (most recent call last):
superset_1 | File "/app/superset/connectors/sqla/models.py", line 1004, in query
superset_1 | df = self.database.get_df(sql, self.schema, mutator)
superset_1 | File "/app/superset/models/core.py", line 358, in get_df
superset_1 | self.db_engine_spec.execute(cursor, sqls[-1])
superset_1 | File "/app/superset/db_engine_specs/base.py", line 759, in execute
superset_1 | cursor.execute(query)
superset_1 | File "/usr/local/lib/python3.6/site-packages/MySQLdb/cursors.py", line 206, in execute
superset_1 | res = self._query(query)
superset_1 | File "/usr/local/lib/python3.6/site-packages/MySQLdb/cursors.py", line 312, in _query
superset_1 | db.query(q)
superset_1 | File "/usr/local/lib/python3.6/site-packages/MySQLdb/connections.py", line 224, in query
superset_1 | _mysql.connection.query(self, query)
superset_1 | MySQLdb._exceptions.OperationalError: (2013, 'Lost connection to MySQL server during query')
what actually happens.
Screenshots
If applicable, add screenshots to help explain your problem.
How to reproduce the bug
Environment
(please complete the following information):
Checklist
Make sure these boxes are checked before submitting your issue - thank you!
Additional context
My `superset_config.py` is as follows:
import logging
import os
from werkzeug.contrib.cache import FileSystemCache
logger = logging.getLogger()
def get_env_variable(var_name, default=None):
    """Return the value of the environment variable *var_name*.

    Falls back to *default* when the variable is unset. When it is unset
    and no default was given, raises EnvironmentError (an alias of OSError)
    so a missing required setting aborts startup loudly.
    """
    try:
        return os.environ[var_name]
    except KeyError:
        if default is not None:
            return default
        # Suppress the KeyError context: it adds nothing beyond the message.
        raise EnvironmentError(
            "The environment variable {} was missing, abort...".format(var_name)
        ) from None
# Connection settings for Superset's own metadata database, all taken from
# the container environment; get_env_variable raises if any is missing.
DATABASE_DIALECT = get_env_variable("DATABASE_DIALECT")
DATABASE_USER = get_env_variable("DATABASE_USER")
DATABASE_PASSWORD = get_env_variable("DATABASE_PASSWORD")
DATABASE_HOST = get_env_variable("DATABASE_HOST")
DATABASE_PORT = get_env_variable("DATABASE_PORT")
DATABASE_DB = get_env_variable("DATABASE_DB")
# SQLAlchemy URI assembled from the pieces above,
# e.g. mysql://user:pass@host:3306/superset.
SQLALCHEMY_DATABASE_URI = "%s://%s:%s@%s:%s/%s" % (
    DATABASE_DIALECT,
    DATABASE_USER,
    DATABASE_PASSWORD,
    DATABASE_HOST,
    DATABASE_PORT,
    DATABASE_DB,
)
# Redis endpoint from the environment.
# NOTE(review): the Celery and cache sections below hard-code
# 'redis://redis:6379/1' instead of using these two values — confirm intended.
REDIS_HOST = get_env_variable("REDIS_HOST")
REDIS_PORT = get_env_variable("REDIS_PORT")
# SQL Lab async query results are cached on the local filesystem.
# NOTE(review): werkzeug.contrib was removed in Werkzeug 1.0, so the
# FileSystemCache import at the top breaks on newer Werkzeug — verify the
# pinned version in this deployment.
RESULTS_BACKEND = FileSystemCache('/app/superset_home/sqllab')
class CeleryConfig(object):
    """Celery settings for async SQL Lab queries and scheduled email reports.

    NOTE(review): broker and result-backend URLs are hard-coded to
    'redis://redis:6379/1' rather than built from REDIS_HOST/REDIS_PORT
    defined above — confirm that is intentional.
    """
    BROKER_URL = 'redis://redis:6379/1'
    # Task modules the Celery workers must import at startup.
    CELERY_IMPORTS = (
        'superset.sql_lab',
        'superset.tasks',
    )
    CELERY_RESULT_BACKEND = 'redis://redis:6379/1'
    CELERYD_LOG_LEVEL = 'INFO'
    # Each worker prefetches up to 10 tasks; ack only after completion so a
    # crashed worker's tasks are redelivered.
    CELERYD_PREFETCH_MULTIPLIER = 10
    CELERY_ACKS_LATE = True
    CELERY_ANNOTATIONS = {
        "sql_lab.get_sql_results": {"rate_limit": "1000/s"},  # appears raised from 100/s — original (Turkish) note was truncated
        "email_reports.send": {
            "rate_limit": "1/s",
            "time_limit": 120,
            # NOTE(review): soft_time_limit (150) exceeds the hard
            # time_limit (120); normally the soft limit fires first — confirm.
            "soft_time_limit": 150,
            "ignore_result": True,
        },
    }
    CELERYBEAT_SCHEDULE = {
        "email_reports.schedule_hourly": {
            "task": "email_reports.schedule_hourly",
            # Schedule disabled; re-enabling requires `from celery.schedules import crontab`.
            # "schedule": crontab(minute=1, hour="*"),
        }
    }
# Superset picks up the Celery configuration from this module-level name.
CELERY_CONFIG = CeleryConfig
# Mapbox key for deck.gl / map visualizations; empty string disables them.
MAPBOX_API_KEY = os.getenv('MAPBOX_API_KEY', '')
# Chart/metadata cache kept in Redis for one year.
CACHE_CONFIG = {
    'CACHE_TYPE': 'redis',
    # One year in seconds. The pasted original read "606024*365": the inner
    # multiplication signs of 60*60*24 were evidently eaten by markdown
    # italics, yielding a meaningless ~7-year value; restored to the intent.
    'CACHE_DEFAULT_TIMEOUT': 60 * 60 * 24 * 365,
    'CACHE_KEY_PREFIX': 'superset_',
    'CACHE_REDIS_HOST': 'redis',
    'CACHE_REDIS_PORT': 6379,
    'CACHE_REDIS_DB': 1,
    'CACHE_REDIS_URL': 'redis://redis:6379/1',
}
CSRF_ENABLED = True
ENABLE_CORS = True
# NOTE(review): this section of the paste was syntactically broken — the
# 'resources' list was never closed, the 'origins' key was lost, and the last
# two resource entries lacked a separating comma (Python would silently
# concatenate them into one bogus path). Reconstructed from the duplicate
# copy of this config earlier in the issue.
CORS_OPTIONS = {
    'supports_credentials': True,
    'allow_headers': [
        'X-CSRFToken', 'Content-Type', 'Origin', 'X-Requested-With', 'Accept',
    ],
    'resources': [
        '/superset/csrf_token/',
        '/api/v1/formData/',  # sliceId => formData
        '/superset/explore_json/*',  # legacy query API, formData => queryData
        '/api/v1/query/',  # new query API, queryContext => queryData
        '/superset/fetch_datasource_metadata/',
        '/superset/explore_json/',
    ],
    # NOTE(review): the '*' entry makes every other origin redundant and, with
    # supports_credentials=True, is rejected by browsers — confirm intent.
    'origins': [
        'http://localhost:9001',
        'http://localhost:3000',
        'http://172.19.0.2:9001',
        'http://172.20.0.4:9001',
        'http://172.20.0.4:3000',
        'https://apache-superset.github.io',
        'localhost:9000',
        'http://localhost:9000',
        'http://gvarol.test',
        'http://localhost:8088',
        'http://172.17.0.1:8088',
        'http://172.17.0.1:9000',
        '*',
    ],
}
# Allow the UI to be embedded in iframes from any site.
# NOTE(review): 'ALLOWALL' disables clickjacking protection — confirm this is
# required for the embedding use case.
HTTP_HEADERS = {'X-Frame-Options': 'ALLOWALL'}
# Session cookies are sent over plain HTTP and cross-site; acceptable only
# for a non-TLS dev/internal deployment.
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_SAMESITE = None
# Anonymous access configuration.
# NOTE(review): mapping the public role to 'Alpha' grants unauthenticated
# visitors access to all datasources — verify this exposure is intended.
PUBLIC_ROLE_LIKE_GAMMA = True
AUTH_ROLE_PUBLIC = 'Alpha' #'Public'
AUTH_USER_REGISTRATION_ROLE = "Public"
# UI defaults to Turkish; translations shipped under superset/translations.
BABEL_DEFAULT_LOCALE = "tr"
BABEL_DEFAULT_FOLDER = "superset/translations"
# Languages selectable in the UI: locale code -> flag icon + display name.
LANGUAGES = {
    code: {"flag": flag, "name": name}
    for code, flag, name in [
        ("en", "us", "English"),
        ("es", "es", "Spanish"),
        ("it", "it", "Italian"),
        ("fr", "fr", "French"),
        ("zh", "cn", "Chinese"),
        ("ja", "jp", "Japanese"),
        ("de", "de", "German"),
        ("pt", "pt", "Portuguese"),
        ("pt_BR", "br", "Brazilian Portuguese"),
        ("ru", "ru", "Russian"),
        ("ko", "kr", "Korean"),
        ("tr", "tr", "Türkçe"),
    ]
}
# Metadata-DB connection pool: a larger pool plus a long checkout timeout so
# slow dashboard queries don't exhaust connections.
SQLALCHEMY_POOL_SIZE = 20
SQLALCHEMY_POOL_TIMEOUT = 300
# Async SQL Lab queries may run up to six hours; synchronous web requests
# time out after ten minutes.
SQLLAB_ASYNC_TIME_LIMIT_SEC = 6 * 60 * 60
SUPERSET_WEBSERVER_TIMEOUT = 600
The text was updated successfully, but these errors were encountered: