diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e620e6afe8b4f..680f4f198ccc9 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -294,7 +294,17 @@ python setup.py build_sphinx ### Flask server -Make sure your machine meets the [OS dependencies](https://superset.incubator.apache.org/installation.html#os-dependencies) before following these steps. +#### OS Dependencies + +Make sure your machine meets the [OS dependencies](https://superset.incubator.apache.org/installation.html#os-dependencies) before following these steps. + +Developers should use a virtualenv. + +``` +pip install virtualenv +``` + +Then proceed with: ```bash # Create a virtual environemnt and activate it (recommended) @@ -304,10 +314,11 @@ source venv/bin/activate # Install external dependencies pip install -r requirements.txt pip install -r requirements-dev.txt + # Install Superset in editable (development) mode pip install -e . -# Create an admin user +# Create an admin user in your metadata database fabmanager create-admin --app superset # Initialize the database @@ -319,11 +330,10 @@ superset init # Load some data to play with superset load_examples -# Start the Flask dev web server from inside the `superset` dir at port 8088 +# Start the Flask dev web server from inside your virtualenv. # Note that your page may not have css at this point. # See instructions below how to build the front-end assets. -cd superset -FLASK_ENV=development flask run -p 8088 --with-threads --reload --debugger +FLASK_ENV=development superset run -p 8088 --with-threads --reload --debugger ``` #### Logging to the browser console @@ -355,7 +365,14 @@ app.logger.info(form_data) Frontend assets (JavaScript, CSS, and images) must be compiled in order to properly display the web UI. The `superset/assets` directory contains all NPM-managed front end assets. Note that there are additional frontend assets bundled with Flask-Appbuilder (e.g. 
jQuery and bootstrap); these are not managed by NPM, and may be phased out in the future. -First, be sure you are using recent versions of NodeJS and npm. Using [nvm](https://github.com/creationix/nvm) to manage them is recommended. +#### nvm and node + +First, be sure you are using recent versions of NodeJS and npm. Using [nvm](https://github.com/creationix/nvm) to manage them is recommended. Check the docs at the link to be sure, but at the time of writing the following would install nvm and node: + +```bash +curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash +nvm install node +``` #### Prerequisite @@ -396,6 +413,12 @@ npm run dev npm run prod ``` +If you run this service from somewhere other than your local machine, you may need to add hostname value to webpack.config.js at .devServer.public specifying the endpoint at which you will access the app. For example: myhost:9001. For convenience you may want to install webpack, webpack-cli and webpack-dev-server globally so that you can run them directly: + +```bash +npm install --global webpack webpack-cli webpack-dev-server +``` + #### Updating NPM packages Use npm in the prescribed way, making sure that @@ -517,14 +540,15 @@ superset db upgrade superset init superset load_test_users superset load_examples -superset runserver +superset run --port 8081 ``` Run Cypress tests: ```bash -cd /superset/superset/assets +cd superset/assets npm run build +npm run install-cypress npm run cypress run # run tests from a specific file @@ -534,6 +558,8 @@ npm run cypress run -- --spec cypress/integration/explore/link.test.js npm run cypress run -- --spec cypress/integration/dashboard/index.test.js --config video=true ``` +See [`superset/assets/cypress_build.sh`](https://github.com/apache/incubator-superset/blob/master/superset/assets/cypress_build.sh). + ## Translating We use [Babel](http://babel.pocoo.org/en/latest/) to translate Superset. 
In Python files, we import the magic `_` function using: diff --git a/README.md b/README.md index 6b13d4e1fb5e4..962c8c9bfb26b 100644 --- a/README.md +++ b/README.md @@ -171,6 +171,7 @@ the world know they are using Superset. Join our growing community! 1. [Airbnb](https://github.com/airbnb) 1. [Airboxlab](https://foobot.io) 1. [Aktia Bank plc](https://www.aktia.com) + 1. [American Express](https://www.americanexpress.com) 1. [Amino](https://amino.com) 1. [Apollo GraphQL](https://www.apollographql.com/) 1. [Ascendica Development](http://ascendicadevelopment.com) diff --git a/contrib/docker/README.md b/contrib/docker/README.md new file mode 100644 index 0000000000000..0c3146f7e7347 --- /dev/null +++ b/contrib/docker/README.md @@ -0,0 +1,58 @@ + + +# Getting Started with Superset using Docker + +Docker is an easy way to get started with Superset. + +## Initializing Database + +To initialize the database with a user and example charts, dashboards and datasets run: + +```bash +SUPERSET_LOAD_EXAMPLES=yes docker-compose run --rm superset ./docker-init.sh +``` + +This may take a minute. + +## Normal Operation + +To run the container, simply run: + +```bash +docker-compose up +``` + +After several minutes for superset initialization to finish, you can open a browser and view [`http://localhost:8088`](http://localhost:8088) +to start your journey. + +## Developing + +While running, the container server will reload on modification of the superset python and javascript source code. +Don't forget to reload the page to take the new frontend into account though. +## Production + +It is also possible to run Superset in non-development mode: in the `docker-compose.yml` file remove +the volumes needed for development and change the variable `SUPERSET_ENV` to `production`. + +## Resource Constraints + +If you are attempting to build on a Mac and it exits with 137 you need to increase your docker resources. 
+OSX instructions: https://docs.docker.com/docker-for-mac/#advanced (Search for memory) diff --git a/docs/installation.rst b/docs/installation.rst index 0e879a2a3891b..3405b8ab2bb95 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -91,6 +91,8 @@ OSX instructions: https://docs.docker.com/docker-for-mac/#advanced (Search for m Or if you're curious and want to install superset from bottom up, then go ahead. +See also `contrib/docker/README.md `_ + OS dependencies --------------- @@ -121,7 +123,13 @@ that the required dependencies are installed: :: sudo yum upgrade python-setuptools sudo yum install gcc gcc-c++ libffi-devel python-devel python-pip python-wheel openssl-devel libsasl2-devel openldap-devel -**OSX**, system python is not recommended. brew's python also ships with pip :: +**Mac OS X** If possible, you should upgrade to the latest version of OS X as issues are more likely to be resolved for that version. +You *will likely need* the latest version of XCode available for your installed version of OS X. You should also install +the XCode command line tools: :: + + xcode-select --install + +System python is not recommended. Homebrew's python also ships with pip: :: brew install pkg-config libffi openssl python env LDFLAGS="-L$(brew --prefix openssl)/lib" CFLAGS="-I$(brew --prefix openssl)/include" pip install cryptography==2.4.2 @@ -184,8 +192,7 @@ Follow these few simple steps to install Superset.:: superset init # To start a development web server on port 8088, use -p to bind to another port - flask run -p 8080 --with-threads --reload --debugger - + superset run -p 8080 --with-threads --reload --debugger After installation, you should be able to point your browser to the right hostname:port `http://localhost:8088 `_, login using @@ -219,10 +226,8 @@ Refer to the `Gunicorn documentation `_ for more information. -Note that *gunicorn* does not -work on Windows so the `superset runserver` command is not expected to work -in that context. 
Also, note that the development web -server (`superset runserver -d`) is not intended for production use. +Note that the development web +server (`superset run` or `flask run`) is not intended for production use. If not using gunicorn, you may want to disable the use of flask-compress by setting `ENABLE_FLASK_COMPRESS = False` in your `superset_config.py` @@ -387,6 +392,12 @@ Here's a list of some of the recommended packages. | Pinot | ``pip install pinotdb`` | ``pinot+http://controller:5436/`` | | | | ``query?server=http://controller:5983/`` | +---------------+-------------------------------------+-------------------------------------------------+ +| Apache Drill | | For the REST API:`` | +| | | ``drill+sadrill://`` | +| | | For JDBC | +| | | ``drill+jdbc://`` | ++---------------+-------------------------------------+-------------------------------------------------+ + Note that many other databases are supported, the main criteria being the existence of a functional SqlAlchemy dialect and Python driver. Googling @@ -444,6 +455,31 @@ Required environment variables: :: See `Teradata SQLAlchemy `_. +Apache Drill +--------- +At the time of writing, the SQLAlchemy Dialect is not available on pypi and must be downloaded here: +`SQLAlchemy Drill `_ + +Alternatively, you can install it completely from the command line as follows: :: + + git clone https://github.com/JohnOmernik/sqlalchemy-drill + cd sqlalchemy-drill + python3 setup.py install + +Once that is done, you can connect to Drill in two ways, either via the REST interface or by JDBC. If you are connecting via JDBC, you must have the +Drill JDBC Driver installed. 
+ +The basic connection string for Drill looks like this :: + + drill+sadrill://{username}:{password}@{host}:{port}/{storage_plugin}?use_ssl=True + +If you are using JDBC to connect to Drill, the connection string looks like this: :: + + drill+jdbc://{username}:{password}@{host}:{port}/{storage_plugin} + +For a complete tutorial about how to use Apache Drill with Superset, see this tutorial: +`Visualize Anything with Superset and Drill `_ + Caching ------- @@ -905,7 +941,13 @@ To allow scheduled queries, add the following to your `config.py`: # this is where the error message is shown 'container': 'end_date', }, - ] + ], + # link to the scheduler; this example links to an Airflow pipeline + # that uses the query id and the output table as its name + 'linkback': ( + 'https://airflow.example.com/admin/airflow/tree?' + 'dag_id=query_${id}_${extra_json.schedule_info.output_table}' + ), }, } diff --git a/requirements-dev.txt b/requirements-dev.txt index 857b9ad07d09f..7059084678669 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -16,22 +16,22 @@ # coverage==4.5.3 flake8-commas==2.0.0 -flake8-import-order==0.18 +flake8-import-order==0.18.1 flake8-mypy==17.8.0 -flake8-quotes==1.0.0 -flake8==3.6.0 -flask-cors==3.0.6 -ipdb==0.11 +flake8-quotes==2.0.1 +flake8==3.7.7 +flask-cors==3.0.7 +ipdb==0.12 mypy==0.670 -mysqlclient==1.3.13 +mysqlclient==1.4.2.post1 nose==1.3.7 -pip-tools==3.5.0 +pip-tools==3.7.0 psycopg2-binary==2.7.5 -pycodestyle==2.4.0 +pycodestyle==2.5.0 pyhive==0.6.1 pylint==1.9.2 python-dotenv==0.10.1 redis==2.10.6 statsd==3.3.0 thrift==0.11.0 -tox==3.5.3 +tox==3.11.1 diff --git a/requirements.txt b/requirements.txt index 22269317d4643..517f828de56cd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,9 +2,8 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file requirements.txt setup.py +# pip-compile --output-file=requirements.txt setup.py # - alembic==1.0.0 # via flask-migrate amqp==2.3.2 # via 
kombu apispec[yaml]==1.2.0 # via flask-appbuilder @@ -43,7 +42,7 @@ humanize==0.5.1 idna==2.6 isodate==0.6.0 itsdangerous==0.24 # via flask -jinja2==2.10 # via flask, flask-babel +jinja2==2.10.1 # via flask, flask-babel jsonschema==3.0.1 # via flask-appbuilder kombu==4.2.1 # via celery mako==1.0.7 # via alembic @@ -64,20 +63,21 @@ pydruid==0.5.2 pyjwt==1.7.1 # via flask-appbuilder, flask-jwt-extended pyrsistent==0.14.11 # via jsonschema python-dateutil==2.6.1 +python-dotenv==0.10.1 python-editor==1.0.3 # via alembic python-geohash==0.8.5 python3-openid==3.1.0 # via flask-openid pytz==2018.5 # via babel, celery, pandas -pyyaml==3.13 -requests==2.20.0 +pyyaml==5.1 +requests==2.22.0 retry==0.9.2 selenium==3.141.0 simplejson==3.15.0 -sqlalchemy-utils==0.32.21 +six==1.11.0 # via bleach, cryptography, flask-jwt-extended, flask-talisman, isodate, jsonschema, pathlib2, polyline, prison, pydruid, pyrsistent, python-dateutil, sqlalchemy-utils, wtforms-json +sqlalchemy-utils==0.33.11 sqlalchemy==1.3.1 sqlparse==0.2.4 -unicodecsv==0.14.1 -urllib3==1.22 # via requests, selenium +urllib3==1.24.3 # via requests, selenium vine==1.1.4 # via amqp webencodings==0.5.1 # via bleach werkzeug==0.14.1 # via flask, flask-jwt-extended diff --git a/setup.py b/setup.py index ea58d48f19c31..fc91ea484d59c 100644 --- a/setup.py +++ b/setup.py @@ -95,16 +95,16 @@ def get_git_sha(): 'polyline', 'pydruid>=0.5.2', 'python-dateutil', + 'python-dotenv', 'python-geohash', - 'pyyaml>=3.13', - 'requests>=2.20.0', + 'pyyaml>=5.1', + 'requests>=2.22.0', 'retry>=0.9.2', 'selenium>=3.141.0', 'simplejson>=3.15.0', 'sqlalchemy>=1.3.1,<2.0', - 'sqlalchemy-utils', + 'sqlalchemy-utils>=0.33.2', 'sqlparse', - 'unicodecsv', 'wtforms-json', ], extras_require={ diff --git a/superset/__init__.py b/superset/__init__.py index 47811dbc2566a..6971dc9d4c8ab 100644 --- a/superset/__init__.py +++ b/superset/__init__.py @@ -230,7 +230,9 @@ def is_feature_enabled(feature): if conf.get('ENABLE_FLASK_COMPRESS'): Compress(app) 
-Talisman(app, content_security_policy=None) +if app.config['TALISMAN_ENABLED']: + talisman_config = app.config.get('TALISMAN_CONFIG') + Talisman(app, **talisman_config) # Hook that provides administrators a handle on the Flask APP # after initialization diff --git a/superset/assets/cypress/integration/dashboard/save.js b/superset/assets/cypress/integration/dashboard/save.js index 772862d5b87e3..d0895777f48fa 100644 --- a/superset/assets/cypress/integration/dashboard/save.js +++ b/superset/assets/cypress/integration/dashboard/save.js @@ -20,6 +20,8 @@ import readResponseBlob from '../../utils/readResponseBlob'; import { WORLD_HEALTH_DASHBOARD } from './dashboard.helper'; export default () => describe('save', () => { + Cypress.config('chromeWebSecurity', false); + let dashboardId; let boxplotChartId; diff --git a/superset/assets/package-lock.json b/superset/assets/package-lock.json index 6ac24910d4762..3bdcd3d069d9c 100644 --- a/superset/assets/package-lock.json +++ b/superset/assets/package-lock.json @@ -1378,7 +1378,7 @@ }, "@data-ui/event-flow": { "version": "0.0.54", - "resolved": "https://registry.npmjs.org/@data-ui/event-flow/-/event-flow-0.0.54.tgz", + "resolved": "http://registry.npmjs.org/@data-ui/event-flow/-/event-flow-0.0.54.tgz", "integrity": "sha1-uwPh/StWNCSGVbjfnTxsOKdH5l4=", "requires": { "@data-ui/forms": "0.0.50", @@ -1669,7 +1669,7 @@ }, "@data-ui/radial-chart": { "version": "0.0.54", - "resolved": "https://registry.npmjs.org/@data-ui/radial-chart/-/radial-chart-0.0.54.tgz", + "resolved": "http://registry.npmjs.org/@data-ui/radial-chart/-/radial-chart-0.0.54.tgz", "integrity": "sha1-DSiwdoHZtgJ9msI7cpJBgn1RMAE=", "requires": { "@data-ui/shared": "0.0.54", @@ -1684,7 +1684,7 @@ }, "@data-ui/shared": { "version": "0.0.54", - "resolved": "https://registry.npmjs.org/@data-ui/shared/-/shared-0.0.54.tgz", + "resolved": "http://registry.npmjs.org/@data-ui/shared/-/shared-0.0.54.tgz", "integrity": "sha1-L7DW3ukNrCC/jzwpE8aFCoIj1Zs=", "requires": { 
"@data-ui/theme": "0.0.48", @@ -1757,7 +1757,7 @@ }, "@data-ui/sparkline": { "version": "0.0.54", - "resolved": "https://registry.npmjs.org/@data-ui/sparkline/-/sparkline-0.0.54.tgz", + "resolved": "http://registry.npmjs.org/@data-ui/sparkline/-/sparkline-0.0.54.tgz", "integrity": "sha1-zj0WbZ4LI5oLoC84lMuejIQXHO8=", "requires": { "@data-ui/shared": "0.0.54", @@ -2599,7 +2599,7 @@ }, "@sinonjs/formatio": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sinonjs/formatio/-/formatio-2.0.0.tgz", + "resolved": "http://registry.npmjs.org/@sinonjs/formatio/-/formatio-2.0.0.tgz", "integrity": "sha512-ls6CAMA6/5gG+O/IdsBcblvnd8qcO/l1TYoNeAzp3wcISOxlPXQEus0mLcdwazEkWjaBdaJ3TaxmNgCLWwvWzg==", "dev": true, "requires": { @@ -3777,7 +3777,7 @@ }, "acorn-jsx": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", + "resolved": "http://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", "dev": true, "requires": { @@ -3786,7 +3786,7 @@ "dependencies": { "acorn": { "version": "3.3.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", + "resolved": "http://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", "dev": true } @@ -4231,7 +4231,7 @@ }, "array-equal": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", + "resolved": "http://registry.npmjs.org/array-equal/-/array-equal-1.0.0.tgz", "integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=", "dev": true }, @@ -4841,7 +4841,7 @@ }, "babel-plugin-syntax-dynamic-import": { "version": "6.18.0", - "resolved": "https://registry.npmjs.org/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz", + "resolved": "http://registry.npmjs.org/babel-plugin-syntax-dynamic-import/-/babel-plugin-syntax-dynamic-import-6.18.0.tgz", "integrity": "sha1-jWomIpyDdFqZgqRBBRVyyqF5sdo=", "dev": true }, @@ -5160,7 
+5160,7 @@ }, "brace": { "version": "0.11.1", - "resolved": "https://registry.npmjs.org/brace/-/brace-0.11.1.tgz", + "resolved": "http://registry.npmjs.org/brace/-/brace-0.11.1.tgz", "integrity": "sha1-SJb8ydVE7vRfS7dmDbMg07N5/lg=" }, "brace-expansion": { @@ -5192,7 +5192,7 @@ }, "brfs": { "version": "1.6.1", - "resolved": "https://registry.npmjs.org/brfs/-/brfs-1.6.1.tgz", + "resolved": "http://registry.npmjs.org/brfs/-/brfs-1.6.1.tgz", "integrity": "sha512-OfZpABRQQf+Xsmju8XE9bDjs+uU4vLREGolP7bDgcpsI17QREyZ4Bl+2KLxxx1kCgA0fAIhKQBaBYh+PEcCqYQ==", "requires": { "quote-stream": "^1.0.1", @@ -5232,7 +5232,7 @@ }, "browserify-aes": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "resolved": "http://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", "dev": true, "requires": { @@ -5269,7 +5269,7 @@ }, "browserify-rsa": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", + "resolved": "http://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", "dev": true, "requires": { @@ -5820,7 +5820,7 @@ }, "clean-webpack-plugin": { "version": "0.1.19", - "resolved": "https://registry.npmjs.org/clean-webpack-plugin/-/clean-webpack-plugin-0.1.19.tgz", + "resolved": "http://registry.npmjs.org/clean-webpack-plugin/-/clean-webpack-plugin-0.1.19.tgz", "integrity": "sha512-M1Li5yLHECcN2MahoreuODul5LkjohJGFxLPTjl3j1ttKrF5rgjZET1SJduuqxLAuT1gAPOdkhg03qcaaU1KeA==", "dev": true, "requires": { @@ -6282,7 +6282,7 @@ }, "create-hash": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "resolved": "http://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", "integrity": 
"sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", "dev": true, "requires": { @@ -6295,7 +6295,7 @@ }, "create-hmac": { "version": "1.1.7", - "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "resolved": "http://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", "dev": true, "requires": { @@ -6436,7 +6436,7 @@ }, "css-in-js-utils": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/css-in-js-utils/-/css-in-js-utils-2.0.1.tgz", + "resolved": "http://registry.npmjs.org/css-in-js-utils/-/css-in-js-utils-2.0.1.tgz", "integrity": "sha512-PJF0SpJT+WdbVVt0AOYp9C8GnuruRlL/UFW7932nLWmFLQTaWEzTBQEx7/hn4BuV+WON75iAViSUJLiU3PKbpA==", "requires": { "hyphenate-style-name": "^1.0.2", @@ -6496,7 +6496,7 @@ }, "css-select": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", + "resolved": "http://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", "integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=", "requires": { "boolbase": "~1.0.0", @@ -6973,7 +6973,7 @@ }, "d3-geo-projection": { "version": "0.2.16", - "resolved": "https://registry.npmjs.org/d3-geo-projection/-/d3-geo-projection-0.2.16.tgz", + "resolved": "http://registry.npmjs.org/d3-geo-projection/-/d3-geo-projection-0.2.16.tgz", "integrity": "sha1-SZTs0QM92xUztsTFUoocgdzClCc=", "requires": { "brfs": "^1.3.0" @@ -7399,7 +7399,7 @@ }, "diffie-hellman": { "version": "5.0.3", - "resolved": "https://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", + "resolved": "http://registry.npmjs.org/diffie-hellman/-/diffie-hellman-5.0.3.tgz", "integrity": "sha512-kqag/Nl+f3GwyK25fhUMYj81BUOrZ9IuJsjIcDE5icNM9FJHAVm3VcUDxdLPoQtTuUylWm6ZIknYJwwaPxsUzg==", "dev": true, "requires": { @@ -7428,7 +7428,7 @@ }, "dnd-core": { "version": "2.6.0", - "resolved": 
"https://registry.npmjs.org/dnd-core/-/dnd-core-2.6.0.tgz", + "resolved": "http://registry.npmjs.org/dnd-core/-/dnd-core-2.6.0.tgz", "integrity": "sha1-ErrWbVh0LG5ffPKUP7aFlED4CcQ=", "requires": { "asap": "^2.0.6", @@ -7553,7 +7553,7 @@ }, "duplexer": { "version": "0.1.1", - "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", + "resolved": "http://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=", "dev": true }, @@ -7788,7 +7788,7 @@ }, "es6-promise": { "version": "3.3.1", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "resolved": "http://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", "integrity": "sha1-oIzd6EzNvzTQJ6FFG8kdS80ophM=" }, "es6bindall": { @@ -7834,7 +7834,7 @@ }, "eslint": { "version": "4.19.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-4.19.1.tgz", + "resolved": "http://registry.npmjs.org/eslint/-/eslint-4.19.1.tgz", "integrity": "sha512-bT3/1x1EbZB7phzYu7vCr1v3ONuzDtX8WjuM9c0iYxe+cq+pwcKEoQjl7zd3RpC6YOLgnSy3cTN58M2jcoPDIQ==", "dev": true, "requires": { @@ -7938,7 +7938,7 @@ }, "fast-deep-equal": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", + "resolved": "http://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=", "dev": true }, @@ -8260,7 +8260,7 @@ }, "espree": { "version": "3.5.4", - "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", + "resolved": "http://registry.npmjs.org/espree/-/espree-3.5.4.tgz", "integrity": "sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A==", "dev": true, "requires": { @@ -8443,7 +8443,7 @@ "dependencies": { "source-map": { "version": "0.5.0", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.0.tgz", + "resolved": "http://registry.npmjs.org/source-map/-/source-map-0.5.0.tgz", "integrity": 
"sha1-D+llA6yGpa213mP05BKuSHLNvoY=", "dev": true } @@ -8516,7 +8516,7 @@ }, "external-editor": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz", + "resolved": "http://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz", "integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==", "dev": true, "requires": { @@ -8738,7 +8738,7 @@ }, "file-loader": { "version": "1.1.11", - "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-1.1.11.tgz", + "resolved": "http://registry.npmjs.org/file-loader/-/file-loader-1.1.11.tgz", "integrity": "sha512-TGR4HU7HUsGg6GCOPJnFk06RhWgEWFLAGWiT6rcD+GRC2keU3s9RGJ+b3Z6/U73jwwNb2gKLJ7YCrp+jvU4ALg==", "dev": true, "requires": { @@ -8748,7 +8748,7 @@ }, "file-type": { "version": "3.9.0", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", + "resolved": "http://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", "integrity": "sha1-JXoHg4TR24CHvESdEH1SpSZyuek=" }, "fileset": { @@ -10087,7 +10087,7 @@ }, "gettext-parser": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/gettext-parser/-/gettext-parser-1.1.0.tgz", + "resolved": "http://registry.npmjs.org/gettext-parser/-/gettext-parser-1.1.0.tgz", "integrity": "sha1-LFpmONiTk0ubVQN9CtgstwBLJnk=", "dev": true, "requires": { @@ -10179,7 +10179,7 @@ }, "globby": { "version": "6.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", + "resolved": "http://registry.npmjs.org/globby/-/globby-6.1.0.tgz", "integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", "dev": true, "requires": { @@ -10472,7 +10472,7 @@ }, "hoist-non-react-statics": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-1.2.0.tgz", + "resolved": "http://registry.npmjs.org/hoist-non-react-statics/-/hoist-non-react-statics-1.2.0.tgz", "integrity": "sha1-qkSM8JhtVcxAdzsXF0t90GbLfPs=" }, 
"homedir-polyfill": { @@ -11326,7 +11326,7 @@ }, "is-accessor-descriptor": { "version": "0.1.6", - "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", + "resolved": "http://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", "requires": { "kind-of": "^3.0.2" @@ -11419,7 +11419,7 @@ }, "is-data-descriptor": { "version": "0.1.4", - "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", + "resolved": "http://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", "requires": { "kind-of": "^3.0.2" @@ -13856,7 +13856,7 @@ }, "magic-string": { "version": "0.22.5", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.22.5.tgz", + "resolved": "http://registry.npmjs.org/magic-string/-/magic-string-0.22.5.tgz", "integrity": "sha512-oreip9rJZkzvA8Qzk9HFs8fZGF/u7H/gtrE8EN6RjKJ9kh2HlC+yQ2QezifqTZfGyiuAV0dRv5a+y/8gBb1m9w==", "requires": { "vlq": "^0.2.2" @@ -14035,7 +14035,7 @@ }, "mathjs": { "version": "3.20.2", - "resolved": "https://registry.npmjs.org/mathjs/-/mathjs-3.20.2.tgz", + "resolved": "http://registry.npmjs.org/mathjs/-/mathjs-3.20.2.tgz", "integrity": "sha512-3f6/+uf1cUtIz1rYFz775wekl/UEDSQ3mU6xdxW7qzpvvhc2v28i3UtLsGTRB+u8OqDWoSX6Dz8gehaGFs6tCA==", "requires": { "complex.js": "2.0.4", @@ -14085,7 +14085,7 @@ }, "media-typer": { "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "resolved": "http://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", "dev": true }, @@ -14285,7 +14285,7 @@ }, "minimist": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=" }, 
"minipass": { @@ -14355,7 +14355,7 @@ }, "mkdirp": { "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", + "resolved": "http://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", "requires": { "minimist": "0.0.8" @@ -14363,7 +14363,7 @@ "dependencies": { "minimist": { "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", + "resolved": "http://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" } } @@ -15017,7 +15017,7 @@ }, "os-tmpdir": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "resolved": "http://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=" }, "osenv": { @@ -15220,7 +15220,7 @@ }, "path-browserify": { "version": "0.0.0", - "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz", + "resolved": "http://registry.npmjs.org/path-browserify/-/path-browserify-0.0.0.tgz", "integrity": "sha1-oLhwcpquIUAFt9UDLsLLuw+0RRo=", "dev": true }, @@ -15238,7 +15238,7 @@ }, "path-is-absolute": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "http://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-is-inside": { @@ -15377,7 +15377,7 @@ }, "po2json": { "version": "0.4.5", - "resolved": "https://registry.npmjs.org/po2json/-/po2json-0.4.5.tgz", + "resolved": "http://registry.npmjs.org/po2json/-/po2json-0.4.5.tgz", "integrity": "sha1-R7spUtoy1Yob4vJWpZjuvAt0URg=", "dev": true, "requires": { @@ -15439,7 +15439,7 @@ "dependencies": { "async": { "version": "1.5.2", - "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", + "resolved": "http://registry.npmjs.org/async/-/async-1.5.2.tgz", "integrity": 
"sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=", "dev": true } @@ -17859,7 +17859,7 @@ }, "react-ace": { "version": "5.10.0", - "resolved": "https://registry.npmjs.org/react-ace/-/react-ace-5.10.0.tgz", + "resolved": "http://registry.npmjs.org/react-ace/-/react-ace-5.10.0.tgz", "integrity": "sha512-aEK/XZCowP8IXq91e2DYqOtGhabk1bbjt+fyeW0UBcIkzDzP/RX/MeJKeyW7wsZcwElACVwyy9nnwXBTqgky3A==", "requires": { "brace": "^0.11.0", @@ -17968,7 +17968,7 @@ }, "react-dnd": { "version": "2.6.0", - "resolved": "https://registry.npmjs.org/react-dnd/-/react-dnd-2.6.0.tgz", + "resolved": "http://registry.npmjs.org/react-dnd/-/react-dnd-2.6.0.tgz", "integrity": "sha1-f6JWds+CfViokSk+PBq1naACVFo=", "requires": { "disposables": "^1.0.1", @@ -17988,7 +17988,7 @@ }, "react-dnd-html5-backend": { "version": "2.6.0", - "resolved": "https://registry.npmjs.org/react-dnd-html5-backend/-/react-dnd-html5-backend-2.6.0.tgz", + "resolved": "http://registry.npmjs.org/react-dnd-html5-backend/-/react-dnd-html5-backend-2.6.0.tgz", "integrity": "sha1-WQzRzKeEQbsnTt1XH+9MCxbdz44=", "requires": { "lodash": "^4.2.0" @@ -18639,7 +18639,7 @@ }, "readable-stream": { "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", + "resolved": "http://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", "requires": { "core-util-is": "~1.0.0", @@ -18962,7 +18962,7 @@ }, "reduce-css-calc": { "version": "1.3.0", - "resolved": "https://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz", + "resolved": "http://registry.npmjs.org/reduce-css-calc/-/reduce-css-calc-1.3.0.tgz", "integrity": "sha1-dHyRTgSWFKTJz7umKYca0dKSdxY=", "requires": { "balanced-match": "^0.4.2", @@ -19099,7 +19099,7 @@ }, "regexpp": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-1.1.0.tgz", + "resolved": 
"http://registry.npmjs.org/regexpp/-/regexpp-1.1.0.tgz", "integrity": "sha512-LOPw8FpgdQF9etWMaAfG/WRthIdXJGYp4mJ2Jgn/2lpkbod9jPn0t9UqN7AxBOKNfzRbYyVfgc7Vk4t/MpnXgw==", "dev": true }, @@ -19271,7 +19271,7 @@ }, "require-uncached": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/require-uncached/-/require-uncached-1.0.3.tgz", + "resolved": "http://registry.npmjs.org/require-uncached/-/require-uncached-1.0.3.tgz", "integrity": "sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM=", "dev": true, "requires": { @@ -19435,7 +19435,7 @@ }, "safe-regex": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "resolved": "http://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", "requires": { "ret": "~0.1.10" @@ -19661,7 +19661,7 @@ }, "sha.js": { "version": "2.4.11", - "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "resolved": "http://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", "dev": true, "requires": { @@ -19696,7 +19696,7 @@ }, "iconv-lite": { "version": "0.2.11", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", + "resolved": "http://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", "integrity": "sha1-HOYKOleGSiktEyH/RgnKS7llrcg=" } } @@ -19775,7 +19775,7 @@ }, "sinon": { "version": "4.5.0", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-4.5.0.tgz", + "resolved": "http://registry.npmjs.org/sinon/-/sinon-4.5.0.tgz", "integrity": "sha512-trdx+mB0VBBgoYucy6a9L7/jfQOmvGeaKZT4OOJ+lPAtI8623xyGr8wLiE4eojzBS8G9yXbhx42GHUOVLr4X2w==", "dev": true, "requires": { @@ -20160,7 +20160,7 @@ }, "split": { "version": "0.2.10", - "resolved": "https://registry.npmjs.org/split/-/split-0.2.10.tgz", + "resolved": "http://registry.npmjs.org/split/-/split-0.2.10.tgz", "integrity": "sha1-Zwl8YB1pfOE2j0GPBs0gHPBSGlc=", 
"requires": { "through": "2" @@ -20428,7 +20428,7 @@ }, "string-width": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "resolved": "http://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", "requires": { "code-point-at": "^1.0.0", @@ -20448,7 +20448,7 @@ }, "string_decoder": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "resolved": "http://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" @@ -20456,7 +20456,7 @@ }, "strip-ansi": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "requires": { "ansi-regex": "^2.0.0" @@ -20470,7 +20470,7 @@ }, "strip-eof": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "resolved": "http://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", "dev": true }, @@ -20792,7 +20792,7 @@ }, "fast-deep-equal": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", + "resolved": "http://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=", "dev": true }, @@ -20849,7 +20849,7 @@ }, "tapable": { "version": "0.1.10", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-0.1.10.tgz", + "resolved": "http://registry.npmjs.org/tapable/-/tapable-0.1.10.tgz", "integrity": "sha1-KcNXB8K3DlDQdIK10gLo7URtr9Q=", "dev": true }, @@ -21037,7 +21037,7 @@ }, "through": { "version": "2.3.8", - "resolved": 
"https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "resolved": "http://registry.npmjs.org/through/-/through-2.3.8.tgz", "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" }, "through2": { @@ -21756,7 +21756,7 @@ }, "tty-browserify": { "version": "0.0.0", - "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", + "resolved": "http://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=", "dev": true }, @@ -22331,7 +22331,7 @@ }, "vm-browserify": { "version": "0.0.4", - "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz", + "resolved": "http://registry.npmjs.org/vm-browserify/-/vm-browserify-0.0.4.tgz", "integrity": "sha1-XX6kW7755Kb/ZflUOOCofDV9WnM=", "dev": true, "requires": { @@ -23955,7 +23955,7 @@ }, "wrap-ansi": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "resolved": "http://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", "dev": true, "requires": { @@ -24048,7 +24048,7 @@ }, "xmlbuilder": { "version": "9.0.7", - "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", + "resolved": "http://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" }, "xmlhttprequest": { diff --git a/superset/assets/spec/javascripts/components/TableSelector_spec.jsx b/superset/assets/spec/javascripts/components/TableSelector_spec.jsx index 70e2cca1f1925..13665927556d3 100644 --- a/superset/assets/spec/javascripts/components/TableSelector_spec.jsx +++ b/superset/assets/spec/javascripts/components/TableSelector_spec.jsx @@ -208,19 +208,20 @@ describe('TableSelector', () => { it('test 1', () => { wrapper.instance().changeTable({ - value: 'birth_names', + value: { schema: 'main', table: 'birth_names' }, label: 'birth_names', }); expect(wrapper.state().tableName).toBe('birth_names'); }); - 
it('test 2', () => { + it('should call onTableChange with schema from table object', () => { + wrapper.setProps({ schema: null }); wrapper.instance().changeTable({ - value: 'main.my_table', - label: 'my_table', + value: { schema: 'other_schema', table: 'my_table' }, + label: 'other_schema.my_table', }); expect(mockedProps.onTableChange.getCall(0).args[0]).toBe('my_table'); - expect(mockedProps.onTableChange.getCall(0).args[1]).toBe('main'); + expect(mockedProps.onTableChange.getCall(0).args[1]).toBe('other_schema'); }); }); diff --git a/superset/assets/spec/javascripts/explore/components/MetricsControl_spec.jsx b/superset/assets/spec/javascripts/explore/components/MetricsControl_spec.jsx index 6f2c657a485ff..31bddf87dd686 100644 --- a/superset/assets/spec/javascripts/explore/components/MetricsControl_spec.jsx +++ b/superset/assets/spec/javascripts/explore/components/MetricsControl_spec.jsx @@ -85,6 +85,14 @@ describe('MetricsControl', () => { ]); }); + it('does not show aggregates in options if no columns', () => { + const { wrapper } = setup({ columns: [] }); + expect(wrapper.state('options')).toEqual([ + { optionName: 'sum__value', metric_name: 'sum__value', expression: 'SUM(energy_usage.value)' }, + { optionName: 'avg__value', metric_name: 'avg__value', expression: 'AVG(energy_usage.value)' }, + ]); + }); + it('coerces Adhoc Metrics from form data into instances of the AdhocMetric class and leaves saved metrics', () => { const { wrapper } = setup({ value: [ diff --git a/superset/assets/spec/javascripts/explore/components/withVerification_spec.jsx b/superset/assets/spec/javascripts/explore/components/withVerification_spec.jsx new file mode 100644 index 0000000000000..44377ea2669c3 --- /dev/null +++ b/superset/assets/spec/javascripts/explore/components/withVerification_spec.jsx @@ -0,0 +1,106 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; +import sinon from 'sinon'; +import { shallow } from 'enzyme'; +import fetchMock from 'fetch-mock'; + +import MetricsControl from '../../../../src/explore/components/controls/MetricsControl'; +import withVerification from '../../../../src/explore/components/controls/withVerification'; + +const defaultProps = { + name: 'metrics', + label: 'Metrics', + value: undefined, + multi: true, + columns: [ + { type: 'VARCHAR(255)', column_name: 'source' }, + { type: 'VARCHAR(255)', column_name: 'target' }, + { type: 'DOUBLE', column_name: 'value' }, + ], + savedMetrics: [ + { metric_name: 'sum__value', expression: 'SUM(energy_usage.value)' }, + { metric_name: 'avg__value', expression: 'AVG(energy_usage.value)' }, + ], + datasourceType: 'sqla', + getEndpoint: controlValues => `valid_metrics?data=${controlValues}`, +}; + +const VALID_METRIC = { metric_name: 'sum__value', expression: 'SUM(energy_usage.value)' }; + +function setup(overrides) { + const onChange = sinon.spy(); + const props = { + onChange, + ...defaultProps, + ...overrides, + }; + const VerifiedControl = withVerification(MetricsControl, 'metric_name', 'savedMetrics'); + const wrapper = shallow(); + fetchMock.mock('glob:*/valid_metrics*', `["${VALID_METRIC.metric_name}"]`); + return { props, 
wrapper, onChange }; +} + +afterEach(fetchMock.restore); + +describe('VerifiedMetricsControl', () => { + + it('Gets valid options', () => { + const { wrapper } = setup(); + setTimeout(() => { + expect(fetchMock.calls(defaultProps.getEndpoint())).toHaveLength(1); + expect(wrapper.state('validOptions')).toEqual([VALID_METRIC]); + fetchMock.reset(); + }, 0); + }); + + it('Returns verified options', () => { + const { wrapper } = setup(); + setTimeout(() => { + expect(fetchMock.calls(defaultProps.getEndpoint())).toHaveLength(1); + const child = wrapper.find(MetricsControl); + expect(child.props().savedMetrics).toEqual([VALID_METRIC]); + fetchMock.reset(); + }, 0); + }); + + it('Makes no calls if endpoint is not set', () => { + const { wrapper } = setup({ + getEndpoint: () => null, + }); + setTimeout(() => { + expect(fetchMock.calls(defaultProps.getEndpoint())).toHaveLength(0); + expect(wrapper.state('validOptions')).toEqual(new Set()); + fetchMock.reset(); + }, 0); + }); + + it('Calls endpoint if control values change', () => { + const { props, wrapper } = setup({ controlValues: { metrics: 'sum__value' } }); + setTimeout(() => { + expect(fetchMock.calls(defaultProps.getEndpoint())).toHaveLength(1); + fetchMock.reset(); + }, 0); + wrapper.setProps({ ...props, controlValues: { metrics: 'avg__value' } }); + setTimeout(() => { + expect(fetchMock.calls(defaultProps.getEndpoint())).toHaveLength(1); + fetchMock.reset(); + }, 0); + }); +}); diff --git a/superset/assets/spec/javascripts/showSavedQuery/utils_spec.jsx b/superset/assets/spec/javascripts/showSavedQuery/utils_spec.jsx new file mode 100644 index 0000000000000..d198a49180f6b --- /dev/null +++ b/superset/assets/spec/javascripts/showSavedQuery/utils_spec.jsx @@ -0,0 +1,65 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import { getNestedValue, interpolate } from '../../../src/showSavedQuery/utils'; + +describe('getNestedValue', () => { + it('is a function', () => { + expect(typeof getNestedValue).toBe('function'); + }); + + it('works with simple ids', () => { + const obj = { a: '1' }; + const id = 'a'; + expect(getNestedValue(obj, id)).toEqual('1'); + }); + + it('works with complex ids', () => { + const obj = { a: { b: '1' } }; + const id = 'a.b'; + expect(getNestedValue(obj, id)).toEqual('1'); + }); + + it('works with other separators', () => { + const obj = { a: { b: { c: '1' } } }; + const id = 'a__b__c'; + const separator = '__'; + expect(getNestedValue(obj, id, separator)).toEqual('1'); + }); +}); + + +describe('interpolate', () => { + it('is a function', () => { + expect(typeof interpolate).toBe('function'); + }); + + it('works with simple ids', () => { + const obj = { a: '1' }; + // eslint-disable-next-line no-template-curly-in-string + const str = 'value: ${a}'; + expect(interpolate(str, obj)).toEqual('value: 1'); + }); + + it('works with complex ids', () => { + const obj = { a: { b: '1' } }; + // eslint-disable-next-line no-template-curly-in-string + const str = 'value: ${a.b}'; + expect(interpolate(str, obj)).toEqual('value: 1'); + }); +}); diff --git a/superset/assets/spec/javascripts/sqllab/fixtures.js b/superset/assets/spec/javascripts/sqllab/fixtures.js index 
6471be1286556..f43f43f550a68 100644 --- a/superset/assets/spec/javascripts/sqllab/fixtures.js +++ b/superset/assets/spec/javascripts/sqllab/fixtures.js @@ -329,15 +329,15 @@ export const databases = { export const tables = { options: [ { - value: 'birth_names', + value: { schema: 'main', table: 'birth_names' }, label: 'birth_names', }, { - value: 'energy_usage', + value: { schema: 'main', table: 'energy_usage' }, label: 'energy_usage', }, { - value: 'wb_health_population', + value: { schema: 'main', table: 'wb_health_population' }, label: 'wb_health_population', }, ], diff --git a/superset/assets/src/SqlLab/actions/sqlLab.js b/superset/assets/src/SqlLab/actions/sqlLab.js index 81c8e8d5593ec..9c5a9d04826bb 100644 --- a/superset/assets/src/SqlLab/actions/sqlLab.js +++ b/superset/assets/src/SqlLab/actions/sqlLab.js @@ -107,6 +107,17 @@ export function saveQuery(query) { .catch(() => dispatch(addDangerToast(t('Your query could not be saved')))); } +export function scheduleQuery(query) { + return dispatch => + SupersetClient.post({ + endpoint: '/savedqueryviewapi/api/create', + postPayload: query, + stringify: false, + }) + .then(() => dispatch(addSuccessToast(t('Your query has been scheduled. To see details of your query, navigate to Saved Queries')))) + .catch(() => dispatch(addDangerToast(t('Your query could not be scheduled')))); +} + export function startQuery(query) { Object.assign(query, { id: query.id ? 
query.id : shortid.generate(), diff --git a/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx b/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx index 8ad02adad3bf9..6704d393fe754 100644 --- a/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx +++ b/superset/assets/src/SqlLab/components/QueryAutoRefresh.jsx @@ -38,19 +38,13 @@ class QueryAutoRefresh extends React.PureComponent { } shouldCheckForQueries() { // if there are started or running queries, this method should return true - const { queries, queriesLastUpdate } = this.props; + const { queries } = this.props; const now = new Date().getTime(); - - // due to a race condition, queries can be marked as successful before the - // results key is set; this is a workaround until we fix the underlying - // problem const isQueryRunning = q => ( - ['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0 || - (q.state === 'success' && q.resultsKey === null) + ['running', 'started', 'pending', 'fetching'].indexOf(q.state) >= 0 ); return ( - queriesLastUpdate > 0 && Object.values(queries).some( q => isQueryRunning(q) && now - q.startDttm < MAX_QUERY_AGE_TO_POLL, diff --git a/superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx b/superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx index 99ecaa5256052..d8f82fba62428 100644 --- a/superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx +++ b/superset/assets/src/SqlLab/components/ScheduleQueryButton.jsx @@ -20,6 +20,7 @@ import React from 'react'; import PropTypes from 'prop-types'; import Form from 'react-jsonschema-form'; import chrono from 'chrono-node'; +import { Col, FormControl, FormGroup, Row } from 'react-bootstrap'; import { t } from '@superset-ui/translation'; import Button from '../../components/Button'; @@ -76,11 +77,17 @@ const propTypes = { dbId: PropTypes.number.isRequired, animation: PropTypes.bool, onSchedule: PropTypes.func, + scheduleQueryWarning: PropTypes.string, + disabled: PropTypes.bool, + 
tooltip: PropTypes.string, }; const defaultProps = { defaultLabel: t('Undefined'), animation: true, onSchedule: () => {}, + scheduleQueryWarning: null, + disabled: false, + tooltip: null, }; class ScheduleQueryButton extends React.PureComponent { @@ -123,12 +130,53 @@ class ScheduleQueryButton extends React.PureComponent { } renderModalBody() { return ( -
+ + + + + + + + + + + + + + + + + + + {this.props.scheduleQueryWarning && ( + + + + {this.props.scheduleQueryWarning} + + + + )} + ); } render() { @@ -139,7 +187,13 @@ class ScheduleQueryButton extends React.PureComponent { modalTitle={t('Schedule Query')} modalBody={this.renderModalBody()} triggerNode={ - } diff --git a/superset/assets/src/SqlLab/components/SqlEditor.jsx b/superset/assets/src/SqlLab/components/SqlEditor.jsx index 0b25dcf20ecad..a4aabb73fd49a 100644 --- a/superset/assets/src/SqlLab/components/SqlEditor.jsx +++ b/superset/assets/src/SqlLab/components/SqlEditor.jsx @@ -67,13 +67,14 @@ const propTypes = { defaultQueryLimit: PropTypes.number.isRequired, maxRow: PropTypes.number.isRequired, saveQueryWarning: PropTypes.string, + scheduleQueryWarning: PropTypes.string, }; const defaultProps = { database: null, latestQuery: null, hideLeftBar: false, - saveQueryWarning: null, + scheduleQueryWarning: null, }; class SqlEditor extends React.PureComponent { @@ -334,6 +335,10 @@ class SqlEditor extends React.PureComponent { ); } + const successful = this.props.latestQuery && this.props.latestQuery.state === 'success'; + const scheduleToolTip = successful + ? t('Schedule the query periodically') + : t('You must run the query successfully first'); return (
@@ -355,9 +360,12 @@ class SqlEditor extends React.PureComponent { defaultLabel={qe.title} sql={qe.sql} className="m-r-5" - onSchedule={this.props.actions.saveQuery} + onSchedule={this.props.actions.scheduleQuery} schema={qe.schema} dbId={qe.dbId} + scheduleQueryWarning={this.props.scheduleQueryWarning} + tooltip={scheduleToolTip} + disabled={!successful} /> } diff --git a/superset/assets/src/SqlLab/components/SqlEditorLeftBar.jsx b/superset/assets/src/SqlLab/components/SqlEditorLeftBar.jsx index 9d0796cd8ce74..43ea4873a0f73 100644 --- a/superset/assets/src/SqlLab/components/SqlEditorLeftBar.jsx +++ b/superset/assets/src/SqlLab/components/SqlEditorLeftBar.jsx @@ -83,17 +83,10 @@ export default class SqlEditorLeftBar extends React.PureComponent { this.setState({ tableName: '' }); return; } - const namePieces = tableOpt.value.split('.'); - let tableName = namePieces[0]; - let schemaName = this.props.queryEditor.schema; - if (namePieces.length === 1) { - this.setState({ tableName }); - } else { - schemaName = namePieces[0]; - tableName = namePieces[1]; - this.setState({ tableName }); - this.props.actions.queryEditorSetSchema(this.props.queryEditor, schemaName); - } + const schemaName = tableOpt.value.schema; + const tableName = tableOpt.value.table; + this.setState({ tableName }); + this.props.actions.queryEditorSetSchema(this.props.queryEditor, schemaName); this.props.actions.addTable(this.props.queryEditor, tableName, schemaName); } diff --git a/superset/assets/src/SqlLab/components/TabbedSqlEditors.jsx b/superset/assets/src/SqlLab/components/TabbedSqlEditors.jsx index a8516f174387b..7c32021107e2d 100644 --- a/superset/assets/src/SqlLab/components/TabbedSqlEditors.jsx +++ b/superset/assets/src/SqlLab/components/TabbedSqlEditors.jsx @@ -41,11 +41,13 @@ const propTypes = { tables: PropTypes.array.isRequired, offline: PropTypes.bool, saveQueryWarning: PropTypes.string, + scheduleQueryWarning: PropTypes.string, }; const defaultProps = { queryEditors: [], offline: false, 
saveQueryWarning: null, + scheduleQueryWarning: null, }; let queryCount = 1; @@ -250,6 +252,7 @@ class TabbedSqlEditors extends React.PureComponent { defaultQueryLimit={this.props.defaultQueryLimit} maxRow={this.props.maxRow} saveQueryWarning={this.props.saveQueryWarning} + scheduleQueryWarning={this.props.scheduleQueryWarning} /> )} @@ -294,6 +297,7 @@ function mapStateToProps({ sqlLab, common }) { defaultQueryLimit: common.conf.DEFAULT_SQLLAB_LIMIT, maxRow: common.conf.SQL_MAX_ROW, saveQueryWarning: common.conf.SQLLAB_SAVE_WARNING_MESSAGE, + scheduleQueryWarning: common.conf.SQLLAB_SCHEDULE_WARNING_MESSAGE, }; } function mapDispatchToProps(dispatch) { diff --git a/superset/assets/src/components/TableSelector.jsx b/superset/assets/src/components/TableSelector.jsx index ba2cebb2799d8..940e1c274b93a 100644 --- a/superset/assets/src/components/TableSelector.jsx +++ b/superset/assets/src/components/TableSelector.jsx @@ -170,13 +170,8 @@ export default class TableSelector extends React.PureComponent { this.setState({ tableName: '' }); return; } - const namePieces = tableOpt.value.split('.'); - let tableName = namePieces[0]; - let schemaName = this.props.schema; - if (namePieces.length > 1) { - schemaName = namePieces[0]; - tableName = namePieces[1]; - } + const schemaName = tableOpt.value.schema; + const tableName = tableOpt.value.table; if (this.props.tableNameSticky) { this.setState({ tableName }, this.onChange); } diff --git a/superset/assets/src/explore/components/controls/MetricsControl.jsx b/superset/assets/src/explore/components/controls/MetricsControl.jsx index b42cc814d67b6..1e49355e3d841 100644 --- a/superset/assets/src/explore/components/controls/MetricsControl.jsx +++ b/superset/assets/src/explore/components/controls/MetricsControl.jsx @@ -238,10 +238,14 @@ export default class MetricsControl extends React.PureComponent { } optionsForSelect(props) { + const { columns, savedMetrics } = props; + const aggregates = columns && columns.length ? 
+ Object.keys(AGGREGATES).map(aggregate => ({ aggregate_name: aggregate })) : + []; const options = [ - ...props.columns, - ...Object.keys(AGGREGATES).map(aggregate => ({ aggregate_name: aggregate })), - ...props.savedMetrics, + ...columns, + ...aggregates, + ...savedMetrics, ]; return options.reduce((results, option) => { diff --git a/superset/assets/src/explore/components/controls/index.js b/superset/assets/src/explore/components/controls/index.js index 32a8d449c36e9..a5800f2d11c69 100644 --- a/superset/assets/src/explore/components/controls/index.js +++ b/superset/assets/src/explore/components/controls/index.js @@ -40,6 +40,7 @@ import MetricsControl from './MetricsControl'; import AdhocFilterControl from './AdhocFilterControl'; import FilterPanel from './FilterPanel'; import FilterBoxItemControl from './FilterBoxItemControl'; +import withVerification from './withVerification'; const controlMap = { AnnotationLayerControl, @@ -66,5 +67,8 @@ const controlMap = { AdhocFilterControl, FilterPanel, FilterBoxItemControl, + MetricsControlVerifiedOptions: withVerification(MetricsControl, 'metric_name', 'savedMetrics'), + SelectControlVerifiedOptions: withVerification(SelectControl, 'column_name', 'options'), + AdhocFilterControlVerifiedOptions: withVerification(AdhocFilterControl, 'column_name', 'columns'), }; export default controlMap; diff --git a/superset/assets/src/explore/components/controls/withVerification.jsx b/superset/assets/src/explore/components/controls/withVerification.jsx new file mode 100644 index 0000000000000..8f1e549d0a00a --- /dev/null +++ b/superset/assets/src/explore/components/controls/withVerification.jsx @@ -0,0 +1,88 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; +import { SupersetClient } from '@superset-ui/connection'; + +import { isEqual } from 'lodash'; + +export default function withVerification(WrappedComponent, optionLabel, optionsName) { + /* + * This function will verify control options before passing them to the control by calling an + * endpoint on mount and when the controlValues change. controlValues should be set in + * mapStateToProps that can be added as a control override along with getEndpoint. 
+ */ + class withVerificationComponent extends React.Component { + constructor(props) { + super(props); + this.state = { + validOptions: new Set(), + hasRunVerification: false, + }; + + this.getValidOptions = this.getValidOptions.bind(this); + } + + componentDidMount() { + this.getValidOptions(); + } + + componentDidUpdate(prevProps) { + const { hasRunVerification } = this.state; + if (!isEqual(this.props.controlValues, prevProps.controlValues) || !hasRunVerification) { + this.getValidOptions(); + } + } + + getValidOptions() { + const endpoint = this.props.getEndpoint(this.props.controlValues); + if (endpoint) { + SupersetClient.get({ + endpoint, + }).then(({ json }) => { + if (Array.isArray(json)) { + this.setState({ validOptions: new Set(json) || new Set() }); + } + }).catch(error => console.log(error)); + + if (!this.state.hasRunVerification) { + this.setState({ hasRunVerification: true }); + } + } + } + + render() { + const { validOptions } = this.state; + const options = this.props[optionsName]; + const verifiedOptions = validOptions.size ? 
+ options.filter(o => (validOptions.has(o[optionLabel]))) : + options; + + const newProps = { ...this.props, [optionsName]: verifiedOptions }; + + return ( + + ); + } + } + withVerificationComponent.propTypes = WrappedComponent.propTypes; + return withVerificationComponent; +} + diff --git a/superset/assets/src/explore/controls.jsx b/superset/assets/src/explore/controls.jsx index f56b02c7006fd..c88539a20b60b 100644 --- a/superset/assets/src/explore/controls.jsx +++ b/superset/assets/src/explore/controls.jsx @@ -1865,7 +1865,7 @@ export const controls = { 'Either a numerical column or `Auto`, which scales the point based ' + 'on the largest cluster'), mapStateToProps: state => ({ - choices: columnChoices(state.datasource), + choices: formatSelectOptions(['Auto']).concat(columnChoices(state.datasource)), }), }, diff --git a/superset/assets/src/showSavedQuery/index.css b/superset/assets/src/showSavedQuery/index.css new file mode 100644 index 0000000000000..026dd784f34ca --- /dev/null +++ b/superset/assets/src/showSavedQuery/index.css @@ -0,0 +1,20 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +.btn-add { display: none; } +.linkback { padding: 0 10px 20px 2px; } diff --git a/superset/assets/src/showSavedQuery/index.jsx b/superset/assets/src/showSavedQuery/index.jsx index cd384fe5b461e..c64626cb6a4f9 100644 --- a/superset/assets/src/showSavedQuery/index.jsx +++ b/superset/assets/src/showSavedQuery/index.jsx @@ -19,25 +19,39 @@ import React from 'react'; import ReactDom from 'react-dom'; import Form from 'react-jsonschema-form'; +import { interpolate } from 'src/showSavedQuery/utils'; +import './index.css'; const scheduleInfoContainer = document.getElementById('schedule-info'); const bootstrapData = JSON.parse(scheduleInfoContainer.getAttribute('data-bootstrap')); -const schemas = bootstrapData.common.feature_flags.SCHEDULED_QUERIES; -const scheduleInfo = bootstrapData.common.extra_json.schedule_info; +const config = bootstrapData.common.feature_flags.SCHEDULED_QUERIES; +const query = bootstrapData.common.query; +const scheduleInfo = query.extra_json.schedule_info; +const linkback = config.linkback + ? interpolate(config.linkback, query) + : null; -if (scheduleInfo && schemas) { +if (scheduleInfo && config) { // hide instructions when showing schedule info - schemas.JSONSCHEMA.description = ''; + config.JSONSCHEMA.description = ''; ReactDom.render( - -
- , +
+
+
+
+ {linkback && } +
, scheduleInfoContainer, ); } diff --git a/superset/assets/src/showSavedQuery/utils.js b/superset/assets/src/showSavedQuery/utils.js new file mode 100644 index 0000000000000..9cd712bc893f3 --- /dev/null +++ b/superset/assets/src/showSavedQuery/utils.js @@ -0,0 +1,44 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +export function getNestedValue(obj, id, separator = '.') { + /* + * Given a nested object and an id, return the nested value. + * + * > getNestedValue({a:{b:1}}, 'a.b') + * < 1 + */ + const index = id.indexOf(separator); + if (index === -1) { + return obj[id]; + } + const name = id.slice(0, index); + const rest = id.slice(index + separator.length); + return getNestedValue(obj[name], rest, separator); +} + +export function interpolate(str, obj) { + /* + * Programmatic template string for interpolation. 
+ * + * > interpolate('foo ${a.b}', {a:{b:1}}) + * < "foo 1" + */ + return str.replace(/\$\{(.+?)\}/g, (match, id) => getNestedValue(obj, id)); +} diff --git a/superset/cli.py b/superset/cli.py index 52429104fbf79..6691b0148f8f0 100755 --- a/superset/cli.py +++ b/superset/cli.py @@ -132,7 +132,7 @@ def load_examples(load_test_data): @click.option('--datasource', '-d', help='Specify which datasource name to load, if ' 'omitted, all datasources will be refreshed') @click.option('--merge', '-m', is_flag=True, default=False, - help='Specify using \'merge\' property during operation. ' + help="Specify using 'merge' property during operation. " 'Default value is False.') def refresh_druid(datasource, merge): """Refresh druid datasources""" @@ -288,9 +288,9 @@ def update_datasources_cache(): if database.allow_multi_schema_metadata_fetch: print('Fetching {} datasources ...'.format(database.name)) try: - database.all_table_names_in_database( + database.get_all_table_names_in_database( force=True, cache=True, cache_timeout=24 * 60 * 60) - database.all_view_names_in_database( + database.get_all_view_names_in_database( force=True, cache=True, cache_timeout=24 * 60 * 60) except Exception as e: print('{}'.format(str(e))) @@ -371,7 +371,7 @@ def load_test_users_run(): security_manager.add_permission_role(gamma_sqllab_role, perm) utils.get_or_create_main_db() db_perm = utils.get_main_database(security_manager.get_session).perm - security_manager.merge_perm('database_access', db_perm) + security_manager.add_permission_view_menu('database_access', db_perm) db_pvm = security_manager.find_permission_view_menu( view_menu_name=db_perm, permission_name='database_access') gamma_sqllab_role.permissions.append(db_pvm) diff --git a/superset/config.py b/superset/config.py index a89dd1bf01476..a3e27635b6324 100644 --- a/superset/config.py +++ b/superset/config.py @@ -357,8 +357,9 @@ # Maximum number of tables/views displayed in the dropdown window in SQL Lab. 
MAX_TABLE_NAMES = 3000 -# Adds a warning message on sqllab save query modal. +# Adds a warning message on sqllab save query and schedule query modals. SQLLAB_SAVE_WARNING_MESSAGE = None +SQLLAB_SCHEDULE_WARNING_MESSAGE = None # If defined, shows this text in an alert-warning box in the navbar # one example use case may be "STAGING" to make it clear that this is @@ -612,6 +613,15 @@ class CeleryConfig(object): 'presto': 'PrestoDBSQLValidator', } +# Do you want Talisman enabled? +TALISMAN_ENABLED = False +# If you want Talisman, how do you want it configured?? +TALISMAN_CONFIG = { + 'content_security_policy': None, + 'force_https': True, + 'force_https_permanent': False, +} + try: if CONFIG_PATH_ENV_VAR in os.environ: # Explicitly import config module that is not in pythonpath; useful diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py index 17ec4b82ebcf8..c71bc8061962c 100644 --- a/superset/connectors/druid/models.py +++ b/superset/connectors/druid/models.py @@ -932,7 +932,7 @@ def druid_type_from_adhoc_metric(adhoc_metric): if aggregate == 'count': return 'count' if aggregate == 'count_distinct': - return 'cardinality' + return 'hyperUnique' if column_type == 'hyperunique' else 'cardinality' else: return column_type + aggregate.capitalize() diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py index 4b05d70c7b4e3..99923e03162db 100644 --- a/superset/connectors/druid/views.py +++ b/superset/connectors/druid/views.py @@ -147,11 +147,11 @@ class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa def post_add(self, metric): if metric.is_restricted: - security_manager.merge_perm('metric_access', metric.get_perm()) + security_manager.add_permission_view_menu('metric_access', metric.get_perm()) def post_update(self, metric): if metric.is_restricted: - security_manager.merge_perm('metric_access', metric.get_perm()) + security_manager.add_permission_view_menu('metric_access', 
metric.get_perm()) appbuilder.add_view_no_menu(DruidMetricInlineView) @@ -202,7 +202,7 @@ class DruidClusterModelView(SupersetModelView, DeleteMixin, YamlExportMixin): # } def pre_add(self, cluster): - security_manager.merge_perm('database_access', cluster.perm) + security_manager.add_permission_view_menu('database_access', cluster.perm) def pre_update(self, cluster): self.pre_add(cluster) @@ -311,9 +311,15 @@ def pre_add(self, datasource): def post_add(self, datasource): datasource.refresh_metrics() - security_manager.merge_perm('datasource_access', datasource.get_perm()) + security_manager.add_permission_view_menu( + 'datasource_access', + datasource.get_perm(), + ) if datasource.schema: - security_manager.merge_perm('schema_access', datasource.schema_perm) + security_manager.add_permission_view_menu( + 'schema_access', + datasource.schema_perm, + ) def post_update(self, datasource): self.post_add(datasource) diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py index a7c77c2a512ee..2edf949397739 100644 --- a/superset/connectors/sqla/views.py +++ b/superset/connectors/sqla/views.py @@ -155,11 +155,11 @@ class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa def post_add(self, metric): if metric.is_restricted: - security_manager.merge_perm('metric_access', metric.get_perm()) + security_manager.add_permission_view_menu('metric_access', metric.get_perm()) def post_update(self, metric): if metric.is_restricted: - security_manager.merge_perm('metric_access', metric.get_perm()) + security_manager.add_permission_view_menu('metric_access', metric.get_perm()) appbuilder.add_view_no_menu(SqlMetricInlineView) @@ -283,9 +283,9 @@ def pre_add(self, table): def post_add(self, table, flash_message=True): table.fetch_metadata() - security_manager.merge_perm('datasource_access', table.get_perm()) + security_manager.add_permission_view_menu('datasource_access', table.get_perm()) if table.schema: - 
security_manager.merge_perm('schema_access', table.schema_perm) + security_manager.add_permission_view_menu('schema_access', table.schema_perm) if flash_message: flash(_( diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py index 35a591fa10202..04efef78b8f37 100644 --- a/superset/db_engine_specs.py +++ b/superset/db_engine_specs.py @@ -122,6 +122,7 @@ class BaseEngineSpec(object): force_column_alias_quotes = False arraysize = 0 max_column_name_length = 0 + try_remove_schema_from_table_name = True @classmethod def get_time_expr(cls, expr, pdf, time_grain, grain): @@ -279,33 +280,32 @@ def convert_dttm(cls, target_type, dttm): return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S')) @classmethod - def fetch_result_sets(cls, db, datasource_type): - """Returns a list of tables [schema1.table1, schema2.table2, ...] + def get_all_datasource_names(cls, db, datasource_type: str) \ + -> List[utils.DatasourceName]: + """Returns a list of all tables or views in database. - Datasource_type can be 'table' or 'view'. - Empty schema corresponds to the list of full names of the all - tables or views: .. 
+ :param db: Database instance + :param datasource_type: Datasource_type can be 'table' or 'view' + :return: List of all datasources in database or schema """ - schemas = db.all_schema_names(cache=db.schema_cache_enabled, - cache_timeout=db.schema_cache_timeout, - force=True) - all_result_sets = [] + schemas = db.get_all_schema_names(cache=db.schema_cache_enabled, + cache_timeout=db.schema_cache_timeout, + force=True) + all_datasources: List[utils.DatasourceName] = [] for schema in schemas: if datasource_type == 'table': - all_datasource_names = db.all_table_names_in_schema( + all_datasources += db.get_all_table_names_in_schema( schema=schema, force=True, cache=db.table_cache_enabled, cache_timeout=db.table_cache_timeout) elif datasource_type == 'view': - all_datasource_names = db.all_view_names_in_schema( + all_datasources += db.get_all_view_names_in_schema( schema=schema, force=True, cache=db.table_cache_enabled, cache_timeout=db.table_cache_timeout) else: raise Exception(f'Unsupported datasource_type: {datasource_type}') - all_result_sets += [ - '{}.{}'.format(schema, t) for t in all_datasource_names] - return all_result_sets + return all_datasources @classmethod def handle_cursor(cls, cursor, query, session): @@ -352,11 +352,17 @@ def get_schema_names(cls, inspector): @classmethod def get_table_names(cls, inspector, schema): - return sorted(inspector.get_table_names(schema)) + tables = inspector.get_table_names(schema) + if schema and cls.try_remove_schema_from_table_name: + tables = [re.sub(f'^{schema}\\.', '', table) for table in tables] + return sorted(tables) @classmethod def get_view_names(cls, inspector, schema): - return sorted(inspector.get_view_names(schema)) + views = inspector.get_view_names(schema) + if schema and cls.try_remove_schema_from_table_name: + views = [re.sub(f'^{schema}\\.', '', view) for view in views] + return sorted(views) @classmethod def get_columns(cls, inspector: Inspector, table_name: str, schema: str) -> list: @@ -528,6 +534,7 
@@ def convert_dttm(cls, target_type, dttm): class PostgresEngineSpec(PostgresBaseEngineSpec): engine = 'postgresql' max_column_name_length = 63 + try_remove_schema_from_table_name = False @classmethod def get_table_names(cls, inspector, schema): @@ -685,29 +692,25 @@ def epoch_to_dttm(cls): return "datetime({col}, 'unixepoch')" @classmethod - def fetch_result_sets(cls, db, datasource_type): - schemas = db.all_schema_names(cache=db.schema_cache_enabled, - cache_timeout=db.schema_cache_timeout, - force=True) - all_result_sets = [] + def get_all_datasource_names(cls, db, datasource_type: str) \ + -> List[utils.DatasourceName]: + schemas = db.get_all_schema_names(cache=db.schema_cache_enabled, + cache_timeout=db.schema_cache_timeout, + force=True) schema = schemas[0] if datasource_type == 'table': - all_datasource_names = db.all_table_names_in_schema( + return db.get_all_table_names_in_schema( schema=schema, force=True, cache=db.table_cache_enabled, cache_timeout=db.table_cache_timeout) elif datasource_type == 'view': - all_datasource_names = db.all_view_names_in_schema( + return db.get_all_view_names_in_schema( schema=schema, force=True, cache=db.table_cache_enabled, cache_timeout=db.table_cache_timeout) else: raise Exception(f'Unsupported datasource_type: {datasource_type}') - all_result_sets += [ - '{}.{}'.format(schema, t) for t in all_datasource_names] - return all_result_sets - @classmethod def convert_dttm(cls, target_type, dttm): iso = dttm.isoformat().replace('T', ' ') @@ -721,6 +724,50 @@ def get_table_names(cls, inspector, schema): return sorted(inspector.get_table_names()) +class DrillEngineSpec(BaseEngineSpec): + """Engine spec for Apache Drill""" + engine = 'drill' + + time_grain_functions = { + None: '{col}', + 'PT1S': "nearestDate({col}, 'SECOND')", + 'PT1M': "nearestDate({col}, 'MINUTE')", + 'PT15M': "nearestDate({col}, 'QUARTER_HOUR')", + 'PT0.5H': "nearestDate({col}, 'HALF_HOUR')", + 'PT1H': "nearestDate({col}, 'HOUR')", + 'P1D': 'TO_DATE({col})', + 
'P1W': "nearestDate({col}, 'WEEK_SUNDAY')", + 'P1M': "nearestDate({col}, 'MONTH')", + 'P0.25Y': "nearestDate({col}, 'QUARTER')", + 'P1Y': "nearestDate({col}, 'YEAR')", + } + + # Returns a function to convert a Unix timestamp in milliseconds to a date + @classmethod + def epoch_to_dttm(cls): + return cls.epoch_ms_to_dttm().replace('{col}', '({col}*1000)') + + @classmethod + def epoch_ms_to_dttm(cls): + return 'TO_DATE({col})' + + @classmethod + def convert_dttm(cls, target_type, dttm): + tt = target_type.upper() + if tt == 'DATE': + return "CAST('{}' AS DATE)".format(dttm.isoformat()[:10]) + elif tt == 'TIMESTAMP': + return "CAST('{}' AS TIMESTAMP)".format( + dttm.strftime('%Y-%m-%d %H:%M:%S')) + return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S')) + + @classmethod + def adjust_database_uri(cls, uri, selected_schema): + if selected_schema: + uri.database = parse.quote(selected_schema, safe='') + return uri + + class MySQLEngineSpec(BaseEngineSpec): engine = 'mysql' max_column_name_length = 64 @@ -1107,24 +1154,19 @@ def epoch_to_dttm(cls): return 'from_unixtime({col})' @classmethod - def fetch_result_sets(cls, db, datasource_type): - """Returns a list of tables [schema1.table1, schema2.table2, ...] - - Datasource_type can be 'table' or 'view'. - Empty schema corresponds to the list of full names of the all - tables or views: .. 
- """ - result_set_df = db.get_df( + def get_all_datasource_names(cls, db, datasource_type: str) \ + -> List[utils.DatasourceName]: + datasource_df = db.get_df( """SELECT table_schema, table_name FROM INFORMATION_SCHEMA.{}S ORDER BY concat(table_schema, '.', table_name)""".format( datasource_type.upper(), ), None) - result_sets = [] - for unused, row in result_set_df.iterrows(): - result_sets.append('{}.{}'.format( - row['table_schema'], row['table_name'])) - return result_sets + datasource_names: List[utils.DatasourceName] = [] + for unused, row in datasource_df.iterrows(): + datasource_names.append(utils.DatasourceName( + schema=row['table_schema'], table=row['table_name'])) + return datasource_names @classmethod def extra_table_metadata(cls, database, table_name, schema_name): @@ -1385,9 +1427,9 @@ def patch(cls): hive.Cursor.fetch_logs = patched_hive.fetch_logs @classmethod - def fetch_result_sets(cls, db, datasource_type): - return BaseEngineSpec.fetch_result_sets( - db, datasource_type) + def get_all_datasource_names(cls, db, datasource_type: str) \ + -> List[utils.DatasourceName]: + return BaseEngineSpec.get_all_datasource_names(db, datasource_type) @classmethod def fetch_data(cls, cursor, limit): diff --git a/superset/models/core.py b/superset/models/core.py index e16a234bfd723..047a3ddb11b11 100644 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -23,6 +23,7 @@ import json import logging import textwrap +from typing import List from flask import escape, g, Markup, request from flask_appbuilder import Model @@ -65,6 +66,7 @@ PASSWORD_MASK = 'X' * 10 + def set_related_perm(mapper, connection, target): # noqa src_class = target.cls_model id_ = target.datasource_id @@ -184,7 +186,7 @@ def clone(self): description=self.description, cache_timeout=self.cache_timeout) - @datasource.getter + @datasource.getter # type: ignore @utils.memoized def get_datasource(self): return ( @@ -210,7 +212,7 @@ def datasource_edit_url(self): datasource = 
self.datasource return datasource.url if datasource else None - @property + @property # type: ignore @utils.memoized def viz(self): d = json.loads(self.params) @@ -930,100 +932,87 @@ def inspector(self): @cache_util.memoized_func( key=lambda *args, **kwargs: 'db:{}:schema:None:table_list', attribute_in_key='id') - def all_table_names_in_database(self, cache=False, - cache_timeout=None, force=False): + def get_all_table_names_in_database(self, cache: bool = False, + cache_timeout: bool = None, + force=False) -> List[utils.DatasourceName]: """Parameters need to be passed as keyword arguments.""" if not self.allow_multi_schema_metadata_fetch: return [] - return self.db_engine_spec.fetch_result_sets(self, 'table') + return self.db_engine_spec.get_all_datasource_names(self, 'table') @cache_util.memoized_func( key=lambda *args, **kwargs: 'db:{}:schema:None:view_list', attribute_in_key='id') - def all_view_names_in_database(self, cache=False, - cache_timeout=None, force=False): + def get_all_view_names_in_database(self, cache: bool = False, + cache_timeout: bool = None, + force: bool = False) -> List[utils.DatasourceName]: """Parameters need to be passed as keyword arguments.""" if not self.allow_multi_schema_metadata_fetch: return [] - return self.db_engine_spec.fetch_result_sets(self, 'view') + return self.db_engine_spec.get_all_datasource_names(self, 'view') @cache_util.memoized_func( key=lambda *args, **kwargs: 'db:{{}}:schema:{}:table_list'.format( kwargs.get('schema')), attribute_in_key='id') - def all_table_names_in_schema(self, schema, cache=False, - cache_timeout=None, force=False): + def get_all_table_names_in_schema(self, schema: str, cache: bool = False, + cache_timeout: int = None, force: bool = False): """Parameters need to be passed as keyword arguments. For unused parameters, they are referenced in cache_util.memoized_func decorator. 
:param schema: schema name - :type schema: str :param cache: whether cache is enabled for the function - :type cache: bool :param cache_timeout: timeout in seconds for the cache - :type cache_timeout: int :param force: whether to force refresh the cache - :type force: bool - :return: table list - :rtype: list + :return: list of tables """ - tables = [] try: tables = self.db_engine_spec.get_table_names( inspector=self.inspector, schema=schema) + return [utils.DatasourceName(table=table, schema=schema) for table in tables] except Exception as e: logging.exception(e) - return tables @cache_util.memoized_func( key=lambda *args, **kwargs: 'db:{{}}:schema:{}:view_list'.format( kwargs.get('schema')), attribute_in_key='id') - def all_view_names_in_schema(self, schema, cache=False, - cache_timeout=None, force=False): + def get_all_view_names_in_schema(self, schema: str, cache: bool = False, + cache_timeout: int = None, force: bool = False): """Parameters need to be passed as keyword arguments. For unused parameters, they are referenced in cache_util.memoized_func decorator. 
:param schema: schema name - :type schema: str :param cache: whether cache is enabled for the function - :type cache: bool :param cache_timeout: timeout in seconds for the cache - :type cache_timeout: int :param force: whether to force refresh the cache - :type force: bool - :return: view list - :rtype: list + :return: list of views """ - views = [] try: views = self.db_engine_spec.get_view_names( inspector=self.inspector, schema=schema) + return [utils.DatasourceName(table=view, schema=schema) for view in views] except Exception as e: logging.exception(e) - return views @cache_util.memoized_func( key=lambda *args, **kwargs: 'db:{}:schema_list', attribute_in_key='id') - def all_schema_names(self, cache=False, cache_timeout=None, force=False): + def get_all_schema_names(self, cache: bool = False, cache_timeout: int = None, + force: bool = False) -> List[str]: """Parameters need to be passed as keyword arguments. For unused parameters, they are referenced in cache_util.memoized_func decorator. 
:param cache: whether cache is enabled for the function - :type cache: bool :param cache_timeout: timeout in seconds for the cache - :type cache_timeout: int :param force: whether to force refresh the cache - :type force: bool :return: schema list - :rtype: list """ return self.db_engine_spec.get_schema_names(self.inspector) @@ -1232,7 +1221,7 @@ def username(self): def datasource(self): return self.get_datasource - @datasource.getter + @datasource.getter # type: ignore @utils.memoized def get_datasource(self): # pylint: disable=no-member diff --git a/superset/security.py b/superset/security.py index b30b2e516cbaa..89eab5d53bf16 100644 --- a/superset/security.py +++ b/superset/security.py @@ -17,6 +17,7 @@ # pylint: disable=C,R,W """A set of constants and methods to manage permissions and security""" import logging +from typing import List from flask import g from flask_appbuilder.security.sqla import models as ab_models @@ -26,6 +27,7 @@ from superset import sql_parse from superset.connectors.connector_registry import ConnectorRegistry from superset.exceptions import SupersetSecurityException +from superset.utils.core import DatasourceName class SupersetSecurityManager(SecurityManager): @@ -240,7 +242,9 @@ def schemas_accessible_by_user(self, database, schemas, hierarchical=True): subset.add(t.schema) return sorted(list(subset)) - def accessible_by_user(self, database, datasource_names, schema=None): + def get_datasources_accessible_by_user( + self, database, datasource_names: List[DatasourceName], + schema: str = None) -> List[DatasourceName]: from superset import db if self.database_access(database) or self.all_datasource_access(): return datasource_names @@ -263,26 +267,22 @@ def accessible_by_user(self, database, datasource_names, schema=None): return [d for d in datasource_names if d in full_names] def merge_perm(self, permission_name, view_menu_name): - # Implementation copied from sm.find_permission_view_menu. 
- # TODO: use sm.find_permission_view_menu once issue - # https://github.com/airbnb/superset/issues/1944 is resolved. - permission = self.find_permission(permission_name) - view_menu = self.find_view_menu(view_menu_name) - pv = None - if permission and view_menu: - pv = self.get_session.query(self.permissionview_model).filter_by( - permission=permission, view_menu=view_menu).first() - if not pv and permission_name and view_menu_name: - self.add_permission_view_menu(permission_name, view_menu_name) + logging.warning( + "This method 'merge_perm' is deprecated use add_permission_view_menu", + ) + self.add_permission_view_menu(permission_name, view_menu_name) def is_user_defined_permission(self, perm): return perm.permission.name in self.OBJECT_SPEC_PERMISSIONS def create_custom_permissions(self): # Global perms - self.merge_perm('all_datasource_access', 'all_datasource_access') - self.merge_perm('all_database_access', 'all_database_access') - self.merge_perm('can_only_access_owned_queries', 'can_only_access_owned_queries') + self.add_permission_view_menu('all_datasource_access', 'all_datasource_access') + self.add_permission_view_menu('all_database_access', 'all_database_access') + self.add_permission_view_menu( + 'can_only_access_owned_queries', + 'can_only_access_owned_queries', + ) def create_missing_perms(self): """Creates missing perms for datasources, schemas and metrics""" @@ -299,7 +299,7 @@ def create_missing_perms(self): def merge_pv(view_menu, perm): """Create permission view menu only if it doesn't exist""" if view_menu and perm and (view_menu, perm) not in all_pvs: - self.merge_perm(view_menu, perm) + self.add_permission_view_menu(view_menu, perm) logging.info('Creating missing datasource permissions.') datasources = ConnectorRegistry.get_all_datasources(db.session) diff --git a/superset/utils/core.py b/superset/utils/core.py index 3b4145793939a..2defa70dd179e 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -32,7 +32,7 @@ import 
smtplib import sys from time import struct_time -from typing import List, Optional, Tuple +from typing import List, NamedTuple, Optional, Tuple from urllib.parse import unquote_plus import uuid import zlib @@ -1100,3 +1100,8 @@ def MediumText() -> Variant: def shortid() -> str: return '{}'.format(uuid.uuid4())[-12:] + + +class DatasourceName(NamedTuple): + table: str + schema: str diff --git a/superset/views/core.py b/superset/views/core.py index d66e7aad9f87e..7b9cbdca94fd5 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -15,14 +15,14 @@ # specific language governing permissions and limitations # under the License. # pylint: disable=C,R,W +from contextlib import closing from datetime import datetime, timedelta import inspect import logging import os import re -import time import traceback -from typing import List # noqa: F401 +from typing import Dict, List # noqa: F401 from urllib import parse from flask import ( @@ -36,8 +36,7 @@ from flask_babel import lazy_gettext as _ import pandas as pd import simplejson as json -import sqlalchemy as sqla -from sqlalchemy import and_, create_engine, MetaData, or_, update +from sqlalchemy import and_, create_engine, MetaData, or_, select from sqlalchemy.engine.url import make_url from sqlalchemy.exc import IntegrityError from werkzeug.routing import BaseConverter @@ -315,10 +314,10 @@ class DatabaseView(SupersetModelView, DeleteMixin, YamlExportMixin): # noqa def pre_add(self, db): self.check_extra(db) db.set_sqlalchemy_uri(db.sqlalchemy_uri) - security_manager.merge_perm('database_access', db.perm) + security_manager.add_permission_view_menu('database_access', db.perm) # adding a new database we always want to force refresh schema list - for schema in db.all_schema_names(): - security_manager.merge_perm( + for schema in db.get_all_schema_names(): + security_manager.add_permission_view_menu( 'schema_access', security_manager.get_schema_perm(db, schema)) def pre_update(self, db): @@ -1346,12 +1345,12 @@ def 
explore(self, datasource_type=None, datasource_id=None): if action == 'overwrite' and not slice_overwrite_perm: return json_error_response( - _('You don\'t have the rights to ') + _('alter this ') + _('chart'), + _("You don't have the rights to ") + _('alter this ') + _('chart'), status=400) if action == 'saveas' and not slice_add_perm: return json_error_response( - _('You don\'t have the rights to ') + _('create a ') + _('chart'), + _("You don't have the rights to ") + _('create a ') + _('chart'), status=400) if action in ('saveas', 'overwrite'): @@ -1458,7 +1457,7 @@ def save_or_overwrite_slice( dash_overwrite_perm = check_ownership(dash, raise_if_false=False) if not dash_overwrite_perm: return json_error_response( - _('You don\'t have the rights to ') + _('alter this ') + + _("You don't have the rights to ") + _('alter this ') + _('dashboard'), status=400) @@ -1472,7 +1471,7 @@ def save_or_overwrite_slice( dash_add_perm = security_manager.can_access('can_add', 'DashboardModelView') if not dash_add_perm: return json_error_response( - _('You don\'t have the rights to ') + _('create a ') + _('dashboard'), + _("You don't have the rights to ") + _('create a ') + _('dashboard'), status=400) dash = models.Dashboard( @@ -1551,7 +1550,7 @@ def schemas(self, db_id, force_refresh='false'): .first() ) if database: - schemas = database.all_schema_names( + schemas = database.get_all_schema_names( cache=database.schema_cache_enabled, cache_timeout=database.schema_cache_timeout, force=force_refresh) @@ -1576,50 +1575,57 @@ def tables(self, db_id, schema, substr, force_refresh='false'): database = db.session.query(models.Database).filter_by(id=db_id).one() if schema: - table_names = database.all_table_names_in_schema( + tables = database.get_all_table_names_in_schema( schema=schema, force=force_refresh, cache=database.table_cache_enabled, - cache_timeout=database.table_cache_timeout) - view_names = database.all_view_names_in_schema( + cache_timeout=database.table_cache_timeout) 
or [] + views = database.get_all_view_names_in_schema( schema=schema, force=force_refresh, cache=database.table_cache_enabled, - cache_timeout=database.table_cache_timeout) + cache_timeout=database.table_cache_timeout) or [] else: - table_names = database.all_table_names_in_database( + tables = database.get_all_table_names_in_database( cache=True, force=False, cache_timeout=24 * 60 * 60) - view_names = database.all_view_names_in_database( + views = database.get_all_view_names_in_database( cache=True, force=False, cache_timeout=24 * 60 * 60) - table_names = security_manager.accessible_by_user(database, table_names, schema) - view_names = security_manager.accessible_by_user(database, view_names, schema) + tables = security_manager.get_datasources_accessible_by_user( + database, tables, schema) + views = security_manager.get_datasources_accessible_by_user( + database, views, schema) + + def get_datasource_label(ds_name: utils.DatasourceName) -> str: + return ds_name.table if schema else f'{ds_name.schema}.{ds_name.table}' if substr: - table_names = [tn for tn in table_names if substr in tn] - view_names = [vn for vn in view_names if substr in vn] + tables = [tn for tn in tables if substr in get_datasource_label(tn)] + views = [vn for vn in views if substr in get_datasource_label(vn)] if not schema and database.default_schemas: - def get_schema(tbl_or_view_name): - return tbl_or_view_name.split('.')[0] if '.' 
in tbl_or_view_name else None - user_schema = g.user.email.split('@')[0] valid_schemas = set(database.default_schemas + [user_schema]) - table_names = [tn for tn in table_names if get_schema(tn) in valid_schemas] - view_names = [vn for vn in view_names if get_schema(vn) in valid_schemas] + tables = [tn for tn in tables if tn.schema in valid_schemas] + views = [vn for vn in views if vn.schema in valid_schemas] - max_items = config.get('MAX_TABLE_NAMES') or len(table_names) - total_items = len(table_names) + len(view_names) - max_tables = len(table_names) - max_views = len(view_names) + max_items = config.get('MAX_TABLE_NAMES') or len(tables) + total_items = len(tables) + len(views) + max_tables = len(tables) + max_views = len(views) if total_items and substr: - max_tables = max_items * len(table_names) // total_items - max_views = max_items * len(view_names) // total_items - - table_options = [{'value': tn, 'label': tn} - for tn in table_names[:max_tables]] - table_options.extend([{'value': vn, 'label': '[view] {}'.format(vn)} - for vn in view_names[:max_views]]) + max_tables = max_items * len(tables) // total_items + max_views = max_items * len(views) // total_items + + def get_datasource_value(ds_name: utils.DatasourceName) -> Dict[str, str]: + return {'schema': ds_name.schema, 'table': ds_name.table} + + table_options = [{'value': get_datasource_value(tn), + 'label': get_datasource_label(tn)} + for tn in tables[:max_tables]] + table_options.extend([{'value': get_datasource_value(vn), + 'label': f'[view] {get_datasource_label(vn)}'} + for vn in views[:max_views]]) payload = { - 'tableLength': len(table_names) + len(view_names), + 'tableLength': len(tables) + len(views), 'options': table_options, } return json_success(json.dumps(payload)) @@ -1818,8 +1824,9 @@ def testconn(self): connect_args['configuration'] = configuration engine = create_engine(uri, **engine_params) - engine.connect() - return json_success(json.dumps(engine.table_names(), indent=4)) + + with 
closing(engine.connect()) as conn: + return json_success(json.dumps(conn.scalar(select([1])))) except Exception as e: logging.exception(e) return json_error_response(( @@ -1849,7 +1856,7 @@ def recent_activity(self, user_id): M.Slice.id == M.Log.slice_id, ) .filter( - sqla.and_( + and_( ~M.Log.action.in_(('queries', 'shortner', 'sql_json')), M.Log.user_id == user_id, ), @@ -1919,7 +1926,7 @@ def fave_dashboards(self, user_id): ) .join( models.FavStar, - sqla.and_( + and_( models.FavStar.user_id == int(user_id), models.FavStar.class_name == 'Dashboard', models.Dashboard.id == models.FavStar.obj_id, @@ -1957,7 +1964,7 @@ def created_dashboards(self, user_id): Dash, ) .filter( - sqla.or_( + or_( Dash.created_by_fk == user_id, Dash.changed_by_fk == user_id, ), @@ -1990,13 +1997,13 @@ def user_slices(self, user_id=None): db.session.query(Slice, FavStar.dttm).join( models.FavStar, - sqla.and_( + and_( models.FavStar.user_id == int(user_id), models.FavStar.class_name == 'slice', models.Slice.id == models.FavStar.obj_id, ), isouter=True).filter( - sqla.or_( + or_( Slice.created_by_fk == user_id, Slice.changed_by_fk == user_id, FavStar.user_id == user_id, @@ -2027,7 +2034,7 @@ def created_slices(self, user_id=None): qry = ( db.session.query(Slice) .filter( - sqla.or_( + or_( Slice.created_by_fk == user_id, Slice.changed_by_fk == user_id, ), @@ -2059,7 +2066,7 @@ def fave_slices(self, user_id=None): ) .join( models.FavStar, - sqla.and_( + and_( models.FavStar.user_id == int(user_id), models.FavStar.class_name == 'slice', models.Slice.id == models.FavStar.obj_id, @@ -2815,35 +2822,6 @@ def queries_call(self, last_updated_ms): .all() ) dict_queries = {q.client_id: q.to_dict() for q in sql_queries} - - now = int(round(time.time() * 1000)) - - unfinished_states = [ - QueryStatus.PENDING, - QueryStatus.RUNNING, - ] - - queries_to_timeout = [ - client_id for client_id, query_dict in dict_queries.items() - if ( - query_dict['state'] in unfinished_states and ( - now - 
query_dict['startDttm'] > - config.get('SQLLAB_ASYNC_TIME_LIMIT_SEC') * 1000 - ) - ) - ] - - if queries_to_timeout: - update(Query).where( - and_( - Query.user_id == g.user.get_id(), - Query.client_id in queries_to_timeout, - ), - ).values(state=QueryStatus.TIMED_OUT) - - for client_id in queries_to_timeout: - dict_queries[client_id]['status'] = QueryStatus.TIMED_OUT - return json_success( json.dumps(dict_queries, default=utils.json_int_dttm_ser)) diff --git a/superset/views/sql_lab.py b/superset/views/sql_lab.py index 37c84c2705253..98046f891d449 100644 --- a/superset/views/sql_lab.py +++ b/superset/views/sql_lab.py @@ -122,11 +122,12 @@ def pre_update(self, obj): def show(self, pk): pk = self._deserialize_pk_if_composite(pk) widgets = self._show(pk) - extra_json = self.datamodel.get(pk).extra_json + query = self.datamodel.get(pk).to_json() + query['extra_json'] = json.loads(query['extra_json']) payload = { 'common': { 'feature_flags': get_feature_flags(), - 'extra_json': json.loads(extra_json), + 'query': query, }, } diff --git a/superset/viz.py b/superset/viz.py index 838b4a5bae882..c193daa29e4a6 100644 --- a/superset/viz.py +++ b/superset/viz.py @@ -855,7 +855,7 @@ class BoxPlotViz(NVD3Viz): viz_type = 'box_plot' verbose_name = _('Box Plot') sort_series = False - is_timeseries = True + is_timeseries = False def to_series(self, df, classed='', title_suffix=''): label_sep = ' - ' @@ -1767,9 +1767,7 @@ def get_data(self, df): columns = ['country', 'm1', 'm2'] if metric == secondary_metric: ndf = df[cols] - # df[metric] will be a DataFrame - # because there are duplicate column names - ndf['m1'] = df[metric].iloc[:, 0] + ndf['m1'] = df[metric] ndf['m2'] = ndf['m1'] else: if secondary_metric: diff --git a/tests/access_tests.py b/tests/access_tests.py index f608f00a3b25d..0bc1743cfe1ff 100644 --- a/tests/access_tests.py +++ b/tests/access_tests.py @@ -273,7 +273,7 @@ def test_clean_requests_after_db_grant(self): # gamma gets granted database access database = 
session.query(models.Database).first() - security_manager.merge_perm('database_access', database.perm) + security_manager.add_permission_view_menu('database_access', database.perm) ds_perm_view = security_manager.find_permission_view_menu( 'database_access', database.perm) security_manager.add_permission_role( @@ -309,7 +309,7 @@ def test_clean_requests_after_schema_grant(self): table_name='wb_health_population').first() ds.schema = 'temp_schema' - security_manager.merge_perm('schema_access', ds.schema_perm) + security_manager.add_permission_view_menu('schema_access', ds.schema_perm) schema_perm_view = security_manager.find_permission_view_menu( 'schema_access', ds.schema_perm) security_manager.add_permission_role( diff --git a/tests/db_engine_specs_test.py b/tests/db_engine_specs_test.py index e0d914f0b51e4..0372366a2a48e 100644 --- a/tests/db_engine_specs_test.py +++ b/tests/db_engine_specs_test.py @@ -108,7 +108,7 @@ def test_hive_error_msg(self): '{...} errorMessage="Error while compiling statement: FAILED: ' 'SemanticException [Error 10001]: Line 4' ':5 Table not found \'fact_ridesfdslakj\'", statusCode=3, ' - 'sqlState=\'42S02\', errorCode=10001)){...}') + "sqlState='42S02', errorCode=10001)){...}") self.assertEquals(( 'Error while compiling statement: FAILED: ' 'SemanticException [Error 10001]: Line 4:5 ' @@ -464,3 +464,22 @@ def test_mssql_where_clause_n_prefix(self): query = str(sel.compile(dialect=dialect, compile_kwargs={'literal_binds': True})) query_expected = "SELECT col, unicode_col \nFROM tbl \nWHERE col = 'abc' AND unicode_col = N'abc'" # noqa self.assertEqual(query, query_expected) + + def test_get_table_names(self): + inspector = mock.Mock() + inspector.get_table_names = mock.Mock(return_value=['schema.table', 'table_2']) + inspector.get_foreign_table_names = mock.Mock(return_value=['table_3']) + + """ Make sure base engine spec removes schema name from table name + ie. when try_remove_schema_from_table_name == True. 
""" + base_result_expected = ['table', 'table_2'] + base_result = db_engine_specs.BaseEngineSpec.get_table_names( + schema='schema', inspector=inspector) + self.assertListEqual(base_result_expected, base_result) + + """ Make sure postgres doesn't try to remove schema name from table name + ie. when try_remove_schema_from_table_name == False. """ + pg_result_expected = ['schema.table', 'table_2', 'table_3'] + pg_result = db_engine_specs.PostgresEngineSpec.get_table_names( + schema='schema', inspector=inspector) + self.assertListEqual(pg_result_expected, pg_result) diff --git a/tests/druid_func_tests.py b/tests/druid_func_tests.py index cf0c5e99fc925..2a7a5af8b7b5a 100644 --- a/tests/druid_func_tests.py +++ b/tests/druid_func_tests.py @@ -771,6 +771,13 @@ def test_druid_type_from_adhoc_metric(self): }) assert(druid_type == 'cardinality') + druid_type = DruidDatasource.druid_type_from_adhoc_metric({ + 'column': {'type': 'hyperUnique', 'column_name': 'value'}, + 'aggregate': 'COUNT_DISTINCT', + 'label': 'My Adhoc Metric', + }) + assert(druid_type == 'hyperUnique') + def test_run_query_order_by_metrics(self): client = Mock() client.query_builder.last_query.query_dict = {'mock': 0} diff --git a/tests/druid_tests.py b/tests/druid_tests.py index 78d1bb8635d2b..164b16ad5f38c 100644 --- a/tests/druid_tests.py +++ b/tests/druid_tests.py @@ -289,8 +289,8 @@ def test_filter_druid_datasource(self): db.session.merge(no_gamma_ds) db.session.commit() - security_manager.merge_perm('datasource_access', gamma_ds.perm) - security_manager.merge_perm('datasource_access', no_gamma_ds.perm) + security_manager.add_permission_view_menu('datasource_access', gamma_ds.perm) + security_manager.add_permission_view_menu('datasource_access', no_gamma_ds.perm) perm = security_manager.find_permission_view_menu( 'datasource_access', gamma_ds.get_perm()) diff --git a/tests/viz_tests.py b/tests/viz_tests.py index facb8c3a1525d..7c7875dbf7071 100644 --- a/tests/viz_tests.py +++ b/tests/viz_tests.py @@ 
-258,7 +258,7 @@ def test_parse_adhoc_filters(self): { 'expressionType': 'SQL', 'clause': 'WHERE', - 'sqlExpression': 'value3 in (\'North America\')', + 'sqlExpression': "value3 in ('North America')", }, ], } @@ -273,7 +273,7 @@ def test_parse_adhoc_filters(self): [{'op': '<', 'val': '10', 'col': 'SUM(value1)'}], query_obj['extras']['having_druid'], ) - self.assertEqual('(value3 in (\'North America\'))', query_obj['extras']['where']) + self.assertEqual("(value3 in ('North America'))", query_obj['extras']['where']) self.assertEqual('(SUM(value1) > 5)', query_obj['extras']['having']) def test_adhoc_filters_overwrite_legacy_filters(self): @@ -295,7 +295,7 @@ def test_adhoc_filters_overwrite_legacy_filters(self): { 'expressionType': 'SQL', 'clause': 'WHERE', - 'sqlExpression': 'value3 in (\'North America\')', + 'sqlExpression': "value3 in ('North America')", }, ], 'having': 'SUM(value1) > 5', @@ -311,7 +311,7 @@ def test_adhoc_filters_overwrite_legacy_filters(self): [], query_obj['extras']['having_druid'], ) - self.assertEqual('(value3 in (\'North America\'))', query_obj['extras']['where']) + self.assertEqual("(value3 in ('North America'))", query_obj['extras']['where']) self.assertEqual('', query_obj['extras']['having']) @patch('superset.viz.BaseViz.query_obj')