diff --git a/.coveralls.yml b/.coveralls.yml
index 273f84b694e76..e916d8ed05cc3 100644
--- a/.coveralls.yml
+++ b/.coveralls.yml
@@ -1 +1 @@
-repo_token: eESbYiv4An6KEvjpmguDs4L7YkubXbqn1
+repo_token: 4P9MpvLrZfJKzHdGZsdV3MzO43OZJgYFn
diff --git a/.travis.yml b/.travis.yml
index dcf700fd0c8cd..914c4b63b4482 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -10,7 +10,7 @@ cache:
env:
global:
- TRAVIS_CACHE=$HOME/.travis_cache/
- - TRAVIS_NODE_VERSION="6.10.2"
+ - TRAVIS_NODE_VERSION="7.10.0"
matrix:
- TOX_ENV=javascript
- TOX_ENV=pylint
@@ -19,7 +19,7 @@ env:
- TOX_ENV=py27-mysql
- TOX_ENV=py27-sqlite
before_install:
- - npm install -g npm@'>=4.5.0'
+ - npm install -g npm@'>=5.0.3'
before_script:
- mysql -e 'drop database if exists superset; create database superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci' -u root
- mysql -u root -e "CREATE USER 'mysqluser'@'localhost' IDENTIFIED BY 'mysqluserpassword';"
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d909d5d4a1c27..27c70dd208e25 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -70,7 +70,7 @@ meets these guidelines:
## Documentation
-The latest documentation and tutorial are available [here](http://airbnb.io/superset).
+The latest documentation and tutorial are available [here](https://superset.incubator.apache.org/).
Contributing to the official documentation is relatively easy, once you've setup
your environment and done an edit end-to-end. The docs can be found in the
@@ -144,7 +144,7 @@ referenced in the rst, e.g.
aren't actually included in that directory. _Instead_, you'll want to add and commit
images (and any other static assets) to the _superset/assets/images_ directory.
-When the docs are being pushed to [airbnb.io](http://airbnb.io/superset/), images
+When the docs are being pushed to [Apache Superset (incubating)](https://superset.incubator.apache.org/), images
will be moved from there to the _\_static/img_ directory, just like they're referenced
in the docs.
@@ -161,12 +161,12 @@ instead.
## Setting up a Python development environment
-Check the [OS dependencies](http://airbnb.io/superset/installation.html#os-dependencies) before follows these steps.
+Check the [OS dependencies](https://superset.incubator.apache.org/installation.html#os-dependencies) before follows these steps.
# fork the repo on GitHub and then clone it
# alternatively you may want to clone the main repo but that won't work
# so well if you are planning on sending PRs
- # git clone git@github.com:airbnb/superset.git
+ # git clone git@github.com:apache/incubator-superset.git
# [optional] setup a virtual env and activate it
virtualenv env
@@ -223,8 +223,13 @@ To install third party libraries defined in `package.json`, run the
following within the `superset/assets/` directory which will install them in a
new `node_modules/` folder within `assets/`.
-```
-npm install
+```bash
+# from the root of the repository, move to where our JS package.json lives
+cd superset/assets/
+# install yarn, a replacement for `npm install` that is faster and more deterministic
+npm install -g yarn
+# run yarn to fetch all the dependencies
+yarn
```
To parse and generate bundled files for superset, run either of the
@@ -342,7 +347,7 @@ new language dictionary, run the following command:
pybabel init -i ./babel/messages.pot -d superset/translations -l es
-Then it's a matter of running the statement below to gather all stings that
+Then it's a matter of running the statement below to gather all strings that
need translation
fabmanager babel-extract --target superset/translations/
@@ -374,4 +379,4 @@ to take effect, they need to be compiled using this command:
Here's an example as a Github PR with comments that describe what the
different sections of the code do:
-https://github.com/airbnb/superset/pull/3013
+https://github.com/apache/incubator-superset/pull/3013
diff --git a/README.md b/README.md
index 013ec74f9473a..7382789f7159c 100644
--- a/README.md
+++ b/README.md
@@ -1,17 +1,14 @@
Superset
=========
-[![Build Status](https://travis-ci.org/airbnb/superset.svg?branch=master)](https://travis-ci.org/airbnb/superset)
+[![Build Status](https://travis-ci.org/apache/incubator-superset.svg?branch=master)](https://travis-ci.org/apache/incubator-superset)
[![PyPI version](https://badge.fury.io/py/superset.svg)](https://badge.fury.io/py/superset)
-[![Coverage Status](https://coveralls.io/repos/airbnb/superset/badge.svg?branch=master&service=github)](https://coveralls.io/github/airbnb/superset?branch=master)
-[![JS Test Coverage](https://codeclimate.com/github/airbnb/superset/badges/coverage.svg)](https://codeclimate.com/github/airbnb/superset/coverage)
-[![Code Health](https://landscape.io/github/airbnb/superset/master/landscape.svg?style=flat)](https://landscape.io/github/airbnb/superset/master)
-[![Code Climate](https://codeclimate.com/github/airbnb/superset/badges/gpa.svg)](https://codeclimate.com/github/airbnb/superset)
+[![Coverage Status](https://coveralls.io/repos/apache/incubator-superset/badge.svg?branch=master&service=github)](https://coveralls.io/github/apache/incubator-superset?branch=master)
[![PyPI](https://img.shields.io/pypi/pyversions/superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/superset)
-[![Requirements Status](https://requires.io/github/airbnb/superset/requirements.svg?branch=master)](https://requires.io/github/airbnb/superset/requirements/?branch=master)
-[![Join the chat at https://gitter.im/airbnb/superset](https://badges.gitter.im/airbnb/superset.svg)](https://gitter.im/airbnb/superset?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
-[![Documentation](https://img.shields.io/badge/docs-airbnb.io-blue.svg)](http://airbnb.io/superset/)
-[![dependencies Status](https://david-dm.org/airbnb/superset/status.svg?path=superset/assets)](https://david-dm.org/airbnb/superset?path=superset/assets)
+[![Requirements Status](https://requires.io/github/apache/incubator-superset/requirements.svg?branch=master)](https://requires.io/github/apache/incubator-superset/requirements/?branch=master)
+[![Join the chat at https://gitter.im/apache/incubator-superset](https://badges.gitter.im/apache/incubator-superset.svg)](https://gitter.im/apache/incubator-superset?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
+[![Documentation](https://img.shields.io/badge/docs-apache.org-blue.svg)](https://superset.incubator.apache.org)
+[![dependencies Status](https://david-dm.org/apache/incubator-superset/status.svg?path=superset/assets)](https://david-dm.org/apache/incubator-superset?path=superset/assets)
=0.3.0',
+ 'PyHive>=0.4.0',
'python-dateutil==2.6.0',
'requests==2.17.3',
'simplejson==3.10.0',
diff --git a/superset/assets/backendSync.json b/superset/assets/backendSync.json
index 1d8ba55fa746d..71e7130328492 100644
--- a/superset/assets/backendSync.json
+++ b/superset/assets/backendSync.json
@@ -750,6 +750,12 @@
"default": false,
"description": "Sort bars by x labels."
},
+ "combine_metric": {
+ "type": "CheckboxControl",
+ "label": "Combine Metrics",
+ "default": false,
+ "description": "Display metrics side by side within each column, as opposed to each column being displayed side by side for each metric."
+ },
"show_controls": {
"type": "CheckboxControl",
"label": "Extra Controls",
diff --git a/superset/assets/images/viz_thumbnails/chord.png b/superset/assets/images/viz_thumbnails/chord.png
new file mode 100644
index 0000000000000..a4a30b6aebc63
Binary files /dev/null and b/superset/assets/images/viz_thumbnails/chord.png differ
diff --git a/superset/assets/images/viz_thumbnails/event_flow.png b/superset/assets/images/viz_thumbnails/event_flow.png
new file mode 100644
index 0000000000000..45765295be003
Binary files /dev/null and b/superset/assets/images/viz_thumbnails/event_flow.png differ
diff --git a/superset/assets/javascripts/SqlLab/components/ResultSet.jsx b/superset/assets/javascripts/SqlLab/components/ResultSet.jsx
index c9814ec214adc..79c6d9c8c4355 100644
--- a/superset/assets/javascripts/SqlLab/components/ResultSet.jsx
+++ b/superset/assets/javascripts/SqlLab/components/ResultSet.jsx
@@ -35,7 +35,7 @@ export default class ResultSet extends React.PureComponent {
this.state = {
searchText: '',
showModal: false,
- data: [],
+ data: null,
height: props.search ? props.height - RESULT_SET_CONTROLS_HEIGHT : props.height,
};
}
@@ -146,30 +146,12 @@ export default class ResultSet extends React.PureComponent {
const query = this.props.query;
let sql;
- if (query.state === 'stopped') {
- return Query was stopped ;
- }
-
if (this.props.showSql) {
sql = ;
}
- if (['running', 'pending', 'fetching'].indexOf(query.state) > -1) {
- let progressBar;
- if (query.progress > 0 && query.state === 'running') {
- progressBar = (
- );
- }
- return (
-
-
-
- {progressBar}
-
- );
+
+ if (query.state === 'stopped') {
+ return Query was stopped ;
} else if (query.state === 'failed') {
return {query.errorMessage} ;
} else if (query.state === 'success' && query.ctas) {
@@ -192,10 +174,10 @@ export default class ResultSet extends React.PureComponent {
let data;
if (this.props.cache && query.cached) {
data = this.state.data;
- } else {
- data = results ? results.data : [];
+ } else if (results && results.data) {
+ data = results.data;
}
- if (results && data.length > 0) {
+ if (data && data.length > 0) {
return (
);
+ } else if (data && data.length === 0) {
+ return The query returned no data ;
}
}
if (query.cached) {
@@ -226,7 +210,36 @@ export default class ResultSet extends React.PureComponent {
);
}
- return The query returned no data ;
+ let progressBar;
+ let trackingUrl;
+ if (query.progress > 0 && query.state === 'running') {
+ progressBar = (
+ );
+ }
+ if (query.trackingUrl) {
+ trackingUrl = (
+ { window.open(query.trackingUrl); }}
+ >
+ Track Job
+
+ );
+ }
+ return (
+
+
+
+ {progressBar}
+
+ {trackingUrl}
+
+
+ );
}
}
ResultSet.propTypes = propTypes;
diff --git a/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx b/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx
index 2ebfef9318a1a..dce820cd677a5 100644
--- a/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx
+++ b/superset/assets/javascripts/SqlLab/components/VisualizeModal.jsx
@@ -146,7 +146,6 @@ class VisualizeModal extends React.PureComponent {
this.props.actions.createDatasource(this.buildVizOptions(), this)
.done(() => {
const columns = Object.keys(this.state.columns).map(k => this.state.columns[k]);
- const mainMetric = columns.filter(d => d.agg)[0];
const mainGroupBy = columns.filter(d => d.is_dim)[0];
const formData = {
datasource: this.props.datasource,
@@ -154,10 +153,6 @@ class VisualizeModal extends React.PureComponent {
since: '100 years ago',
limit: '0',
};
- if (mainMetric) {
- formData.metrics = [mainMetric.name];
- formData.metric = mainMetric.name;
- }
if (mainGroupBy) {
formData.groupby = [mainGroupBy.name];
}
diff --git a/superset/assets/javascripts/SqlLab/index.jsx b/superset/assets/javascripts/SqlLab/index.jsx
index e292c2576c1ae..ba09924720177 100644
--- a/superset/assets/javascripts/SqlLab/index.jsx
+++ b/superset/assets/javascripts/SqlLab/index.jsx
@@ -11,7 +11,7 @@ import App from './components/App';
import { appSetup } from '../common';
import './main.css';
-import './reactable-pagination.css';
+import '../../stylesheets/reactable-pagination.css';
import '../components/FilterableTable/FilterableTableStyles.css';
appSetup();
diff --git a/superset/assets/javascripts/SqlLab/main.css b/superset/assets/javascripts/SqlLab/main.css
index a3ad7dbe6b445..ad2bb37c0e3e6 100644
--- a/superset/assets/javascripts/SqlLab/main.css
+++ b/superset/assets/javascripts/SqlLab/main.css
@@ -265,7 +265,7 @@ div.tablePopover:hover {
}
.QueryTable .label {
- margin-top: 5px;
+ display: inline-block;
}
.ResultsModal .modal-body {
diff --git a/superset/assets/javascripts/components/EditableTitle.jsx b/superset/assets/javascripts/components/EditableTitle.jsx
index 70046f87ca1e2..9d71388828ad2 100644
--- a/superset/assets/javascripts/components/EditableTitle.jsx
+++ b/superset/assets/javascripts/components/EditableTitle.jsx
@@ -23,6 +23,7 @@ class EditableTitle extends React.PureComponent {
this.handleClick = this.handleClick.bind(this);
this.handleBlur = this.handleBlur.bind(this);
this.handleChange = this.handleChange.bind(this);
+ this.handleKeyPress = this.handleKeyPress.bind(this);
}
handleClick() {
if (!this.props.canEdit) {
@@ -58,6 +59,13 @@ class EditableTitle extends React.PureComponent {
title: ev.target.value,
});
}
+ handleKeyPress(ev) {
+ if (ev.key === 'Enter') {
+ ev.preventDefault();
+
+ this.handleBlur();
+ }
+ }
render() {
return (
@@ -72,6 +80,7 @@ class EditableTitle extends React.PureComponent {
onChange={this.handleChange}
onBlur={this.handleBlur}
onClick={this.handleClick}
+ onKeyPress={this.handleKeyPress}
/>
diff --git a/superset/assets/javascripts/components/InfoTooltipWithTrigger.jsx b/superset/assets/javascripts/components/InfoTooltipWithTrigger.jsx
index 07b4db473e3a6..85bc7fb50d1af 100644
--- a/superset/assets/javascripts/components/InfoTooltipWithTrigger.jsx
+++ b/superset/assets/javascripts/components/InfoTooltipWithTrigger.jsx
@@ -8,18 +8,23 @@ const propTypes = {
tooltip: PropTypes.string.isRequired,
icon: PropTypes.string,
className: PropTypes.string,
+ onClick: PropTypes.func,
};
const defaultProps = {
icon: 'question-circle-o',
};
-export default function InfoTooltipWithTrigger({ label, tooltip, icon, className }) {
+export default function InfoTooltipWithTrigger({ label, tooltip, icon, className, onClick }) {
return (
{tooltip}}
>
-
+
);
}
diff --git a/superset/assets/javascripts/dashboard/Dashboard.jsx b/superset/assets/javascripts/dashboard/Dashboard.jsx
index 8b0a0e1c69114..b6c97b68ecf6d 100644
--- a/superset/assets/javascripts/dashboard/Dashboard.jsx
+++ b/superset/assets/javascripts/dashboard/Dashboard.jsx
@@ -245,15 +245,18 @@ export function dashboardContainer(dashboard, datasources, userid) {
startPeriodicRender(interval) {
this.stopPeriodicRender();
const dash = this;
+ const immune = this.metadata.timed_refresh_immune_slices || [];
const maxRandomDelay = Math.max(interval * 0.2, 5000);
const refreshAll = () => {
dash.sliceObjects.forEach((slice) => {
const force = !dash.firstLoad;
- setTimeout(() => {
- slice.render(force);
- },
- // Randomize to prevent all widgets refreshing at the same time
- maxRandomDelay * Math.random());
+ if (immune.indexOf(slice.data.slice_id) === -1) {
+ setTimeout(() => {
+ slice.render(force);
+ },
+ // Randomize to prevent all widgets refreshing at the same time
+ maxRandomDelay * Math.random());
+ }
});
dash.firstLoad = false;
};
diff --git a/superset/assets/javascripts/dashboard/components/SliceAdder.jsx b/superset/assets/javascripts/dashboard/components/SliceAdder.jsx
index a96effef4b196..cb9206619e87b 100644
--- a/superset/assets/javascripts/dashboard/components/SliceAdder.jsx
+++ b/superset/assets/javascripts/dashboard/components/SliceAdder.jsx
@@ -129,9 +129,14 @@ class SliceAdder extends React.Component {
height="auto"
>
+
Name
diff --git a/superset/assets/javascripts/explore/actions/exploreActions.js b/superset/assets/javascripts/explore/actions/exploreActions.js
index 6d8ed83488bac..d45acd5834b9e 100644
--- a/superset/assets/javascripts/explore/actions/exploreActions.js
+++ b/superset/assets/javascripts/explore/actions/exploreActions.js
@@ -16,11 +16,6 @@ export function setDatasource(datasource) {
return { type: SET_DATASOURCE, datasource };
}
-export const SET_DATASOURCES = 'SET_DATASOURCES';
-export function setDatasources(datasources) {
- return { type: SET_DATASOURCES, datasources };
-}
-
export const FETCH_DATASOURCE_STARTED = 'FETCH_DATASOURCE_STARTED';
export function fetchDatasourceStarted() {
return { type: FETCH_DATASOURCE_STARTED };
@@ -36,21 +31,6 @@ export function fetchDatasourceFailed(error) {
return { type: FETCH_DATASOURCE_FAILED, error };
}
-export const FETCH_DATASOURCES_STARTED = 'FETCH_DATASOURCES_STARTED';
-export function fetchDatasourcesStarted() {
- return { type: FETCH_DATASOURCES_STARTED };
-}
-
-export const FETCH_DATASOURCES_SUCCEEDED = 'FETCH_DATASOURCES_SUCCEEDED';
-export function fetchDatasourcesSucceeded() {
- return { type: FETCH_DATASOURCES_SUCCEEDED };
-}
-
-export const FETCH_DATASOURCES_FAILED = 'FETCH_DATASOURCES_FAILED';
-export function fetchDatasourcesFailed(error) {
- return { type: FETCH_DATASOURCES_FAILED, error };
-}
-
export const RESET_FIELDS = 'RESET_FIELDS';
export function resetControls() {
return { type: RESET_FIELDS };
@@ -83,24 +63,6 @@ export function fetchDatasourceMetadata(datasourceKey, alsoTriggerQuery = false)
};
}
-export function fetchDatasources() {
- return function (dispatch) {
- dispatch(fetchDatasourcesStarted());
- const url = '/superset/datasources/';
- $.ajax({
- type: 'GET',
- url,
- success: (data) => {
- dispatch(setDatasources(data));
- dispatch(fetchDatasourcesSucceeded());
- },
- error(error) {
- dispatch(fetchDatasourcesFailed(error.responseJSON.error));
- },
- });
- };
-}
-
export const TOGGLE_FAVE_STAR = 'TOGGLE_FAVE_STAR';
export function toggleFaveStar(isStarred) {
return { type: TOGGLE_FAVE_STAR, isStarred };
diff --git a/superset/assets/javascripts/explore/components/ChartContainer.jsx b/superset/assets/javascripts/explore/components/ChartContainer.jsx
index f6da538843eef..ab2be2fcdf24c 100644
--- a/superset/assets/javascripts/explore/components/ChartContainer.jsx
+++ b/superset/assets/javascripts/explore/components/ChartContainer.jsx
@@ -32,6 +32,7 @@ const propTypes = {
column_formats: PropTypes.object,
containerId: PropTypes.string.isRequired,
height: PropTypes.string.isRequired,
+ width: PropTypes.string.isRequired,
isStarred: PropTypes.bool.isRequired,
slice: PropTypes.object,
table_name: PropTypes.string,
@@ -60,6 +61,7 @@ class ChartContainer extends React.PureComponent {
(
prevProps.queryResponse !== this.props.queryResponse ||
prevProps.height !== this.props.height ||
+ prevProps.width !== this.props.width ||
this.props.triggerRender
) && !this.props.queryResponse.error
&& this.props.chartStatus !== 'failed'
@@ -272,7 +274,7 @@ class ChartContainer extends React.PureComponent {
(
controlName &&
+ this.props.controls[controlName] &&
{
- this.setState({ height: this.getHeight() });
+ this.setState({ height: this.getHeight(), width: this.getWidth() });
}, 250);
}
@@ -131,6 +133,7 @@ class ExploreViewContainer extends React.Component {
return (
);
}
@@ -187,7 +190,7 @@ function mapStateToProps(state) {
const form_data = getFormDataFromControls(state.controls);
return {
chartStatus: state.chartStatus,
- datasource_type: state.datasource_type,
+ datasource_type: state.datasource.type,
controls: state.controls,
form_data,
standalone: state.standalone,
diff --git a/superset/assets/javascripts/explore/components/controls/DatasourceControl.jsx b/superset/assets/javascripts/explore/components/controls/DatasourceControl.jsx
new file mode 100644
index 0000000000000..b00fe3fc79368
--- /dev/null
+++ b/superset/assets/javascripts/explore/components/controls/DatasourceControl.jsx
@@ -0,0 +1,160 @@
+/* global notify */
+import React from 'react';
+import PropTypes from 'prop-types';
+import { Table } from 'reactable';
+import { Label, FormControl, Modal, OverlayTrigger, Tooltip } from 'react-bootstrap';
+
+import ControlHeader from '../ControlHeader';
+
+const propTypes = {
+ description: PropTypes.string,
+ label: PropTypes.string,
+ name: PropTypes.string.isRequired,
+ onChange: PropTypes.func,
+ value: PropTypes.string.isRequired,
+ datasource: PropTypes.object.isRequired,
+};
+
+const defaultProps = {
+ onChange: () => {},
+};
+
+export default class DatasourceControl extends React.PureComponent {
+ constructor(props) {
+ super(props);
+ this.state = {
+ showModal: false,
+ filter: '',
+ loading: true,
+ };
+ this.toggleModal = this.toggleModal.bind(this);
+ this.changeSearch = this.changeSearch.bind(this);
+ this.setSearchRef = this.setSearchRef.bind(this);
+ this.onEnterModal = this.onEnterModal.bind(this);
+ }
+ onChange(vizType) {
+ this.props.onChange(vizType);
+ this.setState({ showModal: false });
+ }
+ onEnterModal() {
+ if (this.searchRef) {
+ this.searchRef.focus();
+ }
+ const url = '/superset/datasources/';
+ const that = this;
+ if (!this.state.datasources) {
+ $.ajax({
+ type: 'GET',
+ url,
+ success: (data) => {
+ const datasources = data.map(ds => ({
+ rawName: ds.name,
+ connection: ds.connection,
+ schema: ds.schema,
+ name: (
+
+ {ds.name}
+ ),
+ type: ds.type,
+ }));
+
+ that.setState({ loading: false, datasources });
+ },
+ error() {
+ that.setState({ loading: false });
+ notify.error('Something went wrong while fetching the datasource list');
+ },
+ });
+ }
+ }
+ setSearchRef(searchRef) {
+ this.searchRef = searchRef;
+ }
+ toggleModal() {
+ this.setState({ showModal: !this.state.showModal });
+ }
+ changeSearch(event) {
+ this.setState({ filter: event.target.value });
+ }
+ selectDatasource(datasourceId) {
+ this.setState({ showModal: false });
+ this.props.onChange(datasourceId);
+ }
+ render() {
+ return (
+
+
+
Click to point to another datasource
+ }
+ >
+
+ {this.props.datasource.name}
+
+
+
+ Edit the datasource's configuration
+
+ }
+ >
+
+
+
+
+
+
+ Select a datasource
+
+
+
+ { this.setSearchRef(ref); }}
+ type="text"
+ bsSize="sm"
+ value={this.state.filter}
+ placeholder="Search / Filter"
+ onChange={this.changeSearch}
+ />
+
+ {this.state.loading &&
+
+ }
+ {this.state.datasources &&
+
+ }
+
+
+
);
+ }
+}
+
+DatasourceControl.propTypes = propTypes;
+DatasourceControl.defaultProps = defaultProps;
diff --git a/superset/assets/javascripts/explore/components/controls/SelectControl.jsx b/superset/assets/javascripts/explore/components/controls/SelectControl.jsx
index 6998c071b0e92..312fced55bb33 100644
--- a/superset/assets/javascripts/explore/components/controls/SelectControl.jsx
+++ b/superset/assets/javascripts/explore/components/controls/SelectControl.jsx
@@ -43,7 +43,8 @@ export default class SelectControl extends React.PureComponent {
this.onChange = this.onChange.bind(this);
}
componentWillReceiveProps(nextProps) {
- if (nextProps.choices !== this.props.choices) {
+ if (nextProps.choices !== this.props.choices ||
+ nextProps.options !== this.props.options) {
const options = this.getOptions(nextProps);
this.setState({ options });
}
diff --git a/superset/assets/javascripts/explore/components/controls/VizTypeControl.jsx b/superset/assets/javascripts/explore/components/controls/VizTypeControl.jsx
index 8fdd3f2d5b125..0fc82660f2b40 100644
--- a/superset/assets/javascripts/explore/components/controls/VizTypeControl.jsx
+++ b/superset/assets/javascripts/explore/components/controls/VizTypeControl.jsx
@@ -1,6 +1,8 @@
import React from 'react';
import PropTypes from 'prop-types';
-import { Label, Row, Col, FormControl, Modal } from 'react-bootstrap';
+import {
+ Label, Row, Col, FormControl, Modal, OverlayTrigger,
+ Tooltip } from 'react-bootstrap';
import visTypes from '../../stores/visTypes';
import ControlHeader from '../ControlHeader';
@@ -25,17 +27,27 @@ export default class VizTypeControl extends React.PureComponent {
};
this.toggleModal = this.toggleModal.bind(this);
this.changeSearch = this.changeSearch.bind(this);
+ this.setSearchRef = this.setSearchRef.bind(this);
+ this.focusSearch = this.focusSearch.bind(this);
}
onChange(vizType) {
this.props.onChange(vizType);
this.setState({ showModal: false });
}
+ setSearchRef(searchRef) {
+ this.searchRef = searchRef;
+ }
toggleModal() {
this.setState({ showModal: !this.state.showModal });
}
changeSearch(event) {
this.setState({ filter: event.target.value });
}
+ focusSearch() {
+ if (this.searchRef) {
+ this.searchRef.focus();
+ }
+ }
renderVizType(vizType) {
const vt = vizType;
return (
@@ -75,14 +87,24 @@ export default class VizTypeControl extends React.PureComponent {
edit
- }
/>
-
- {visTypes[this.props.value].label}
-
-
+ Click to change visualization type
+ }
+ >
+
+ {visTypes[this.props.value].label}
+
+
+
Select a visualization type
@@ -90,6 +112,7 @@ export default class VizTypeControl extends React.PureComponent {
{ this.setSearchRef(ref); }}
type="text"
bsSize="sm"
value={this.state.filter}
diff --git a/superset/assets/javascripts/explore/index.jsx b/superset/assets/javascripts/explore/index.jsx
index 39ecab08c57dd..0fe4fcaba9524 100644
--- a/superset/assets/javascripts/explore/index.jsx
+++ b/superset/assets/javascripts/explore/index.jsx
@@ -14,6 +14,7 @@ import ExploreViewContainer from './components/ExploreViewContainer';
import { exploreReducer } from './reducers/exploreReducer';
import { appSetup } from '../common';
import './main.css';
+import '../../stylesheets/reactable-pagination.css';
appSetup();
initJQueryAjax();
diff --git a/superset/assets/javascripts/explore/reducers/exploreReducer.js b/superset/assets/javascripts/explore/reducers/exploreReducer.js
index cc214581c6b4a..96e36e3765754 100644
--- a/superset/assets/javascripts/explore/reducers/exploreReducer.js
+++ b/superset/assets/javascripts/explore/reducers/exploreReducer.js
@@ -29,25 +29,6 @@ export const exploreReducer = function (state, action) {
[actions.SET_DATASOURCE]() {
return Object.assign({}, state, { datasource: action.datasource });
},
- [actions.FETCH_DATASOURCES_STARTED]() {
- return Object.assign({}, state, { isDatasourcesLoading: true });
- },
-
- [actions.FETCH_DATASOURCES_SUCCEEDED]() {
- return Object.assign({}, state, { isDatasourcesLoading: false });
- },
-
- [actions.FETCH_DATASOURCES_FAILED]() {
- // todo(alanna) handle failure/error state
- return Object.assign({}, state,
- {
- isDatasourcesLoading: false,
- controlPanelAlert: action.error,
- });
- },
- [actions.SET_DATASOURCES]() {
- return Object.assign({}, state, { datasources: action.datasources });
- },
[actions.REMOVE_CONTROL_PANEL_ALERT]() {
return Object.assign({}, state, { controlPanelAlert: null });
},
diff --git a/superset/assets/javascripts/explore/stores/controls.jsx b/superset/assets/javascripts/explore/stores/controls.jsx
index da6ff386ad64b..3d33873c3c53d 100644
--- a/superset/assets/javascripts/explore/stores/controls.jsx
+++ b/superset/assets/javascripts/explore/stores/controls.jsx
@@ -28,24 +28,45 @@ export const D3_TIME_FORMAT_OPTIONS = [
['%H:%M:%S', '%H:%M:%S | 01:32:10'],
];
+const timeColumnOption = {
+ verbose_name: 'Time',
+ column_name: '__timestamp',
+ description: (
+ 'A reference to the [Time] configuration, taking granularity into ' +
+ 'account'),
+};
+
+const groupByControl = {
+ type: 'SelectControl',
+ multi: true,
+ label: 'Group by',
+ default: [],
+ includeTime: false,
+ description: 'One or many controls to group by',
+ optionRenderer: c => ,
+ valueRenderer: c => ,
+ valueKey: 'column_name',
+ mapStateToProps: (state, control) => {
+ const newState = {};
+ if (state.datasource) {
+ newState.options = state.datasource.columns.filter(c => c.groupby);
+ if (control && control.includeTime) {
+ newState.options.push(timeColumnOption);
+ }
+ }
+ return newState;
+ },
+};
+
export const controls = {
datasource: {
- type: 'SelectControl',
+ type: 'DatasourceControl',
label: 'Datasource',
- isLoading: true,
- clearable: false,
default: null,
- mapStateToProps: (state) => {
- const datasources = state.datasources || [];
- return {
- choices: datasources,
- isLoading: datasources.length === 0,
- rightNode: state.datasource ?
- edit
- : null,
- };
- },
- description: '',
+ description: null,
+ mapStateToProps: state => ({
+ datasource: state.datasource,
+ }),
},
viz_type: {
@@ -217,6 +238,14 @@ export const controls = {
description: null,
},
+ pivot_margins: {
+ type: 'CheckboxControl',
+ label: 'Show totals',
+ renderTrigger: false,
+ default: true,
+ description: 'Display total row/column',
+ },
+
show_markers: {
type: 'CheckboxControl',
label: 'Show Markers',
@@ -240,6 +269,14 @@ export const controls = {
description: 'Sort bars by x labels.',
},
+ combine_metric: {
+ type: 'CheckboxControl',
+ label: 'Combine Metrics',
+ default: false,
+ description: 'Display metrics side by side within each column, as ' +
+ 'opposed to each column being displayed side by side for each metric.',
+ },
+
show_controls: {
type: 'CheckboxControl',
label: 'Extra Controls',
@@ -316,30 +353,12 @@ export const controls = {
'to find in the [country] column',
},
- groupby: {
- type: 'SelectControl',
- multi: true,
- label: 'Group by',
- default: [],
- description: 'One or many controls to group by',
- optionRenderer: c => ,
- valueRenderer: c => ,
- valueKey: 'column_name',
- mapStateToProps: state => ({
- options: (state.datasource) ? state.datasource.columns : [],
- }),
- },
+ groupby: groupByControl,
- columns: {
- type: 'SelectControl',
- multi: true,
+ columns: Object.assign({}, groupByControl, {
label: 'Columns',
- mapStateToProps: state => ({
- choices: (state.datasource) ? state.datasource.gb_cols : [],
- }),
- default: [],
description: 'One or many controls to pivot as columns',
- },
+ }),
all_columns: {
type: 'SelectControl',
@@ -656,7 +675,7 @@ export const controls = {
label: 'Entity',
default: null,
validators: [v.nonEmpty],
- description: 'This define the element to be plotted on the chart',
+ description: 'This defines the element to be plotted on the chart',
mapStateToProps: state => ({
choices: (state.datasource) ? state.datasource.gb_cols : [],
}),
@@ -871,7 +890,7 @@ export const controls = {
label: 'Code',
description: 'Put your code here',
mapStateToProps: state => ({
- language: state.controls ? state.controls.markup_type.value : null,
+ language: state.controls && state.controls.markup_type ? state.controls.markup_type.value : 'markdown',
}),
default: '',
},
@@ -1270,5 +1289,23 @@ export const controls = {
hidden: true,
description: 'The number of seconds before expiring the cache',
},
+
+ order_by_entity: {
+ type: 'CheckboxControl',
+ label: 'Order by entity id',
+ description: 'Important! Select this if the table is not already sorted by entity id, ' +
+ 'else there is no guarantee that all events for each entity are returned.',
+ default: true,
+ },
+
+ min_leaf_node_event_count: {
+ type: 'SelectControl',
+ freeForm: false,
+ label: 'Minimum leaf node event count',
+ default: 1,
+ choices: formatSelectOptionsForRange(1, 10),
+ description: 'Leaf nodes that represent fewer than this number of events will be initially ' +
+ 'hidden in the visualization',
+ },
};
export default controls;
diff --git a/superset/assets/javascripts/explore/stores/store.js b/superset/assets/javascripts/explore/stores/store.js
index 2cd2874c266f2..af809ed2fa517 100644
--- a/superset/assets/javascripts/explore/stores/store.js
+++ b/superset/assets/javascripts/explore/stores/store.js
@@ -52,7 +52,7 @@ export function getControlsState(state, form_data) {
controlNames.forEach((k) => {
const control = Object.assign({}, controls[k], controlOverrides[k]);
if (control.mapStateToProps) {
- Object.assign(control, control.mapStateToProps(state));
+ Object.assign(control, control.mapStateToProps(state, control));
delete control.mapStateToProps;
}
diff --git a/superset/assets/javascripts/explore/stores/visTypes.js b/superset/assets/javascripts/explore/stores/visTypes.js
index 1e2eec23173fb..1df8e11c7267e 100644
--- a/superset/assets/javascripts/explore/stores/visTypes.js
+++ b/superset/assets/javascripts/explore/stores/visTypes.js
@@ -1,4 +1,5 @@
import { D3_TIME_FORMAT_OPTIONS } from './controls';
+import * as v from '../validators';
export const sections = {
druidTimeSeries: {
@@ -74,7 +75,7 @@ export const sections = {
],
};
-const visTypes = {
+export const visTypes = {
dist_bar: {
label: 'Distribution - Bar Chart',
controlPanelSections: [
@@ -336,10 +337,15 @@ const visTypes = {
controlSetRows: [
['groupby', 'columns'],
['metrics', 'pandas_aggfunc'],
- ['number_format'],
+ ['number_format', 'combine_metric'],
+ ['pivot_margins'],
],
},
],
+ controlOverrides: {
+ groupby: { includeTime: true },
+ columns: { includeTime: true },
+ },
},
separator: {
@@ -635,6 +641,37 @@ const visTypes = {
},
},
},
+ chord: {
+ label: 'Chord Diagram',
+ controlPanelSections: [
+ {
+ label: null,
+ controlSetRows: [
+ ['groupby', 'columns'],
+ ['metric'],
+ ['row_limit', 'y_axis_format'],
+ ],
+ },
+ ],
+ controlOverrides: {
+ y_axis_format: {
+ label: 'Number format',
+ description: 'Choose a number format',
+ },
+ groupby: {
+ label: 'Source',
+ multi: false,
+ validators: [v.nonEmpty],
+ description: 'Choose a source',
+ },
+ columns: {
+ label: 'Target',
+ multi: false,
+ validators: [v.nonEmpty],
+ description: 'Choose a target',
+ },
+ },
+ },
country_map: {
label: 'Country Map',
controlPanelSections: [
@@ -710,8 +747,13 @@ const visTypes = {
controlOverrides: {
groupby: {
label: 'Filter controls',
- description: 'The controls you want to filter on',
- default: [],
+ description: (
+ 'The controls you want to filter on. Note that only columns ' +
+ 'checked as "filterable" will show up on this list.'
+ ),
+ mapStateToProps: state => ({
+ options: (state.datasource) ? state.datasource.columns.filter(c => c.filterable) : [],
+ }),
},
},
},
@@ -765,6 +807,14 @@ const visTypes = {
],
},
],
+ controlOverrides: {
+ all_columns_x: {
+ validators: [v.nonEmpty],
+ },
+ all_columns_y: {
+ validators: [v.nonEmpty],
+ },
+ },
},
horizon: {
@@ -849,6 +899,51 @@ const visTypes = {
},
},
},
+
+ event_flow: {
+ label: 'Event flow',
+ requiresTime: true,
+ controlPanelSections: [
+ {
+ label: 'Event definition',
+ controlSetRows: [
+ ['entity'],
+ ['all_columns_x'],
+ ['row_limit'],
+ ['order_by_entity'],
+ ['min_leaf_node_event_count'],
+ ],
+ },
+ {
+ label: 'Additional meta data',
+ controlSetRows: [
+ ['all_columns'],
+ ],
+ },
+ ],
+ controlOverrides: {
+ entity: {
+ label: 'Column containing entity ids',
+ description: 'e.g., a "user id" column',
+ },
+ all_columns_x: {
+ label: 'Column containing event names',
+ validators: [v.nonEmpty],
+ default: control => (
+ control.choices && control.choices.length > 0 ?
+ control.choices[0][0] : null
+ ),
+ },
+ row_limit: {
+ label: 'Event count limit',
+ description: 'The maximum number of events to return, equivalent to number of rows',
+ },
+ all_columns: {
+ label: 'Meta data',
+ description: 'Select any columns for meta data inspection',
+ },
+ },
+ },
};
export default visTypes;
diff --git a/superset/assets/javascripts/modules/superset.js b/superset/assets/javascripts/modules/superset.js
index 55af823117c4e..eccdda4a312f7 100644
--- a/superset/assets/javascripts/modules/superset.js
+++ b/superset/assets/javascripts/modules/superset.js
@@ -8,7 +8,7 @@ import { QUERY_TIMEOUT_THRESHOLD } from '../constants';
const utils = require('./utils');
-/* eslint wrap-iife: 0*/
+/* eslint wrap-iife: 0 */
const px = function () {
let slice;
function getParam(name) {
diff --git a/superset/assets/javascripts/modules/utils.js b/superset/assets/javascripts/modules/utils.js
index 7349dbb7f8f11..61949c7e2b39e 100644
--- a/superset/assets/javascripts/modules/utils.js
+++ b/superset/assets/javascripts/modules/utils.js
@@ -229,14 +229,10 @@ export function initJQueryAjax() {
}
export function tryNumify(s) {
- // Attempts casting to float, returns string when failing
- try {
- const parsed = parseFloat(s);
- if (parsed) {
- return parsed;
- }
- } catch (e) {
- // pass
+ // Attempts casting to Number, returns string when failing
+ const n = Number(s);
+ if (isNaN(n)) {
+ return s;
}
- return s;
+ return n;
}
diff --git a/superset/assets/js_build.sh b/superset/assets/js_build.sh
index 3231d915483c5..7e48caa126d7d 100755
--- a/superset/assets/js_build.sh
+++ b/superset/assets/js_build.sh
@@ -3,7 +3,8 @@ set -e
cd "$(dirname "$0")"
npm --version
node --version
-npm install
+npm install -g yarn
+yarn
npm run sync-backend
npm run lint
npm run test
diff --git a/superset/assets/package.json b/superset/assets/package.json
index e7b432425ee2c..1422492bfea58 100644
--- a/superset/assets/package.json
+++ b/superset/assets/package.json
@@ -1,6 +1,6 @@
{
"name": "superset",
- "version": "0.18.5",
+ "version": "0.19.0",
"description": "Superset is a data exploration platform designed to be visual, intuitive, and interactive.",
"license": "Apache-2.0",
"directories": {
@@ -18,7 +18,7 @@
},
"repository": {
"type": "git",
- "url": "git+https://github.com/airbnb/superset.git"
+ "url": "git+https://github.com/apache/incubator-superset.git"
},
"keywords": [
"big",
@@ -32,12 +32,13 @@
"database",
"flask"
],
- "author": "Airbnb",
+ "author": "Apache",
"bugs": {
- "url": "https://github.com/airbnb/superset/issues"
+ "url": "https://github.com/apache/incubator-superset/issues"
},
- "homepage": "https://github.com/airbnb/superset#readme",
+ "homepage": "http://superset.apache.org/",
"dependencies": {
+ "@data-ui/event-flow": "0.0.4",
"babel-register": "^6.24.1",
"bootstrap": "^3.3.6",
"brace": "^0.10.0",
@@ -54,9 +55,7 @@
"datatables.net-bs": "^1.10.12",
"immutable": "^3.8.1",
"jquery": "^3.2.1",
- "jsdom": "9.12.0",
"lodash.throttle": "^4.1.1",
- "mapbox-gl": "^0.26.0",
"moment": "^2.14.1",
"mustache": "^2.2.1",
"nvd3": "1.8.5",
@@ -65,13 +64,13 @@
"react-ace": "^5.0.1",
"react-addons-css-transition-group": "^15.6.0",
"react-addons-shallow-compare": "^15.4.2",
- "react-alert": "^2.0.1",
+ "react-alert": "^1.0.14",
"react-bootstrap": "^0.31.0",
"react-bootstrap-table": "^3.1.7",
"react-dom": "^15.5.1",
"react-gravatar": "^2.6.1",
"react-grid-layout": "^0.14.4",
- "react-map-gl": "^1.7.0",
+ "react-map-gl": "^2.0.3",
"react-redux": "^5.0.2",
"react-resizable": "^1.3.3",
"react-select": "1.0.0-rc.3",
@@ -127,8 +126,8 @@
"transform-loader": "^0.2.3",
"uglifyjs-webpack-plugin": "^0.4.6",
"url-loader": "^0.5.7",
- "webpack": "^2.3.3",
- "webpack-manifest-plugin": "1.1.0",
- "webworkify-webpack": "2.0.4"
+ "webpack": "^3.4.1",
+ "webpack-manifest-plugin": "1.2.1",
+ "webworkify-webpack": "2.0.5"
}
}
diff --git a/superset/assets/spec/javascripts/dashboard/fixtures.jsx b/superset/assets/spec/javascripts/dashboard/fixtures.jsx
index 7ac259e9416bb..7c822d78f985f 100644
--- a/superset/assets/spec/javascripts/dashboard/fixtures.jsx
+++ b/superset/assets/spec/javascripts/dashboard/fixtures.jsx
@@ -43,6 +43,7 @@ export const dashboardData = {
css: '',
metadata: {
filter_immune_slices: [],
+ timed_refresh_immune_slices: [],
filter_immune_slice_fields: {},
expanded_slices: {},
},
diff --git a/superset/assets/spec/javascripts/explore/components/DatasourceControl_spec.jsx b/superset/assets/spec/javascripts/explore/components/DatasourceControl_spec.jsx
new file mode 100644
index 0000000000000..c46ded004a230
--- /dev/null
+++ b/superset/assets/spec/javascripts/explore/components/DatasourceControl_spec.jsx
@@ -0,0 +1,32 @@
+import React from 'react';
+import sinon from 'sinon';
+import { expect } from 'chai';
+import { describe, it, beforeEach } from 'mocha';
+import { shallow } from 'enzyme';
+import { Modal } from 'react-bootstrap';
+import DatasourceControl from '../../../../javascripts/explore/components/controls/DatasourceControl';
+
+const defaultProps = {
+ name: 'datasource',
+ label: 'Datasource',
+ value: '1__table',
+ datasource: {
+ name: 'birth_names',
+ type: 'table',
+ uid: '1__table',
+ id: 1,
+ },
+ onChange: sinon.spy(),
+};
+
+describe('DatasourceControl', () => {
+ let wrapper;
+
+ beforeEach(() => {
+ wrapper = shallow( );
+ });
+
+ it('renders a Modal', () => {
+ expect(wrapper.find(Modal)).to.have.lengthOf(1);
+ });
+});
diff --git a/superset/assets/spec/javascripts/explore/exploreActions_spec.js b/superset/assets/spec/javascripts/explore/exploreActions_spec.js
index 86173be4ecdfe..9fa02e4b12484 100644
--- a/superset/assets/spec/javascripts/explore/exploreActions_spec.js
+++ b/superset/assets/spec/javascripts/explore/exploreActions_spec.js
@@ -82,44 +82,6 @@ describe('fetching actions', () => {
});
});
- describe('fetchDatasources', () => {
- const makeRequest = () => {
- request = actions.fetchDatasources();
- request(dispatch);
- };
-
- it('calls fetchDatasourcesStarted', () => {
- makeRequest();
- expect(dispatch.args[0][0].type).to.equal(actions.FETCH_DATASOURCES_STARTED);
- });
-
- it('makes the ajax request', () => {
- makeRequest();
- expect(ajaxStub.calledOnce).to.be.true;
- });
-
- it('calls correct url', () => {
- const url = '/superset/datasources/';
- makeRequest();
- expect(ajaxStub.getCall(0).args[0].url).to.equal(url);
- });
-
- it('calls correct actions on error', () => {
- ajaxStub.yieldsTo('error', { responseJSON: { error: 'error text' } });
- makeRequest();
- expect(dispatch.callCount).to.equal(2);
- expect(dispatch.getCall(1).args[0].type).to.equal(actions.FETCH_DATASOURCES_FAILED);
- });
-
- it('calls correct actions on success', () => {
- ajaxStub.yieldsTo('success', { data: '' });
- makeRequest();
- expect(dispatch.callCount).to.equal(3);
- expect(dispatch.getCall(1).args[0].type).to.equal(actions.SET_DATASOURCES);
- expect(dispatch.getCall(2).args[0].type).to.equal(actions.FETCH_DATASOURCES_SUCCEEDED);
- });
- });
-
describe('fetchDashboards', () => {
const userID = 1;
const mockDashboardData = {
diff --git a/superset/assets/javascripts/SqlLab/reactable-pagination.css b/superset/assets/stylesheets/reactable-pagination.css
similarity index 100%
rename from superset/assets/javascripts/SqlLab/reactable-pagination.css
rename to superset/assets/stylesheets/reactable-pagination.css
diff --git a/superset/assets/stylesheets/superset.css b/superset/assets/stylesheets/superset.css
index ed968f72f3be1..5be9a5fd48835 100644
--- a/superset/assets/stylesheets/superset.css
+++ b/superset/assets/stylesheets/superset.css
@@ -13,7 +13,7 @@ body {
}
.emph {
- font-weight: bold;
+ font-weight: bold !important;
}
.alert.alert-danger > .debugger {
@@ -81,7 +81,6 @@ input[type="checkbox"] {
display: inline-block;
width: 16px;
height: 16px;
- float: right;
}
.widget-is-cached {
@@ -229,6 +228,9 @@ div.widget .slice_container {
.m-r-5 {
margin-right: 5px;
}
+.m-r-3 {
+ margin-right: 3px;
+}
.m-t-5 {
margin-top: 5px;
}
diff --git a/superset/assets/visualizations/EventFlow.jsx b/superset/assets/visualizations/EventFlow.jsx
new file mode 100644
index 0000000000000..110f4a76482c6
--- /dev/null
+++ b/superset/assets/visualizations/EventFlow.jsx
@@ -0,0 +1,61 @@
+import React from 'react';
+import ReactDOM from 'react-dom';
+
+import {
+ App,
+ withParentSize,
+ cleanEvents,
+ TS,
+ EVENT_NAME,
+ ENTITY_ID,
+} from '@data-ui/event-flow';
+
+/*
+ * This function takes the slice object and json payload as input and renders a
+ * responsive component using the json data.
+ */
+function renderEventFlow(slice, json) {
+ const container = document.querySelector(slice.selector);
+ const hasData = json.data && json.data.length > 0;
+
+ // the slice container overflows ~80px in explorer, so we have to correct for this
+ const isExplorer = (/explore/).test(window.location.pathname);
+
+ const ResponsiveVis = withParentSize(({
+ parentWidth,
+ parentHeight,
+ ...rest
+ }) => (
+
+ ));
+
+ // render the component if we have data, otherwise render a no-data message
+ let Component;
+ if (hasData) {
+ const userKey = json.form_data.entity;
+ const eventNameKey = json.form_data.all_columns_x;
+
+ // map from the Superset form fields to 's expected data keys
+ const accessorFunctions = {
+ [TS]: datum => new Date(datum.__timestamp), // eslint-disable-line no-underscore-dangle
+ [EVENT_NAME]: datum => datum[eventNameKey],
+ [ENTITY_ID]: datum => String(datum[userKey]),
+ };
+
+ const dirtyData = json.data;
+ const cleanData = cleanEvents(dirtyData, accessorFunctions);
+ const minEventCount = slice.formData.min_leaf_node_event_count;
+
+ Component = ;
+ } else {
+ Component = Sorry, there appears to be no data
;
+ }
+
+ ReactDOM.render(Component, container);
+}
+
+module.exports = renderEventFlow;
diff --git a/superset/assets/visualizations/chord.css b/superset/assets/visualizations/chord.css
new file mode 100644
index 0000000000000..d7471ba402d13
--- /dev/null
+++ b/superset/assets/visualizations/chord.css
@@ -0,0 +1,17 @@
+.chord svg #circle circle {
+ fill: none;
+ pointer-events: all;
+}
+
+.chord svg .group path {
+ fill-opacity: .6;
+}
+
+.chord svg path.chord {
+ stroke: #000;
+ stroke-width: .25px;
+}
+
+.chord svg #circle:hover path.fade {
+ opacity: 0.2;
+}
diff --git a/superset/assets/visualizations/chord.jsx b/superset/assets/visualizations/chord.jsx
new file mode 100644
index 0000000000000..c2b3c3498e7c4
--- /dev/null
+++ b/superset/assets/visualizations/chord.jsx
@@ -0,0 +1,101 @@
+/* eslint-disable no-param-reassign */
+import d3 from 'd3';
+import { category21 } from '../javascripts/modules/colors';
+import './chord.css';
+
+function chordViz(slice, json) {
+ slice.container.html('');
+
+ const div = d3.select(slice.selector);
+ const nodes = json.data.nodes;
+ const fd = slice.formData;
+ const f = d3.format(fd.y_axis_format);
+
+ const width = slice.width();
+ const height = slice.height();
+
+ const outerRadius = Math.min(width, height) / 2 - 10;
+ const innerRadius = outerRadius - 24;
+
+ let chord;
+
+ const arc = d3.svg.arc()
+ .innerRadius(innerRadius)
+ .outerRadius(outerRadius);
+
+ const layout = d3.layout.chord()
+ .padding(0.04)
+ .sortSubgroups(d3.descending)
+ .sortChords(d3.descending);
+
+ const path = d3.svg.chord()
+ .radius(innerRadius);
+
+ const svg = div.append('svg')
+ .attr('width', width)
+ .attr('height', height)
+ .on('mouseout', () => chord.classed('fade', false))
+ .append('g')
+ .attr('id', 'circle')
+ .attr('transform', `translate(${width / 2}, ${height / 2})`);
+
+ svg.append('circle')
+ .attr('r', outerRadius);
+
+ // Compute the chord layout.
+ layout.matrix(json.data.matrix);
+
+ const group = svg.selectAll('.group')
+ .data(layout.groups)
+ .enter().append('g')
+ .attr('class', 'group')
+ .on('mouseover', (d, i) => {
+ chord.classed('fade', p => p.source.index !== i && p.target.index !== i);
+ });
+
+ // Add a mouseover title.
+ group.append('title').text((d, i) => `${nodes[i]}: ${f(d.value)}`);
+
+ // Add the group arc.
+ const groupPath = group.append('path')
+ .attr('id', (d, i) => 'group' + i)
+ .attr('d', arc)
+ .style('fill', (d, i) => category21(nodes[i]));
+
+ // Add a text label.
+ const groupText = group.append('text')
+ .attr('x', 6)
+ .attr('dy', 15);
+
+ groupText.append('textPath')
+ .attr('xlink:href', (d, i) => `#group${i}`)
+ .text((d, i) => nodes[i]);
+ // Remove the labels that don't fit. :(
+ groupText.filter(function (d, i) {
+ return groupPath[0][i].getTotalLength() / 2 - 16 < this.getComputedTextLength();
+ })
+ .remove();
+
+ // Add the chords.
+ chord = svg.selectAll('.chord')
+ .data(layout.chords)
+ .enter().append('path')
+ .attr('class', 'chord')
+ .on('mouseover', (d) => {
+ chord.classed('fade', p => p !== d);
+ })
+ .style('fill', d => category21(nodes[d.source.index]))
+ .attr('d', path);
+
+ // Add an elaborate mouseover title for each chord.
+ chord.append('title').text(function (d) {
+ return nodes[d.source.index]
+ + ' → ' + nodes[d.target.index]
+ + ': ' + f(d.source.value)
+ + '\n' + nodes[d.target.index]
+ + ' → ' + nodes[d.source.index]
+ + ': ' + f(d.target.value);
+ });
+}
+
+module.exports = chordViz;
diff --git a/superset/assets/visualizations/main.js b/superset/assets/visualizations/main.js
index e078cb214840a..a02f508c33012 100644
--- a/superset/assets/visualizations/main.js
+++ b/superset/assets/visualizations/main.js
@@ -10,6 +10,7 @@ const vizMap = {
cal_heatmap: require('./cal_heatmap.js'),
compare: require('./nvd3_vis.js'),
directed_force: require('./directed_force.js'),
+ chord: require('./chord.jsx'),
dist_bar: require('./nvd3_vis.js'),
filter_box: require('./filter_box.jsx'),
heatmap: require('./heatmap.js'),
@@ -31,5 +32,6 @@ const vizMap = {
word_cloud: require('./word_cloud.js'),
world_map: require('./world_map.js'),
dual_line: require('./nvd3_vis.js'),
+ event_flow: require('./EventFlow.jsx'),
};
export default vizMap;
diff --git a/superset/assets/visualizations/nvd3_vis.js b/superset/assets/visualizations/nvd3_vis.js
index 1f34d9b797a7f..21342942cfd86 100644
--- a/superset/assets/visualizations/nvd3_vis.js
+++ b/superset/assets/visualizations/nvd3_vis.js
@@ -298,9 +298,6 @@ function nvd3Vis(slice, payload) {
chart.height(height);
slice.container.css('height', height + 'px');
- if ((vizType === 'line' || vizType === 'area') && fd.rich_tooltip) {
- chart.useInteractiveGuideline(true);
- }
if (chart.forceY &&
fd.y_axis_bounds &&
(fd.y_axis_bounds[0] !== null || fd.y_axis_bounds[1] !== null)) {
@@ -342,6 +339,34 @@ function nvd3Vis(slice, payload) {
if (vizType !== 'bullet') {
chart.color(d => category21(d[colorKey]));
}
+ if ((vizType === 'line' || vizType === 'area') && fd.rich_tooltip) {
+ chart.useInteractiveGuideline(true);
+ if (vizType === 'line') {
+ // Custom sorted tooltip
+ chart.interactiveLayer.tooltip.contentGenerator((d) => {
+ let tooltip = '';
+ tooltip += ""
+ + `${xAxisFormatter(d.value)} `
+ + ' ';
+ d.series.sort((a, b) => a.value >= b.value ? -1 : 1);
+ d.series.forEach((series) => {
+ tooltip += (
+ `` +
+ `` +
+ '
' +
+ ' ' +
+ `${series.key} ` +
+ `${yAxisFormatter(series.value)} ` +
+ ' '
+ );
+ });
+ tooltip += '
';
+ return tooltip;
+ });
+ }
+ }
if (fd.x_axis_label && fd.x_axis_label !== '' && chart.xAxis) {
let distance = 0;
diff --git a/superset/assets/visualizations/treemap.css b/superset/assets/visualizations/treemap.css
index c385780c82a77..2fdcdc76d7e56 100644
--- a/superset/assets/visualizations/treemap.css
+++ b/superset/assets/visualizations/treemap.css
@@ -1,43 +1,43 @@
-text {
+.treemap text {
pointer-events: none;
}
-.grandparent text {
+.treemap .grandparent text {
font-weight: bold;
}
-rect {
+.treemap rect {
fill: none;
stroke: #fff;
}
-rect.parent,
-.grandparent rect {
+.treemap rect.parent,
+.treemap .grandparent rect {
stroke-width: 2px;
}
-rect.parent {
+.treemap rect.parent {
pointer-events: none;
}
-.grandparent rect {
+.treemap .grandparent rect {
fill: #eee;
}
-.grandparent:hover rect {
+.treemap .grandparent:hover rect {
fill: #aaa;
}
-.children rect.parent,
-.grandparent rect {
+.treemap .children rect.parent,
+.treemap .grandparent rect {
cursor: pointer;
}
-.children rect.parent {
+.treemap .children rect.parent {
fill: #bbb;
fill-opacity: .5;
}
-.children:hover rect.child {
+.treemap .children:hover rect.child {
fill: #bbb;
}
diff --git a/superset/assets/visualizations/treemap.js b/superset/assets/visualizations/treemap.js
index 1e025935e6b25..2a5a9c3b3cf45 100644
--- a/superset/assets/visualizations/treemap.js
+++ b/superset/assets/visualizations/treemap.js
@@ -1,4 +1,4 @@
-/* eslint-disable no-shadow, no-param-reassign, no-underscore-dangle, no-use-before-define*/
+/* eslint-disable no-shadow, no-param-reassign, no-underscore-dangle, no-use-before-define */
import d3 from 'd3';
import { category21 } from '../javascripts/modules/colors';
@@ -34,6 +34,7 @@ function treemap(slice, payload) {
.round(false);
const svg = div.append('svg')
+ .attr('class', 'treemap')
.attr('width', eltWidth)
.attr('height', eltHeight);
@@ -217,7 +218,7 @@ function treemap(slice, payload) {
const name = function (d) {
return d.parent
? name(d.parent) + ' / ' + d.name + ' (' + formatNumber(d.value) + ')'
- : d.name + ' (' + formatNumber(d.value) + ')';
+ : (slice.datasource.verbose_map[d.name] || d.name) + ' (' + formatNumber(d.value) + ')';
};
initialize(data);
diff --git a/superset/assets/webpack.config.js b/superset/assets/webpack.config.js
index be8c8c983b9cb..e3413e5d14c2a 100644
--- a/superset/assets/webpack.config.js
+++ b/superset/assets/webpack.config.js
@@ -10,6 +10,9 @@ const APP_DIR = path.resolve(__dirname, './');
const BUILD_DIR = path.resolve(__dirname, './dist');
const config = {
+ node: {
+ fs: 'empty',
+ },
entry: {
'css-theme': APP_DIR + '/javascripts/css-theme.js',
common: APP_DIR + '/javascripts/common.js',
@@ -32,9 +35,7 @@ const config = {
],
alias: {
webworkify: 'webworkify-webpack',
- 'mapbox-gl/js/geo/transform': path.join(
- __dirname, '/node_modules/mapbox-gl/js/geo/transform'),
- 'mapbox-gl': path.join(__dirname, '/node_modules/mapbox-gl/dist/mapbox-gl.js'),
+ 'mapbox-gl$': path.join(__dirname, '/node_modules/mapbox-gl/dist/mapbox-gl.js'),
},
},
@@ -57,10 +58,10 @@ const config = {
],
},
},
- /* for react-map-gl overlays */
+ /* for mapbox-gl/js/geo/transform */
{
- test: /\.react\.js$/,
- include: APP_DIR + '/node_modules/react-map-gl/src/overlays',
+ test: /\.js$/,
+ include: APP_DIR + '/node_modules/mapbox-gl/js',
loader: 'babel-loader',
},
/* for require('*.css') */
@@ -129,11 +130,11 @@ if (process.env.NODE_ENV === 'production') {
const UJSplugin = new webpack.optimize.UglifyJsPlugin({
sourceMap: false,
minimize: true,
- compress: {
- drop_debugger: true,
- warnings: false,
- drop_console: true,
+ parallel: {
+ cache: true,
+ workers: 4,
},
+ compress: false,
});
config.plugins.push(UJSplugin);
}
diff --git a/superset/cli.py b/superset/cli.py
index 46b0ca794be77..f6163bb140b09 100755
--- a/superset/cli.py
+++ b/superset/cli.py
@@ -192,7 +192,7 @@ def worker(workers):
celery_app.conf.update(CELERYD_CONCURRENCY=workers)
elif config.get("SUPERSET_CELERY_WORKERS"):
celery_app.conf.update(
- worker_concurrency=config.get("SUPERSET_CELERY_WORKERS"))
+ CELERYD_CONCURRENCY=config.get("SUPERSET_CELERY_WORKERS"))
worker = celery_worker.worker(app=celery_app)
worker.run()
diff --git a/superset/config.py b/superset/config.py
index 61b111bde5764..bc91edd07b097 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -24,7 +24,10 @@
STATS_LOGGER = DummyStatsLogger()
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
-DATA_DIR = os.path.join(os.path.expanduser('~'), '.superset')
+if 'SUPERSET_HOME' in os.environ:
+ DATA_DIR = os.environ['SUPERSET_HOME']
+else:
+ DATA_DIR = os.path.join(os.path.expanduser('~'), '.superset')
if not os.path.exists(DATA_DIR):
os.makedirs(DATA_DIR)
@@ -46,6 +49,7 @@
SUPERSET_WEBSERVER_TIMEOUT = 60
EMAIL_NOTIFICATIONS = False
CUSTOM_SECURITY_MANAGER = None
+SQLALCHEMY_TRACK_MODIFICATIONS = False
# ---------------------------------------------------------
# Your App secret key
@@ -240,6 +244,7 @@ class CeleryConfig(object):
CELERY_IMPORTS = ('superset.sql_lab', )
CELERY_RESULT_BACKEND = 'db+sqlite:///celery_results.sqlite'
CELERY_ANNOTATIONS = {'tasks.add': {'rate_limit': '10/s'}}
+ CELERYD_LOG_LEVEL = 'DEBUG'
CELERY_CONFIG = CeleryConfig
"""
CELERY_CONFIG = None
@@ -306,8 +311,12 @@ class CeleryConfig(object):
# configuration. These blueprints will get integrated in the app
BLUEPRINTS = []
-try:
+# Provide a callable that receives a tracking_url and returns another
+# URL. This is used to translate internal Hadoop job tracker URL
+# into a proxied one
+TRACKING_URL_TRANSFORMER = lambda x: x
+try:
if CONFIG_PATH_ENV_VAR in os.environ:
# Explicitly import config module that is not in pythonpath; useful
# for case where app is being executed via pex.
diff --git a/superset/connectors/base/__init__.py b/superset/connectors/base/__init__.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/superset/connectors/base.py b/superset/connectors/base/models.py
similarity index 91%
rename from superset/connectors/base.py
rename to superset/connectors/base/models.py
index e203ef4401200..593c722d42bbc 100644
--- a/superset/connectors/base.py
+++ b/superset/connectors/base/models.py
@@ -68,6 +68,16 @@ def column_names(self):
def main_dttm_col(self):
return "timestamp"
+ @property
+ def connection(self):
+ """String representing the context of the Datasource"""
+ return None
+
+ @property
+ def schema(self):
+ """String representing the schema of the Datasource (if it applies)"""
+ return None
+
@property
def groupby_column_names(self):
return sorted([c.column_name for c in self.columns if c.groupby])
@@ -107,6 +117,20 @@ def metrics_combo(self):
for m in self.metrics],
key=lambda x: x[1])
+ @property
+ def short_data(self):
+ """Data representation of the datasource sent to the frontend"""
+ return {
+ 'edit_url': self.url,
+ 'id': self.id,
+ 'uid': self.uid,
+ 'schema': self.schema,
+ 'name': self.name,
+ 'type': self.type,
+ 'connection': self.connection,
+ 'creator': str(self.created_by),
+ }
+
@property
def data(self):
"""Data representation of the datasource sent to the frontend"""
@@ -222,7 +246,9 @@ def expression(self):
@property
def data(self):
- attrs = ('column_name', 'verbose_name', 'description', 'expression')
+ attrs = (
+ 'column_name', 'verbose_name', 'description', 'expression',
+ 'filterable', 'groupby')
return {s: getattr(self, s) for s in attrs}
diff --git a/superset/connectors/base/views.py b/superset/connectors/base/views.py
new file mode 100644
index 0000000000000..36cfe45076403
--- /dev/null
+++ b/superset/connectors/base/views.py
@@ -0,0 +1,12 @@
+from superset.views.base import SupersetModelView
+from superset.utils import SupersetException
+from flask import Markup
+
+
+class DatasourceModelView(SupersetModelView):
+ def pre_delete(self, obj):
+ if obj.slices:
+ raise SupersetException(Markup(
+ "Cannot delete a datasource that has slices attached to it."
+ "Here's the list of associated slices: " +
+ "".join([o.slice_link for o in obj.slices])))
diff --git a/superset/connectors/druid/models.py b/superset/connectors/druid/models.py
index dc037264a1c31..335f9d26b1fba 100644
--- a/superset/connectors/druid/models.py
+++ b/superset/connectors/druid/models.py
@@ -33,7 +33,7 @@
from superset.utils import (
flasher, MetricPermException, DimSelector, DTTM_ALIAS
)
-from superset.connectors.base import BaseDatasource, BaseColumn, BaseMetric
+from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.models.helpers import AuditMixinNullable, QueryResult, set_perm
DRUID_TZ = conf.get("DRUID_TZ")
@@ -50,6 +50,13 @@ def __init__(self, name, field_names, function):
self.name = name
+class CustomPostAggregator(Postaggregator):
+ """A way to allow users to specify completely custom PostAggregators"""
+ def __init__(self, name, post_aggregator):
+ self.name = name
+ self.post_aggregator = post_aggregator
+
+
class DruidCluster(Model, AuditMixinNullable):
"""ORM object referencing the Druid clusters"""
@@ -347,6 +354,10 @@ class DruidDatasource(Model, BaseDatasource):
def database(self):
return self.cluster
+ @property
+ def connection(self):
+ return str(self.database)
+
@property
def num_cols(self):
return [c.column_name for c in self.columns if c.is_num]
@@ -492,7 +503,7 @@ def latest_metadata(self):
lbound = datetime(1901, 1, 1).isoformat()[:10]
rbound = datetime(2050, 1, 1).isoformat()[:10]
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
- rbound = datetime.now().isoformat()[:10]
+ rbound = datetime.now().isoformat()
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
@@ -690,6 +701,75 @@ def granularity(period_name, timezone=None, origin=None):
period_name).total_seconds() * 1000
return granularity
+ @staticmethod
+ def _metrics_and_post_aggs(metrics, metrics_dict):
+ all_metrics = []
+ post_aggs = {}
+
+ def recursive_get_fields(_conf):
+ _type = _conf.get('type')
+ _field = _conf.get('field')
+ _fields = _conf.get('fields')
+
+ field_names = []
+ if _type in ['fieldAccess', 'hyperUniqueCardinality',
+ 'quantile', 'quantiles']:
+ field_names.append(_conf.get('fieldName', ''))
+
+ if _field:
+ field_names += recursive_get_fields(_field)
+
+ if _fields:
+ for _f in _fields:
+ field_names += recursive_get_fields(_f)
+
+ return list(set(field_names))
+
+ for metric_name in metrics:
+ metric = metrics_dict[metric_name]
+ if metric.metric_type != 'postagg':
+ all_metrics.append(metric_name)
+ else:
+ mconf = metric.json_obj
+ all_metrics += recursive_get_fields(mconf)
+ all_metrics += mconf.get('fieldNames', [])
+ if mconf.get('type') == 'javascript':
+ post_aggs[metric_name] = JavascriptPostAggregator(
+ name=mconf.get('name', ''),
+ field_names=mconf.get('fieldNames', []),
+ function=mconf.get('function', ''))
+ elif mconf.get('type') == 'quantile':
+ post_aggs[metric_name] = Quantile(
+ mconf.get('name', ''),
+ mconf.get('probability', ''),
+ )
+ elif mconf.get('type') == 'quantiles':
+ post_aggs[metric_name] = Quantiles(
+ mconf.get('name', ''),
+ mconf.get('probabilities', ''),
+ )
+ elif mconf.get('type') == 'fieldAccess':
+ post_aggs[metric_name] = Field(mconf.get('name'))
+ elif mconf.get('type') == 'constant':
+ post_aggs[metric_name] = Const(
+ mconf.get('value'),
+ output_name=mconf.get('name', '')
+ )
+ elif mconf.get('type') == 'hyperUniqueCardinality':
+ post_aggs[metric_name] = HyperUniqueCardinality(
+ mconf.get('name')
+ )
+ elif mconf.get('type') == 'arithmetic':
+ post_aggs[metric_name] = Postaggregator(
+ mconf.get('fn', "/"),
+ mconf.get('fields', []),
+ mconf.get('name', ''))
+ else:
+ post_aggs[metric_name] = CustomPostAggregator(
+ mconf.get('name', ''),
+ mconf)
+ return all_metrics, post_aggs
+
def values_for_column(self,
column_name,
limit=10000):
@@ -737,6 +817,7 @@ def run_query( # noqa / druid
"""
# TODO refactor into using a TBD Query object
client = client or self.cluster.get_pydruid_client()
+
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
@@ -749,61 +830,10 @@ def run_query( # noqa / druid
query_str = ""
metrics_dict = {m.metric_name: m for m in self.metrics}
- all_metrics = []
- post_aggs = {}
columns_dict = {c.column_name: c for c in self.columns}
- def recursive_get_fields(_conf):
- _fields = _conf.get('fields', [])
- field_names = []
- for _f in _fields:
- _type = _f.get('type')
- if _type in ['fieldAccess', 'hyperUniqueCardinality']:
- field_names.append(_f.get('fieldName'))
- elif _type == 'arithmetic':
- field_names += recursive_get_fields(_f)
- return list(set(field_names))
-
- for metric_name in metrics:
- metric = metrics_dict[metric_name]
- if metric.metric_type != 'postagg':
- all_metrics.append(metric_name)
- else:
- mconf = metric.json_obj
- all_metrics += recursive_get_fields(mconf)
- all_metrics += mconf.get('fieldNames', [])
- if mconf.get('type') == 'javascript':
- post_aggs[metric_name] = JavascriptPostAggregator(
- name=mconf.get('name', ''),
- field_names=mconf.get('fieldNames', []),
- function=mconf.get('function', ''))
- elif mconf.get('type') == 'quantile':
- post_aggs[metric_name] = Quantile(
- mconf.get('name', ''),
- mconf.get('probability', ''),
- )
- elif mconf.get('type') == 'quantiles':
- post_aggs[metric_name] = Quantiles(
- mconf.get('name', ''),
- mconf.get('probabilities', ''),
- )
- elif mconf.get('type') == 'fieldAccess':
- post_aggs[metric_name] = Field(mconf.get('name'))
- elif mconf.get('type') == 'constant':
- post_aggs[metric_name] = Const(
- mconf.get('value'),
- output_name=mconf.get('name', '')
- )
- elif mconf.get('type') == 'hyperUniqueCardinality':
- post_aggs[metric_name] = HyperUniqueCardinality(
- mconf.get('name')
- )
- else:
- post_aggs[metric_name] = Postaggregator(
- mconf.get('fn', "/"),
- mconf.get('fields', []),
- mconf.get('name', ''))
+ all_metrics, post_aggs = self._metrics_and_post_aggs(metrics, metrics_dict)
aggregations = OrderedDict()
for m in self.metrics:
diff --git a/superset/connectors/druid/views.py b/superset/connectors/druid/views.py
index e12321755b24b..b9a87e3f52567 100644
--- a/superset/connectors/druid/views.py
+++ b/superset/connectors/druid/views.py
@@ -3,19 +3,19 @@
import sqlalchemy as sqla
-from flask import Markup, flash, redirect, abort
+from flask import Markup, flash, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import lazy_gettext as _
from flask_babel import gettext as __
-import superset
from superset import db, utils, appbuilder, sm, security
from superset.connectors.connector_registry import ConnectorRegistry
from superset.utils import has_access
-from superset.views.base import BaseSupersetView
+from superset.connectors.base.views import DatasourceModelView
from superset.views.base import (
+ BaseSupersetView,
SupersetModelView, validate_json, DeleteMixin, ListWidgetWithCheckboxes,
DatasourceFilter, get_datasource_exist_error_mgs)
@@ -24,6 +24,12 @@
class DruidColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidColumn)
+
+ list_title = _('List Druid Column')
+ show_title = _('Show Druid Column')
+ add_title = _('Add Druid Column')
+ edit_title = _('Edit Druid Column')
+
edit_columns = [
'column_name', 'description', 'dimension_spec_json', 'datasource',
'groupby', 'filterable', 'count_distinct', 'sum', 'min', 'max']
@@ -70,6 +76,12 @@ def post_add(self, col):
class DruidMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.DruidMetric)
+
+ list_title = _('List Druid Metric')
+ show_title = _('Show Druid Metric')
+ add_title = _('Add Druid Metric')
+ edit_title = _('Edit Druid Metric')
+
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type', 'json',
@@ -112,6 +124,12 @@ def post_update(self, metric):
class DruidClusterModelView(SupersetModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidCluster)
+
+ list_title = _('List Druid Cluster')
+ show_title = _('Show Druid Cluster')
+ add_title = _('Add Druid Cluster')
+ edit_title = _('Edit Druid Cluster')
+
add_columns = [
'verbose_name', 'coordinator_host', 'coordinator_port',
'coordinator_endpoint', 'broker_host', 'broker_port',
@@ -149,8 +167,14 @@ def _delete(self, pk):
category_icon='fa-database',)
-class DruidDatasourceModelView(SupersetModelView, DeleteMixin): # noqa
+class DruidDatasourceModelView(DatasourceModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.DruidDatasource)
+
+ list_title = _('List Druid Datasource')
+ show_title = _('Show Druid Datasource')
+ add_title = _('Add Druid Datasource')
+ edit_title = _('Edit Druid Datasource')
+
list_widget = ListWidgetWithCheckboxes
list_columns = [
'datasource_link', 'cluster', 'changed_by_', 'modified']
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 1cd0818d07063..b836a153e140d 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -11,8 +11,7 @@
)
import sqlalchemy as sa
from sqlalchemy import asc, and_, desc, select
-from sqlalchemy.ext.compiler import compiles
-from sqlalchemy.sql.expression import ColumnClause, TextAsFrom
+from sqlalchemy.sql.expression import TextAsFrom
from sqlalchemy.orm import backref, relationship
from sqlalchemy.sql import table, literal_column, text, column
@@ -21,7 +20,7 @@
from flask_babel import lazy_gettext as _
from superset import db, utils, import_util, sm
-from superset.connectors.base import BaseDatasource, BaseColumn, BaseMetric
+from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.utils import DTTM_ALIAS, QueryStatus
from superset.models.helpers import QueryResult
from superset.models.core import Database
@@ -193,6 +192,10 @@ class SqlaTable(Model, BaseDatasource):
def __repr__(self):
return self.name
+ @property
+ def connection(self):
+ return str(self.database)
+
@property
def description_markeddown(self):
return utils.markdown(self.description)
@@ -286,10 +289,12 @@ def values_for_column(self, column_name, limit=10000):
"""
cols = {col.column_name: col for col in self.columns}
target_col = cols[column_name]
+ tp = self.get_template_processor()
+ db_engine_spec = self.database.db_engine_spec
qry = (
select([target_col.sqla_col])
- .select_from(self.get_from_clause())
+ .select_from(self.get_from_clause(tp, db_engine_spec))
.distinct(column_name)
)
if limit:
@@ -323,7 +328,6 @@ def get_query_str(self, query_obj):
)
logging.info(sql)
sql = sqlparse.format(sql, reindent=True)
- sql = self.database.db_engine_spec.sql_preprocessor(sql)
return sql
def get_sqla_table(self):
@@ -332,12 +336,14 @@ def get_sqla_table(self):
tbl.schema = self.schema
return tbl
- def get_from_clause(self, template_processor=None):
+ def get_from_clause(self, template_processor=None, db_engine_spec=None):
# Supporting arbitrary SQL statements in place of tables
if self.sql:
from_sql = self.sql
if template_processor:
from_sql = template_processor.process_template(from_sql)
+ if db_engine_spec:
+ from_sql = db_engine_spec.escape_sql(from_sql)
return TextAsFrom(sa.text(from_sql), []).alias('expr_qry')
return self.get_sqla_table()
@@ -368,13 +374,14 @@ def get_sqla_query( # sqla
'form_data': form_data,
}
template_processor = self.get_template_processor(**template_kwargs)
+ db_engine_spec = self.database.db_engine_spec
# For backward compatibility
if granularity not in self.dttm_cols:
granularity = self.main_dttm_col
# Database spec supports join-free timeslot grouping
- time_groupby_inline = self.database.db_engine_spec.time_groupby_inline
+ time_groupby_inline = db_engine_spec.time_groupby_inline
cols = {col.column_name: col for col in self.columns}
metrics_dict = {m.metric_name: m for m in self.metrics}
@@ -429,7 +436,7 @@ def get_sqla_query( # sqla
groupby_exprs += [timestamp]
# Use main dttm column to support index with secondary dttm columns
- if self.database.db_engine_spec.time_secondary_columns and \
+ if db_engine_spec.time_secondary_columns and \
self.main_dttm_col in self.dttm_cols and \
self.main_dttm_col != dttm_col.column_name:
time_filters.append(cols[self.main_dttm_col].
@@ -439,7 +446,7 @@ def get_sqla_query( # sqla
select_exprs += metrics_exprs
qry = sa.select(select_exprs)
- tbl = self.get_from_clause(template_processor)
+ tbl = self.get_from_clause(template_processor, db_engine_spec)
if not columns:
qry = qry.group_by(*groupby_exprs)
diff --git a/superset/connectors/sqla/views.py b/superset/connectors/sqla/views.py
index 5ba10dac54f46..ef87d3140a1ab 100644
--- a/superset/connectors/sqla/views.py
+++ b/superset/connectors/sqla/views.py
@@ -3,7 +3,7 @@
from past.builtins import basestring
-from flask import Markup, flash, redirect, abort
+from flask import Markup, flash, redirect
from flask_appbuilder import CompactCRUDMixin, expose
from flask_appbuilder.models.sqla.interface import SQLAInterface
import sqlalchemy as sa
@@ -13,6 +13,7 @@
from superset import appbuilder, db, utils, security, sm
from superset.utils import has_access
+from superset.connectors.base.views import DatasourceModelView
from superset.views.base import (
SupersetModelView, ListWidgetWithCheckboxes, DeleteMixin, DatasourceFilter,
get_datasource_exist_error_mgs,
@@ -23,6 +24,12 @@
class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.TableColumn)
+
+ list_title = _('List Columns')
+ show_title = _('Show Column')
+ add_title = _('Add Column')
+ edit_title = _('Edit Column')
+
can_delete = False
list_widget = ListWidgetWithCheckboxes
edit_columns = [
@@ -91,6 +98,12 @@ class TableColumnInlineView(CompactCRUDMixin, SupersetModelView): # noqa
class SqlMetricInlineView(CompactCRUDMixin, SupersetModelView): # noqa
datamodel = SQLAInterface(models.SqlMetric)
+
+ list_title = _('List Metrics')
+ show_title = _('Show Metric')
+ add_title = _('Add Metric')
+ edit_title = _('Edit Metric')
+
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type',
@@ -133,8 +146,14 @@ def post_update(self, metric):
appbuilder.add_view_no_menu(SqlMetricInlineView)
-class TableModelView(SupersetModelView, DeleteMixin): # noqa
+class TableModelView(DatasourceModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.SqlaTable)
+
+ list_title = _('List Tables')
+ show_title = _('Show Table')
+ add_title = _('Add Table')
+ edit_title = _('Edit Table')
+
list_columns = [
'link', 'database',
'changed_by_', 'modified']
diff --git a/superset/db_engine_specs.py b/superset/db_engine_specs.py
index d4a7fa0e4de4d..0b804b361305d 100644
--- a/superset/db_engine_specs.py
+++ b/superset/db_engine_specs.py
@@ -31,8 +31,9 @@
from superset.utils import SupersetTemplateException
from superset.utils import QueryStatus
-from superset import utils
-from superset import cache_util
+from superset import conf, cache_util, utils
+
+tracking_url_trans = conf.get('TRACKING_URL_TRANSFORMER')
Grain = namedtuple('Grain', 'name label function')
@@ -73,6 +74,11 @@ def extra_table_metadata(cls, database, table_name, schema_name):
"""Returns engine-specific table metadata"""
return {}
+ @classmethod
+ def escape_sql(cls, sql):
+ """Escapes the raw SQL"""
+ return sql
+
@classmethod
def convert_dttm(cls, target_type, dttm):
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
@@ -139,14 +145,6 @@ def adjust_database_uri(cls, uri, selected_schema):
"""
return uri
- @classmethod
- def sql_preprocessor(cls, sql):
- """If the SQL needs to be altered prior to running it
-
- For example Presto needs to double `%` characters
- """
- return sql
-
@classmethod
def patch(cls):
pass
@@ -399,6 +397,10 @@ def adjust_database_uri(cls, uri, selected_schema=None):
uri.database = database
return uri
+ @classmethod
+ def escape_sql(cls, sql):
+ return re.sub(r'%%|%', "%%", sql)
+
@classmethod
def convert_dttm(cls, target_type, dttm):
tt = target_type.upper()
@@ -636,6 +638,21 @@ class HiveEngineSpec(PrestoEngineSpec):
engine = 'hive'
cursor_execute_kwargs = {'async': True}
+ # Scoping regex at class level to avoid recompiling
+ # 17/02/07 19:36:38 INFO ql.Driver: Total jobs = 5
+    jobs_stats_r = re.compile(
+        r'.*INFO.*Total jobs = (?P<max_jobs>[0-9]+)')
+    # 17/02/07 19:37:08 INFO ql.Driver: Launching Job 2 out of 5
+    launching_job_r = re.compile(
+        '.*INFO.*Launching Job (?P<job_number>[0-9]+) out of '
+        '(?P<max_jobs>[0-9]+)')
+    # 17/02/07 19:36:58 INFO exec.Task: 2017-02-07 19:36:58,152 Stage-18
+    # map = 0%,  reduce = 0%
+    stage_progress_r = re.compile(
+        r'.*INFO.*Stage-(?P<stage_number>[0-9]+).*'
+        r'map = (?P<map_progress>[0-9]+)%.*'
+        r'reduce = (?P<reduce_progress>[0-9]+)%.*')
+
@classmethod
def patch(cls):
from pyhive import hive
@@ -665,41 +682,30 @@ def adjust_database_uri(cls, uri, selected_schema=None):
return uri
@classmethod
- def progress(cls, logs):
- # 17/02/07 19:36:38 INFO ql.Driver: Total jobs = 5
-        jobs_stats_r = re.compile(
-            r'.*INFO.*Total jobs = (?P<max_jobs>[0-9]+)')
-        # 17/02/07 19:37:08 INFO ql.Driver: Launching Job 2 out of 5
-        launching_job_r = re.compile(
-            '.*INFO.*Launching Job (?P<job_number>[0-9]+) out of '
-            '(?P<max_jobs>[0-9]+)')
-        # 17/02/07 19:36:58 INFO exec.Task: 2017-02-07 19:36:58,152 Stage-18
-        # map = 0%,  reduce = 0%
-        stage_progress = re.compile(
-            r'.*INFO.*Stage-(?P<stage_number>[0-9]+).*'
-            r'map = (?P<map_progress>[0-9]+)%.*'
-            r'reduce = (?P<reduce_progress>[0-9]+)%.*')
- total_jobs = None
- current_job = None
+ def progress(cls, log_lines):
+ total_jobs = 1 # assuming there's at least 1 job
+ current_job = 1
stages = {}
- lines = logs.splitlines()
- for line in lines:
- match = jobs_stats_r.match(line)
+ for line in log_lines:
+ match = cls.jobs_stats_r.match(line)
if match:
- total_jobs = int(match.groupdict()['max_jobs'])
- match = launching_job_r.match(line)
+ total_jobs = int(match.groupdict()['max_jobs']) or 1
+ match = cls.launching_job_r.match(line)
if match:
current_job = int(match.groupdict()['job_number'])
+ total_jobs = int(match.groupdict()['max_jobs']) or 1
stages = {}
- match = stage_progress.match(line)
+ match = cls.stage_progress_r.match(line)
if match:
stage_number = int(match.groupdict()['stage_number'])
map_progress = int(match.groupdict()['map_progress'])
reduce_progress = int(match.groupdict()['reduce_progress'])
stages[stage_number] = (map_progress + reduce_progress) / 2
+ logging.info(
+ "Progress detail: {}, "
+ "current job {}, "
+ "total jobs: {}".format(stages, current_job, total_jobs))
- if not total_jobs or not current_job:
- return 0
stage_progress = sum(
stages.values()) / len(stages.values()) if stages else 0
@@ -708,6 +714,13 @@ def progress(cls, logs):
)
return int(progress)
+ @classmethod
+ def get_tracking_url(cls, log_lines):
+ lkp = "Tracking URL = "
+ for line in log_lines:
+ if lkp in line:
+ return line.split(lkp)[1]
+
@classmethod
def handle_cursor(cls, cursor, query, session):
"""Updates progress information"""
@@ -717,18 +730,45 @@ def handle_cursor(cls, cursor, query, session):
hive.ttypes.TOperationState.RUNNING_STATE,
)
polled = cursor.poll()
+ last_log_line = 0
+ tracking_url = None
+ job_id = None
while polled.operationState in unfinished_states:
query = session.query(type(query)).filter_by(id=query.id).one()
if query.status == QueryStatus.STOPPED:
cursor.cancel()
break
- resp = cursor.fetch_logs()
- if resp and resp.log:
- progress = cls.progress(resp.log)
+ log = cursor.fetch_logs() or ''
+ if log:
+ log_lines = log.splitlines()
+ progress = cls.progress(log_lines)
+ logging.info("Progress total: {}".format(progress))
+ needs_commit = False
if progress > query.progress:
query.progress = progress
- session.commit()
+ needs_commit = True
+ if not tracking_url:
+ tracking_url = cls.get_tracking_url(log_lines)
+ if tracking_url:
+ job_id = tracking_url.split('/')[-2]
+ logging.info(
+ "Found the tracking url: {}".format(tracking_url))
+ tracking_url = tracking_url_trans(tracking_url)
+ logging.info(
+ "Transformation applied: {}".format(tracking_url))
+ query.tracking_url = tracking_url
+ logging.info("Job id: {}".format(job_id))
+ needs_commit = True
+ if job_id and len(log_lines) > last_log_line:
+ # Wait for job id before logging things out
+ # this allows for prefixing all log lines and becoming
+ # searchable in something like Kibana
+ for l in log_lines[last_log_line:]:
+ logging.info("[{}] {}".format(job_id, l))
+ last_log_line = len(log_lines)
+ if needs_commit:
+ session.commit()
time.sleep(5)
polled = cursor.poll()
@@ -910,6 +950,59 @@ def convert_dttm(cls, target_type, dttm):
dttm.strftime('%Y-%m-%d %H:%M:%S'))
return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
+
+class BQEngineSpec(BaseEngineSpec):
+ """Engine spec for Google's BigQuery
+
+ As contributed by @mxmzdlv on issue #945"""
+ engine = 'bigquery'
+
+ time_grains = (
+ Grain("Time Column", _('Time Column'), "{col}"),
+ Grain("second", _('second'), "TIMESTAMP_TRUNC({col}, SECOND)"),
+ Grain("minute", _('minute'), "TIMESTAMP_TRUNC({col}, MINUTE)"),
+ Grain("hour", _('hour'), "TIMESTAMP_TRUNC({col}, HOUR)"),
+ Grain("day", _('day'), "TIMESTAMP_TRUNC({col}, DAY)"),
+ Grain("week", _('week'), "TIMESTAMP_TRUNC({col}, WEEK)"),
+ Grain("month", _('month'), "TIMESTAMP_TRUNC({col}, MONTH)"),
+ Grain("quarter", _('quarter'), "TIMESTAMP_TRUNC({col}, QUARTER)"),
+ Grain("year", _('year'), "TIMESTAMP_TRUNC({col}, YEAR)"),
+ )
+
+ @classmethod
+ def convert_dttm(cls, target_type, dttm):
+ tt = target_type.upper()
+ if tt == 'DATE':
+ return "'{}'".format(dttm.strftime('%Y-%m-%d'))
+ else:
+ return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
+
+
+class ImpalaEngineSpec(BaseEngineSpec):
+ """Engine spec for Cloudera's Impala"""
+
+ engine = 'impala'
+
+ time_grains = (
+ Grain("Time Column", _('Time Column'), "{col}"),
+ Grain("minute", _('minute'), "TRUNC({col}, 'MI')"),
+ Grain("hour", _('hour'), "TRUNC({col}, 'HH')"),
+ Grain("day", _('day'), "TRUNC({col}, 'DD')"),
+ Grain("week", _('week'), "TRUNC({col}, 'WW')"),
+ Grain("month", _('month'), "TRUNC({col}, 'MONTH')"),
+ Grain("quarter", _('quarter'), "TRUNC({col}, 'Q')"),
+ Grain("year", _('year'), "TRUNC({col}, 'YYYY')"),
+ )
+
+ @classmethod
+ def convert_dttm(cls, target_type, dttm):
+ tt = target_type.upper()
+ if tt == 'DATE':
+ return "'{}'".format(dttm.strftime('%Y-%m-%d'))
+ else:
+ return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S'))
+
+
engines = {
o.engine: o for o in globals().values()
if inspect.isclass(o) and issubclass(o, BaseEngineSpec)}
diff --git a/superset/db_engines/hive.py b/superset/db_engines/hive.py
index d3244feac62b0..a31b4d7f323d8 100644
--- a/superset/db_engines/hive.py
+++ b/superset/db_engines/hive.py
@@ -1,5 +1,6 @@
from pyhive import hive
from pythrifthiveapi.TCLIService import ttypes
+from thrift import Thrift
# TODO: contribute back to pyhive.
@@ -15,9 +16,11 @@ def fetch_logs(self, max_rows=1024,
"""
try:
req = ttypes.TGetLogReq(operationHandle=self._operationHandle)
- logs = self._connection.client.GetLog(req)
+ logs = self._connection.client.GetLog(req).log
return logs
- except ttypes.TApplicationException as e: # raised if Hive is used
+ # raised if Hive is used
+ except (ttypes.TApplicationException,
+ Thrift.TApplicationException):
if self._state == self._STATE_NONE:
raise hive.ProgrammingError("No query yet")
logs = []
@@ -30,12 +33,11 @@ def fetch_logs(self, max_rows=1024,
)
response = self._connection.client.FetchResults(req)
hive._check_status(response)
- assert not (
- response.results.rows, 'expected data in columnar format'
- )
+ assert not response.results.rows, \
+ 'expected data in columnar format'
assert len(response.results.columns) == 1, response.results.columns
new_logs = hive._unwrap_column(response.results.columns[0])
logs += new_logs
if not new_logs:
break
- return logs
+ return '\n'.join(logs)
diff --git a/superset/migrations/versions/ca69c70ec99b_tracking_url.py b/superset/migrations/versions/ca69c70ec99b_tracking_url.py
new file mode 100644
index 0000000000000..8a2ef38295c67
--- /dev/null
+++ b/superset/migrations/versions/ca69c70ec99b_tracking_url.py
@@ -0,0 +1,23 @@
+"""tracking_url
+
+Revision ID: ca69c70ec99b
+Revises: a65458420354
+Create Date: 2017-07-26 20:09:52.606416
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'ca69c70ec99b'
+down_revision = 'a65458420354'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+
+def upgrade():
+ op.add_column('query', sa.Column('tracking_url', sa.Text(), nullable=True))
+
+
+def downgrade():
+ op.drop_column('query', 'tracking_url')
diff --git a/superset/models/core.py b/superset/models/core.py
index 5527f11d45291..43cbeff5ae471 100644
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -411,6 +411,7 @@ def alter_positions(dashboard, old_to_new_slc_id_dict):
slices = copy(dashboard_to_import.slices)
old_to_new_slc_id_dict = {}
new_filter_immune_slices = []
+ new_timed_refresh_immune_slices = []
new_expanded_slices = {}
i_params_dict = dashboard_to_import.params_dict
for slc in slices:
@@ -424,6 +425,10 @@ def alter_positions(dashboard, old_to_new_slc_id_dict):
if ('filter_immune_slices' in i_params_dict and
old_slc_id_str in i_params_dict['filter_immune_slices']):
new_filter_immune_slices.append(new_slc_id_str)
+ if ('timed_refresh_immune_slices' in i_params_dict and
+ old_slc_id_str in
+ i_params_dict['timed_refresh_immune_slices']):
+ new_timed_refresh_immune_slices.append(new_slc_id_str)
if ('expanded_slices' in i_params_dict and
old_slc_id_str in i_params_dict['expanded_slices']):
new_expanded_slices[new_slc_id_str] = (
@@ -446,6 +451,9 @@ def alter_positions(dashboard, old_to_new_slc_id_dict):
if new_filter_immune_slices:
dashboard_to_import.alter_params(
filter_immune_slices=new_filter_immune_slices)
+ if new_timed_refresh_immune_slices:
+ dashboard_to_import.alter_params(
+ timed_refresh_immune_slices=new_timed_refresh_immune_slices)
new_slices = session.query(Slice).filter(
Slice.id.in_(old_to_new_slc_id_dict.values())).all()
diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py
index 00eb388150041..e2e125ad2438a 100644
--- a/superset/models/sql_lab.py
+++ b/superset/models/sql_lab.py
@@ -69,6 +69,7 @@ class Query(Model):
start_running_time = Column(Numeric(precision=20, scale=6))
end_time = Column(Numeric(precision=20, scale=6))
end_result_backend_time = Column(Numeric(precision=20, scale=6))
+ tracking_url = Column(Text)
changed_on = Column(
DateTime,
@@ -119,6 +120,7 @@ def to_dict(self):
'user': self.user.username,
'limit_reached': self.limit_reached,
'resultsKey': self.results_key,
+ 'trackingUrl': self.tracking_url,
}
@property
diff --git a/superset/security.py b/superset/security.py
index 3db268be0333f..012891143e45c 100644
--- a/superset/security.py
+++ b/superset/security.py
@@ -141,12 +141,14 @@ def is_granter_pvm(pvm):
'can_approve'}
-def set_role(role_name, pvms, pvm_check):
+def set_role(role_name, pvm_check):
logging.info("Syncing {} perms".format(role_name))
+ sesh = sm.get_session()
+ pvms = sesh.query(ab_models.PermissionView).all()
+ pvms = [p for p in pvms if p.permission and p.view_menu]
role = sm.add_role(role_name)
role_pvms = [p for p in pvms if pvm_check(p)]
role.permissions = role_pvms
- sesh = sm.get_session()
sesh.merge(role)
sesh.commit()
@@ -157,41 +159,40 @@ def create_custom_permissions():
merge_perm(sm, 'all_database_access', 'all_database_access')
-def create_missing_datasource_perms(view_menu_set):
+def create_missing_perms():
+ """Creates missing perms for datasources, schemas and metrics"""
+
+ logging.info(
+ "Fetching a set of all perms to lookup which ones are missing")
+ all_pvs = set()
+ for pv in sm.get_session.query(sm.permissionview_model).all():
+ if pv.permission and pv.view_menu:
+ all_pvs.add((pv.permission.name, pv.view_menu.name))
+
+ def merge_pv(view_menu, perm):
+ """Create permission view menu only if it doesn't exist"""
+ if view_menu and perm and (view_menu, perm) not in all_pvs:
+ merge_perm(sm, view_menu, perm)
+
logging.info("Creating missing datasource permissions.")
- datasources = ConnectorRegistry.get_all_datasources(
- db.session)
+ datasources = ConnectorRegistry.get_all_datasources(db.session)
for datasource in datasources:
- if datasource and datasource.perm not in view_menu_set:
- merge_perm(sm, 'datasource_access', datasource.get_perm())
- if datasource.schema_perm:
- merge_perm(sm, 'schema_access', datasource.schema_perm)
-
+ merge_pv('datasource_access', datasource.get_perm())
+ merge_pv('schema_access', datasource.schema_perm)
-def create_missing_database_perms(view_menu_set):
logging.info("Creating missing database permissions.")
databases = db.session.query(models.Database).all()
for database in databases:
- if database and database.perm not in view_menu_set:
- merge_perm(sm, 'database_access', database.perm)
+ merge_pv('database_access', database.perm)
-
-def create_missing_metrics_perm(view_menu_set):
- """Create permissions for restricted metrics
-
- :param metrics: a list of metrics to be processed, if not specified,
- all metrics are processed
- :type metrics: models.SqlMetric or models.DruidMetric
- """
logging.info("Creating missing metrics permissions")
metrics = []
for datasource_class in ConnectorRegistry.sources.values():
metrics += list(db.session.query(datasource_class.metric_class).all())
for metric in metrics:
- if (metric.is_restricted and metric.perm and
- metric.perm not in view_menu_set):
- merge_perm(sm, 'metric_access', metric.perm)
+ if (metric.is_restricted):
+ merge_pv('metric_access', metric.perm)
def sync_role_definitions():
@@ -201,31 +202,17 @@ def sync_role_definitions():
get_or_create_main_db()
create_custom_permissions()
- pvms = db.session.query(ab_models.PermissionView).all()
- pvms = [p for p in pvms if p.permission and p.view_menu]
-
- # cleanup
- pvms_to_delete = [p for p in pvms if not (p.permission and p.view_menu)]
-
- for pvm_to_delete in pvms_to_delete:
- sm.get_session.delete(pvm_to_delete)
-
# Creating default roles
- set_role('Admin', pvms, is_admin_pvm)
- set_role('Alpha', pvms, is_alpha_pvm)
- set_role('Gamma', pvms, is_gamma_pvm)
- set_role('granter', pvms, is_granter_pvm)
- set_role('sql_lab', pvms, is_sql_lab_pvm)
+ set_role('Admin', is_admin_pvm)
+ set_role('Alpha', is_alpha_pvm)
+ set_role('Gamma', is_gamma_pvm)
+ set_role('granter', is_granter_pvm)
+ set_role('sql_lab', is_sql_lab_pvm)
if conf.get('PUBLIC_ROLE_LIKE_GAMMA', False):
- set_role('Public', pvms, is_gamma_pvm)
+ set_role('Public', is_gamma_pvm)
- view_menu_set = []
- for datasource_class in ConnectorRegistry.sources.values():
- view_menu_set += list(db.session.query(datasource_class).all())
- create_missing_datasource_perms(view_menu_set)
- create_missing_database_perms(view_menu_set)
- create_missing_metrics_perm(view_menu_set)
+ create_missing_perms()
# commit role and view menu updates
sm.get_session.commit()
diff --git a/superset/sql_lab.py b/superset/sql_lab.py
index 4b0bd863bcd04..1d0e89247eb09 100644
--- a/superset/sql_lab.py
+++ b/superset/sql_lab.py
@@ -98,6 +98,7 @@ def get_sql_results(
query.status = QueryStatus.FAILED
query.tmp_table_name = None
sesh.commit()
+ raise
def execute_sql(ctask, query_id, return_results=True, store_results=False):
@@ -154,7 +155,6 @@ def handle_error(msg):
template_processor = get_template_processor(
database=database, query=query)
executed_sql = template_processor.process_template(executed_sql)
- executed_sql = db_engine_spec.sql_preprocessor(executed_sql)
except Exception as e:
logging.exception(e)
msg = "Template rendering failed: " + utils.error_msg_from_exception(e)
@@ -193,6 +193,9 @@ def handle_error(msg):
conn.close()
return handle_error(db_engine_spec.extract_error_message(e))
+ logging.info("Fetching cursor description")
+ cursor_description = cursor.description
+
conn.commit()
conn.close()
@@ -204,7 +207,7 @@ def handle_error(msg):
}, default=utils.json_iso_dttm_ser)
column_names = (
- [col[0] for col in cursor.description] if cursor.description else [])
+ [col[0] for col in cursor_description] if cursor_description else [])
column_names = dedup(column_names)
cdf = dataframe.SupersetDataFrame(pd.DataFrame(
list(data), columns=column_names))
diff --git a/superset/views/core.py b/superset/views/core.py
index 8a5de069efe19..a10e8848e2de0 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -170,6 +170,12 @@ def generate_download_headers(extension):
class DatabaseView(SupersetModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.Database)
+
+ list_title = _('List Databases')
+ show_title = _('Show Database')
+ add_title = _('Add Database')
+ edit_title = _('Edit Database')
+
list_columns = [
'database_name', 'backend', 'allow_run_sync', 'allow_run_async',
'allow_dml', 'creator', 'modified']
@@ -319,6 +325,12 @@ class AccessRequestsModelView(SupersetModelView, DeleteMixin):
class SliceModelView(SupersetModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.Slice)
+
+ list_title = _('List Slices')
+ show_title = _('Show Slice')
+ add_title = _('Add Slice')
+ edit_title = _('Edit Slice')
+
can_add = False
label_columns = {
'datasource_link': 'Datasource',
@@ -415,6 +427,12 @@ class SliceAddView(SliceModelView): # noqa
class DashboardModelView(SupersetModelView, DeleteMixin): # noqa
datamodel = SQLAInterface(models.Dashboard)
+
+ list_title = _('List Dashboards')
+ show_title = _('Show Dashboard')
+ add_title = _('Add Dashboard')
+ edit_title = _('Edit Dashboard')
+
list_columns = ['dashboard_link', 'creator', 'modified']
edit_columns = [
'dashboard_title', 'slug', 'slices', 'owners', 'position_json', 'css',
@@ -682,7 +700,8 @@ def json_response(self, obj, status=200):
@expose("/datasources/")
def datasources(self):
datasources = ConnectorRegistry.get_all_datasources(db.session)
- datasources = [(str(o.id) + '__' + o.type, repr(o)) for o in datasources]
+ datasources = [o.short_data for o in datasources]
+ datasources = sorted(datasources, key=lambda o: o['name'])
return self.json_response(datasources)
@has_access_api
@@ -1324,6 +1343,8 @@ def _set_dash_metadata(dashboard, data):
if 'filter_immune_slices' not in md:
md['filter_immune_slices'] = []
+ if 'timed_refresh_immune_slices' not in md:
+ md['timed_refresh_immune_slices'] = []
if 'filter_immune_slice_fields' not in md:
md['filter_immune_slice_fields'] = {}
md['expanded_slices'] = data['expanded_slices']
@@ -1374,7 +1395,7 @@ def testconn(self):
.get('connect_args', {}))
engine = create_engine(uri, connect_args=connect_args)
engine.connect()
- return json.dumps(engine.table_names(), indent=4)
+ return json_success(json.dumps(engine.table_names(), indent=4))
except Exception as e:
logging.exception(e)
return json_error_response((
@@ -2012,6 +2033,7 @@ def sql_json(self):
# Async request.
if async:
+ logging.info("Running query on a Celery worker")
# Ignore the celery future object and the request may time out.
try:
sql_lab.get_sql_results.delay(
@@ -2061,6 +2083,7 @@ def sql_json(self):
@log_this
def csv(self, client_id):
"""Download the query results as csv."""
+ logging.info("Exporting CSV file [{}]".format(client_id))
query = (
db.session.query(Query)
.filter_by(client_id=client_id)
@@ -2074,14 +2097,20 @@ def csv(self, client_id):
return redirect('/')
blob = None
if results_backend and query.results_key:
+ logging.info(
+ "Fetching CSV from results backend "
+ "[{}]".format(query.results_key))
blob = results_backend.get(query.results_key)
if blob:
+ logging.info("Decompressing")
json_payload = utils.zlib_decompress_to_string(blob)
obj = json.loads(json_payload)
columns = [c['name'] for c in obj['columns']]
df = pd.DataFrame.from_records(obj['data'], columns=columns)
+ logging.info("Using pandas to convert to CSV")
csv = df.to_csv(index=False, encoding='utf-8')
else:
+ logging.info("Running a query to turn into CSV")
sql = query.select_sql or query.executed_sql
df = query.database.get_df(sql, query.schema)
# TODO(bkyryliuk): add compression=gzip for big files.
@@ -2089,6 +2118,7 @@ def csv(self, client_id):
response = Response(csv, mimetype='text/csv')
response.headers['Content-Disposition'] = (
'attachment; filename={}.csv'.format(query.name))
+ logging.info("Ready to return response")
return response
@has_access
diff --git a/superset/views/sql_lab.py b/superset/views/sql_lab.py
index 16a8dd21c9477..03f382e9cc79d 100644
--- a/superset/views/sql_lab.py
+++ b/superset/views/sql_lab.py
@@ -4,6 +4,7 @@
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import gettext as __
+from flask_babel import lazy_gettext as _
from superset import appbuilder
from superset.models.sql_lab import Query, SavedQuery
@@ -25,6 +26,12 @@ class QueryView(SupersetModelView):
class SavedQueryView(SupersetModelView, DeleteMixin):
datamodel = SQLAInterface(SavedQuery)
+
+ list_title = _('List Saved Query')
+ show_title = _('Show Saved Query')
+ add_title = _('Add Saved Query')
+ edit_title = _('Edit Saved Query')
+
list_columns = [
'label', 'user', 'database', 'schema', 'description',
'modified', 'pop_tab_link']
diff --git a/superset/viz.py b/superset/viz.py
index 75cb4113b7d8d..6606f012459b7 100755
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -16,6 +16,7 @@
import zlib
from collections import OrderedDict, defaultdict
+from itertools import product
from datetime import datetime, timedelta
import pandas as pd
@@ -113,6 +114,13 @@ def query_obj(self):
form_data = self.form_data
groupby = form_data.get("groupby") or []
metrics = form_data.get("metrics") or []
+ columns = form_data.get("columns") or []
+ groupby = list(set(groupby + columns))
+
+ is_timeseries = self.is_timeseries
+ if DTTM_ALIAS in groupby:
+ groupby.remove(DTTM_ALIAS)
+ is_timeseries = True
# extra_filters are temporary/contextual filters that are external
# to the slice definition. We use those for dynamic interactive
@@ -172,7 +180,7 @@ def query_obj(self):
'granularity': granularity,
'from_dttm': from_dttm,
'to_dttm': to_dttm,
- 'is_timeseries': self.is_timeseries,
+ 'is_timeseries': is_timeseries,
'groupby': groupby,
'metrics': metrics,
'row_limit': row_limit,
@@ -352,6 +360,12 @@ def get_data(self, df):
columns=list(df.columns),
)
+ def json_dumps(self, obj):
+ if self.form_data.get('all_columns'):
+ return json.dumps(obj, default=utils.json_iso_dttm_ser)
+ else:
+ return super(TableViz, self).json_dumps(obj)
+
class PivotTableViz(BaseViz):
@@ -378,9 +392,7 @@ def query_obj(self):
if (
any(v in groupby for v in columns) or
any(v in columns for v in groupby)):
- raise Exception("groupby and columns can't overlap")
-
- d['groupby'] = list(set(groupby) | set(columns))
+ raise Exception(""""Group By" and "Columns" can't overlap""")
return d
def get_data(self, df):
@@ -393,8 +405,11 @@ def get_data(self, df):
columns=self.form_data.get('columns'),
values=self.form_data.get('metrics'),
aggfunc=self.form_data.get('pandas_aggfunc'),
- margins=True,
+ margins=self.form_data.get('pivot_margins'),
)
+ # Display metrics side by side with each column
+ if self.form_data.get('combine_metric'):
+ df = df.stack(0).unstack()
return dict(
columns=list(df.columns),
html=df.to_html(
@@ -1072,14 +1087,14 @@ class DistributionBarViz(DistributionPieViz):
def query_obj(self):
d = super(DistributionBarViz, self).query_obj() # noqa
fd = self.form_data
- gb = fd.get('groupby') or []
- cols = fd.get('columns') or []
- d['groupby'] = set(gb + cols)
- if len(d['groupby']) < len(gb) + len(cols):
+ if (
+ len(d['groupby']) <
+ len(fd.get('groupby') or []) + len(fd.get('columns') or [])
+ ):
raise Exception("Can't have overlap between Series and Breakdowns")
- if not self.metrics:
+ if not fd.get('metrics'):
raise Exception("Pick at least one metric")
- if not self.groupby:
+ if not fd.get('groupby'):
raise Exception("Pick at least one field for [Series]")
return d
@@ -1231,6 +1246,39 @@ def get_data(self, df):
return df.to_dict(orient='records')
+class ChordViz(BaseViz):
+
+ """A Chord diagram"""
+
+ viz_type = "chord"
+ verbose_name = _("Chord Diagram")
+ credits = 'Bostock '
+ is_timeseries = False
+
+ def query_obj(self):
+ qry = super(ChordViz, self).query_obj()
+ fd = self.form_data
+ qry['groupby'] = [fd.get('groupby'), fd.get('columns')]
+ qry['metrics'] = [fd.get('metric')]
+ return qry
+
+ def get_data(self, df):
+ df.columns = ['source', 'target', 'value']
+
+ # Preparing a symmetrical matrix as d3.chord expects
+ nodes = list(set(df['source']) | set(df['target']))
+ matrix = {}
+ for source, target in product(nodes, nodes):
+ matrix[(source, target)] = 0
+ for source, target, value in df.to_records(index=False):
+ matrix[(source, target)] = value
+ m = [[matrix[(n1, n2)] for n1 in nodes] for n2 in nodes]
+ return {
+ 'nodes': list(nodes),
+ 'matrix': m,
+ }
+
+
class CountryMapViz(BaseViz):
"""A country centric"""
@@ -1553,6 +1601,35 @@ def get_data(self, df):
"color": fd.get("mapbox_color"),
}
+class EventFlowViz(BaseViz):
+ """A visualization to explore patterns in event sequences"""
+
+ viz_type = "event_flow"
+ verbose_name = _("Event flow")
+ credits = 'from @data-ui '
+ is_timeseries = True
+
+ def query_obj(self):
+ query = super(EventFlowViz, self).query_obj()
+ form_data = self.form_data
+
+ event_key = form_data.get('all_columns_x')
+ entity_key = form_data.get('entity')
+ meta_keys = [
+ col for col in form_data.get('all_columns') if col != event_key and col != entity_key
+ ]
+
+ query['columns'] = [event_key, entity_key] + meta_keys
+
+ if form_data['order_by_entity']:
+ query['orderby'] = [(entity_key, True)]
+
+ return query
+
+ def get_data(self, df):
+ return df.to_dict(orient="records")
+
+
viz_types_list = [
TableViz,
@@ -1574,6 +1651,7 @@ def get_data(self, df):
DirectedForceViz,
SankeyViz,
CountryMapViz,
+ ChordViz,
WorldMapViz,
FilterBoxViz,
IFrameViz,
@@ -1586,6 +1664,7 @@ def get_data(self, df):
MapboxViz,
HistogramViz,
SeparatorViz,
+ EventFlowViz,
]
viz_types = OrderedDict([(v.viz_type, v) for v in viz_types_list
diff --git a/tests/core_tests.py b/tests/core_tests.py
index 7ff48a9081a8c..f3e98873a63fa 100644
--- a/tests/core_tests.py
+++ b/tests/core_tests.py
@@ -284,6 +284,7 @@ def test_testconn(self):
})
response = self.client.post('/superset/testconn', data=data, content_type='application/json')
assert response.status_code == 200
+ assert response.headers['Content-Type'] == 'application/json'
# validate that the endpoint works with the decrypted sqlalchemy uri
data = json.dumps({
@@ -292,6 +293,7 @@ def test_testconn(self):
})
response = self.client.post('/superset/testconn', data=data, content_type='application/json')
assert response.status_code == 200
+ assert response.headers['Content-Type'] == 'application/json'
def test_databaseview_edit(self, username='admin'):
# validate that sending a password-masked uri does not over-write the decrypted uri
diff --git a/tests/db_engine_specs_test.py b/tests/db_engine_specs_test.py
index 626a97bb3f9c3..a3038132c0a8c 100644
--- a/tests/db_engine_specs_test.py
+++ b/tests/db_engine_specs_test.py
@@ -5,7 +5,7 @@
import unittest
-from superset import db_engine_specs
+from superset.db_engine_specs import HiveEngineSpec
class DbEngineSpecsTestCase(unittest.TestCase):
@@ -13,36 +13,38 @@ def test_0_progress(self):
log = """
17/02/07 18:26:27 INFO log.PerfLogger:
17/02/07 18:26:27 INFO log.PerfLogger:
- """
- self.assertEquals(0, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(
+ 0, HiveEngineSpec.progress(log))
def test_0_progress(self):
log = """
17/02/07 18:26:27 INFO log.PerfLogger:
17/02/07 18:26:27 INFO log.PerfLogger:
- """
- self.assertEquals(0, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(
+ 0, HiveEngineSpec.progress(log))
def test_number_of_jobs_progress(self):
log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
- """
- self.assertEquals(0, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(0, HiveEngineSpec.progress(log))
def test_job_1_launched_progress(self):
log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
- """
- self.assertEquals(0, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(0, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_1_0_progress(self):
log = """
17/02/07 19:15:55 INFO ql.Driver: Total jobs = 2
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
- """
- self.assertEquals(0, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(0, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_1_map_40_progress(self):
log = """
@@ -50,8 +52,8 @@ def test_job_1_launched_stage_1_map_40_progress(self):
17/02/07 19:15:55 INFO ql.Driver: Launching Job 1 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0%
- """
- self.assertEquals(10, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(10, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_1_map_80_reduce_40_progress(self):
log = """
@@ -60,8 +62,8 @@ def test_job_1_launched_stage_1_map_80_reduce_40_progress(self):
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 80%, reduce = 40%
- """
- self.assertEquals(30, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(30, HiveEngineSpec.progress(log))
def test_job_1_launched_stage_2_stages_progress(self):
log = """
@@ -72,8 +74,8 @@ def test_job_1_launched_stage_2_stages_progress(self):
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 80%, reduce = 40%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-2 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 100%, reduce = 0%
- """
- self.assertEquals(12, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(12, HiveEngineSpec.progress(log))
def test_job_2_launched_stage_2_stages_progress(self):
log = """
@@ -83,5 +85,5 @@ def test_job_2_launched_stage_2_stages_progress(self):
17/02/07 19:15:55 INFO ql.Driver: Launching Job 2 out of 2
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 0%, reduce = 0%
17/02/07 19:16:09 INFO exec.Task: 2017-02-07 19:16:09,173 Stage-1 map = 40%, reduce = 0%
- """
- self.assertEquals(60, db_engine_specs.HiveEngineSpec.progress(log))
+ """.split('\n')
+ self.assertEquals(60, HiveEngineSpec.progress(log))
diff --git a/tests/druid_tests.py b/tests/druid_tests.py
index d7b93dee0638e..637afe984ce02 100644
--- a/tests/druid_tests.py
+++ b/tests/druid_tests.py
@@ -11,8 +11,8 @@
from mock import Mock, patch
from superset import db, sm, security
-from superset.connectors.druid.models import DruidCluster, DruidDatasource
-from superset.connectors.druid.models import PyDruid
+from superset.connectors.druid.models import DruidMetric, DruidCluster, DruidDatasource
+from superset.connectors.druid.models import PyDruid, Quantile, Postaggregator
from .base_tests import SupersetTestCase
@@ -38,7 +38,7 @@
"metric1": {
"type": "longSum",
"name": "metric1",
- "fieldName": "metric1"}
+ "fieldName": "metric1"},
},
"size": 300000,
"numRows": 5000000
@@ -318,6 +318,77 @@ def test_sync_druid_perm(self, PyDruid):
permission=permission, view_menu=view_menu).first()
assert pv is not None
+ def test_metrics_and_post_aggs(self):
+ """
+ Test generation of metrics and post-aggregations from an initial list
+ of superset metrics (which may include the results of either). This
+ primarily tests that specifying a post-aggregator metric will also
+ require the raw aggregation of the associated druid metric column.
+ """
+ metrics_dict = {
+ 'unused_count': DruidMetric(
+ metric_name='unused_count',
+ verbose_name='COUNT(*)',
+ metric_type='count',
+ json=json.dumps({'type': 'count', 'name': 'unused_count'})),
+ 'some_sum': DruidMetric(
+ metric_name='some_sum',
+ verbose_name='SUM(*)',
+ metric_type='sum',
+ json=json.dumps({'type': 'sum', 'name': 'sum'})),
+ 'a_histogram': DruidMetric(
+ metric_name='a_histogram',
+ verbose_name='APPROXIMATE_HISTOGRAM(*)',
+ metric_type='approxHistogramFold',
+ json=json.dumps({'type': 'approxHistogramFold', 'name': 'a_histogram'})),
+ 'aCustomMetric': DruidMetric(
+ metric_name='aCustomMetric',
+ verbose_name='MY_AWESOME_METRIC(*)',
+ metric_type='aCustomType',
+ json=json.dumps({'type': 'customMetric', 'name': 'aCustomMetric'})),
+ 'quantile_p95': DruidMetric(
+ metric_name='quantile_p95',
+ verbose_name='P95(*)',
+ metric_type='postagg',
+ json=json.dumps({
+ 'type': 'quantile',
+ 'probability': 0.95,
+ 'name': 'p95',
+ 'fieldName': 'a_histogram'})),
+ 'aCustomPostAgg': DruidMetric(
+ metric_name='aCustomPostAgg',
+ verbose_name='CUSTOM_POST_AGG(*)',
+ metric_type='postagg',
+ json=json.dumps({
+ 'type': 'customPostAgg',
+ 'name': 'aCustomPostAgg',
+ 'field': {
+ 'type': 'fieldAccess',
+ 'fieldName': 'aCustomMetric'}})),
+ }
+
+ metrics = ['some_sum']
+ all_metrics, post_aggs = DruidDatasource._metrics_and_post_aggs(
+ metrics, metrics_dict)
+
+ assert all_metrics == ['some_sum']
+ assert post_aggs == {}
+
+ metrics = ['quantile_p95']
+ all_metrics, post_aggs = DruidDatasource._metrics_and_post_aggs(
+ metrics, metrics_dict)
+
+ result_postaggs = set(['quantile_p95'])
+ assert all_metrics == ['a_histogram']
+ assert set(post_aggs.keys()) == result_postaggs
+
+ metrics = ['aCustomPostAgg']
+ all_metrics, post_aggs = DruidDatasource._metrics_and_post_aggs(
+ metrics, metrics_dict)
+
+ result_postaggs = set(['aCustomPostAgg'])
+ assert all_metrics == ['aCustomMetric']
+ assert set(post_aggs.keys()) == result_postaggs
if __name__ == '__main__':
diff --git a/tests/sqllab_tests.py b/tests/sqllab_tests.py
index 9e59adc7dd952..29d74f4dc3502 100644
--- a/tests/sqllab_tests.py
+++ b/tests/sqllab_tests.py
@@ -189,12 +189,9 @@ def test_search_query_on_time(self):
from_time = 'from={}'.format(int(first_query_time))
to_time = 'to={}'.format(int(second_query_time))
params = [from_time, to_time]
- resp = self.get_resp('/superset/search_queries?'+'&'.join(params))
+ resp = self.get_resp('/superset/search_queries?' + '&'.join(params))
data = json.loads(resp)
self.assertEquals(2, len(data))
- for k in data:
- self.assertLess(int(first_query_time), k['startDttm'])
- self.assertLess(k['startDttm'], int(second_query_time))
def test_alias_duplicate(self):
self.run_sql(