Skip to content

Commit

Permalink
Allow query to display while slice is loading (#2100)
Browse files Browse the repository at this point in the history
* Allow query to display while slice is loading

* Put latestQueryFormData in store

* Reorganized query function, got rid of tuple return values
  • Loading branch information
vera-liu authored and mistercrunch committed Feb 11, 2017
1 parent 9f49307 commit 0dcabaf
Show file tree
Hide file tree
Showing 9 changed files with 133 additions and 65 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,8 @@ const propTypes = {
slice: PropTypes.object.isRequired,
table_name: PropTypes.string,
viz_type: PropTypes.string.isRequired,
formData: PropTypes.object,
latestQueryFormData: PropTypes.object,
};

class ChartContainer extends React.PureComponent {
Expand Down Expand Up @@ -226,13 +228,12 @@ class ChartContainer extends React.PureComponent {
status={CHART_STATUS_MAP[this.props.chartStatus]}
style={{ fontSize: '10px', marginRight: '5px' }}
/>
{this.state.mockSlice &&
<ExploreActionButtons
slice={this.state.mockSlice}
canDownload={this.props.can_download}
query={this.props.queryResponse.query}
/>
}
<ExploreActionButtons
slice={this.state.mockSlice}
canDownload={this.props.can_download}
queryEndpoint={getExploreUrl(
this.props.latestQueryFormData, this.props.datasource_type, 'query')}
/>
</div>
</div>
}
Expand All @@ -256,8 +257,8 @@ function mapStateToProps(state) {
chartUpdateStartTime: state.chartUpdateStartTime,
column_formats: state.datasource ? state.datasource.column_formats : null,
containerId: state.slice ? `slice-container-${state.slice.slice_id}` : 'slice-container',
datasource_type: state.datasource_type,
formData,
latestQueryFormData: state.latestQueryFormData,
isStarred: state.isStarred,
queryResponse: state.queryResponse,
slice: state.slice,
Expand Down
Original file line number Diff line number Diff line change
@@ -1,25 +1,49 @@
import React, { PropTypes } from 'react';
import ModalTrigger from './../../components/ModalTrigger';
const $ = window.$ = require('jquery');

const propTypes = {
query: PropTypes.string,
queryEndpoint: PropTypes.string.isRequired,
};

const defaultProps = {
query: '',
};

export default function DisplayQueryButton({ query }) {
const modalBody = (<pre>{query}</pre>);
return (
<ModalTrigger
isButton
triggerNode={<span>Query</span>}
modalTitle="Query"
modalBody={modalBody}
/>
);
export default class DisplayQueryButton extends React.PureComponent {
constructor(props) {
super(props);
this.state = {
modalBody: <pre />,
};
}
beforeOpen() {
this.setState({
modalBody:
(<img
className="loading"
alt="Loading..."
src="/static/assets/images/loading.gif"
/>),
});
$.ajax({
type: 'GET',
url: this.props.queryEndpoint,
success: (data) => {
this.setState({ modalBody: (<pre>{data.query}</pre>) });
},
error(data) {
this.setState({ modalBody: (<pre>{data.error}</pre>) });
},
});
}
render() {
return (
<ModalTrigger
isButton
triggerNode={<span>Query</span>}
modalTitle="Query"
beforeOpen={this.beforeOpen.bind(this)}
modalBody={this.state.modalBody}
/>
);
}
}

DisplayQueryButton.propTypes = propTypes;
DisplayQueryButton.defaultProps = defaultProps;
Original file line number Diff line number Diff line change
Expand Up @@ -6,40 +6,47 @@ import DisplayQueryButton from './DisplayQueryButton';

const propTypes = {
canDownload: PropTypes.oneOfType([PropTypes.string, PropTypes.bool]).isRequired,
slice: PropTypes.object.isRequired,
query: PropTypes.string,
slice: PropTypes.object,
queryEndpoint: PropTypes.string,
};

export default function ExploreActionButtons({ canDownload, slice, query }) {
export default function ExploreActionButtons({ canDownload, slice, queryEndpoint }) {
const exportToCSVClasses = cx('btn btn-default btn-sm', {
'disabled disabledButton': !canDownload,
});
return (
<div className="btn-group results" role="group">
<URLShortLinkButton slice={slice} />
if (slice) {
return (
<div className="btn-group results" role="group">
<URLShortLinkButton slice={slice} />

<EmbedCodeButton slice={slice} />
<EmbedCodeButton slice={slice} />

<a
href={slice.data.json_endpoint}
className="btn btn-default btn-sm"
title="Export to .json"
target="_blank"
>
<i className="fa fa-file-code-o"></i> .json
</a>
<a
href={slice.data.json_endpoint}
className="btn btn-default btn-sm"
title="Export to .json"
target="_blank"
>
<i className="fa fa-file-code-o"></i> .json
</a>

<a
href={slice.data.csv_endpoint}
className={exportToCSVClasses}
title="Export to .csv format"
target="_blank"
>
<i className="fa fa-file-text-o"></i> .csv
</a>
<a
href={slice.data.csv_endpoint}
className={exportToCSVClasses}
title="Export to .csv format"
target="_blank"
>
<i className="fa fa-file-text-o"></i> .csv
</a>

<DisplayQueryButton query={query} />
</div>
<DisplayQueryButton
queryEndpoint={queryEndpoint}
/>
</div>
);
}
return (
<DisplayQueryButton queryEndpoint={queryEndpoint} />
);
}

Expand Down
2 changes: 2 additions & 0 deletions superset/assets/javascripts/explorev2/exploreUtils.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@ export function getExploreUrl(form_data, dummy, endpoint = 'base') {
return `/superset/explore_json/${params}&csv=true`;
case 'standalone':
return `/superset/explore/${params}&standalone=true`;
case 'query':
return `/superset/explore_json/${params}&query=true`;
default:
return `/superset/explore/${params}`;
}
Expand Down
3 changes: 2 additions & 1 deletion superset/assets/javascripts/explorev2/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import { Provider } from 'react-redux';
import thunk from 'redux-thunk';
import { now } from '../modules/dates';
import { initEnhancer } from '../reduxUtils';
import { getFieldsState } from './stores/store';
import { getFieldsState, getFormDataFromFields } from './stores/store';


// jquery and bootstrap required to make bootstrap dropdown menu's work
Expand All @@ -32,6 +32,7 @@ const bootstrappedState = Object.assign(
chartUpdateStartTime: now(),
dashboards: [],
fields,
latestQueryFormData: getFormDataFromFields(fields),
filterColumnOpts: [],
isDatasourceMetaLoading: false,
isStarred: false,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,7 @@ export const exploreReducer = function (state, action) {
chartUpdateStartTime: now(),
triggerQuery: false,
queryRequest: action.queryRequest,
latestQueryFormData: getFormDataFromFields(state.fields),
});
},
[actions.CHART_UPDATE_STOPPED]() {
Expand Down
40 changes: 27 additions & 13 deletions superset/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -1259,8 +1259,9 @@ def values_for_column(self,
con=engine
)

def query( # sqla
self, groupby, metrics,
def get_query_str( # sqla
self, engine, qry_start_dttm,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
Expand All @@ -1283,7 +1284,6 @@ def query( # sqla

cols = {col.column_name: col for col in self.columns}
metrics_dict = {m.metric_name: m for m in self.metrics}
qry_start_dttm = datetime.now()

if not granularity and is_timeseries:
raise Exception(_(
Expand Down Expand Up @@ -1437,13 +1437,18 @@ def visit_column(element, compiler, **kw):

qry = qry.select_from(tbl)

engine = self.database.get_sqla_engine()
sql = "{}".format(
qry.compile(
engine, compile_kwargs={"literal_binds": True},),
)
logging.info(sql)
sql = sqlparse.format(sql, reindent=True)
return sql

def query(self, query_obj):
qry_start_dttm = datetime.now()
engine = self.database.get_sqla_engine()
sql = self.get_query_str(engine, qry_start_dttm, **query_obj)
status = QueryStatus.SUCCESS
error_message = None
df = None
Expand Down Expand Up @@ -2281,8 +2286,9 @@ def values_for_column(self,

return df

def query( # druid
self, groupby, metrics,
def get_query_str( # druid
self, client, qry_start_dttm,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
Expand All @@ -2294,13 +2300,12 @@ def query( # druid
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, ):
columns=None, phase=2):
"""Runs a query against Druid and returns a dataframe.
This query interface is common to SqlAlchemy and Druid
"""
# TODO refactor into using a TBD Query object
qry_start_dttm = datetime.now()
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
Expand Down Expand Up @@ -2396,7 +2401,6 @@ def recursive_get_fields(_conf):
if having_filters:
qry['having'] = having_filters

client = self.cluster.get_pydruid_client()
orig_filters = filters
if len(groupby) == 0:
del qry['dimensions']
Expand Down Expand Up @@ -2435,6 +2439,8 @@ def recursive_get_fields(_conf):
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
query_str += (
"//\nPhase 2 (built based on phase one's results)\n")
df = client.export_pandas()
Expand Down Expand Up @@ -2474,27 +2480,35 @@ def recursive_get_fields(_conf):
client.groupby(**qry)
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
return query_str

def query(self, query_obj):
qry_start_dttm = datetime.now()
client = self.cluster.get_pydruid_client()
query_str = self.get_query_str(client, qry_start_dttm, **query_obj)
df = client.export_pandas()
if df is None or df.size == 0:
raise Exception(_("No data was returned."))
df.columns = [
DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]

is_timeseries = query_obj['is_timeseries'] \
if 'is_timeseries' in query_obj else True
if (
not is_timeseries and
granularity == "all" and
query_obj['granularity'] == "all" and
DTTM_ALIAS in df.columns):
del df[DTTM_ALIAS]

# Reordering columns
cols = []
if DTTM_ALIAS in df.columns:
cols += [DTTM_ALIAS]
cols += [col for col in groupby if col in df.columns]
cols += [col for col in metrics if col in df.columns]
cols += [col for col in query_obj['groupby'] if col in df.columns]
cols += [col for col in query_obj['metrics'] if col in df.columns]
df = df[cols]

time_offset = DruidDatasource.time_offset(granularity)
time_offset = DruidDatasource.time_offset(query_obj['granularity'])

def increment_timestamp(ts):
dt = utils.parse_human_datetime(ts).replace(
Expand Down
18 changes: 18 additions & 0 deletions superset/views.py
Original file line number Diff line number Diff line change
Expand Up @@ -1518,6 +1518,24 @@ def explore_json(self, datasource_type, datasource_id):
headers=generate_download_headers("csv"),
mimetype="application/csv")

if request.args.get("query") == "true":
try:
query_obj = viz_obj.query_obj()
engine = viz_obj.datasource.database.get_sqla_engine() \
if datasource_type == 'table' \
else viz_obj.datasource.cluster.get_pydruid_client()
if datasource_type == 'druid':
# only retrieve first phase query for druid
query_obj['phase'] = 1
query = viz_obj.datasource.get_query_str(
engine, datetime.now(), **query_obj)
except Exception as e:
return json_error_response(e)
return Response(
json.dumps({'query': query}),
status=200,
mimetype="application/json")

payload = {}
status = 200
try:
Expand Down
4 changes: 2 additions & 2 deletions superset/viz.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,10 +106,10 @@ def get_df(self, query_obj=None):
timestamp_format = dttm_col.python_date_format

# The datasource here can be different backend but the interface is common
self.results = self.datasource.query(**query_obj)
self.results = self.datasource.query(query_obj)
self.query = self.results.query
self.status = self.results.status
self.error_message = self.results.error_message
self.query = self.results.query

df = self.results.df
# Transform the timestamp we received from database to pandas supported
Expand Down

0 comments on commit 0dcabaf

Please sign in to comment.