fix(explore): expired form data for SQL Lab queries #21430

Closed · wants to merge 1 commit
superset-frontend/src/constants.ts (8 additions, 3 deletions)

@@ -69,14 +69,18 @@ export const URL_PARAMS = {
   },
   datasourceId: {
     name: 'datasource_id',
+    type: 'number',
+  },
+  datasourceType: {
+    name: 'datasource_type',
     type: 'string',
   },
   datasetId: {
     name: 'dataset_id',
-    type: 'string',
+    type: 'number',
   },
-  datasourceType: {
-    name: 'datasource_type',
+  datasetType: {
+    name: 'dataset_type',
     type: 'string',
   },
   dashboardId: {
@@ -115,6 +119,7 @@ export const RESERVED_CHART_URL_PARAMS: string[] = [
   URL_PARAMS.datasourceId.name,
   URL_PARAMS.datasourceType.name,
   URL_PARAMS.datasetId.name,
+  URL_PARAMS.datasetType.name,
 ];
 export const RESERVED_DASHBOARD_URL_PARAMS: string[] = [
   URL_PARAMS.nativeFilters.name,
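For context, a minimal self-contained sketch (not Superset's actual helper) of how the name/type metadata in URL_PARAMS above is typically consumed; readUrlParam and its signature are illustrative assumptions. It shows why declaring dataset_id and datasource_id with type 'number' matters: the raw query-string value is coerced before it reaches the form data.

type UrlParam = { name: string; type: 'string' | 'number' };

// Hypothetical reader for illustration only; Superset's real helper lives
// elsewhere in the codebase.
function readUrlParam(
  search: string,
  param: UrlParam,
): string | number | undefined {
  const raw = new URLSearchParams(search).get(param.name);
  if (raw === null) return undefined;
  // With type 'number', dataset_id / datasource_id come back as numbers.
  return param.type === 'number' ? Number(raw) : raw;
}

// Example: a chart built from a SQL Lab query.
const search = '?datasource_id=2&datasource_type=query';
readUrlParam(search, { name: 'datasource_id', type: 'number' }); // 2
readUrlParam(search, { name: 'datasource_type', type: 'string' }); // 'query'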
@@ -31,7 +31,7 @@ test('get form_data_key and slice_id from search params - url when moving from d
     `${EXPLORE_BASE_URL}?form_data_key=yrLXmyE9fmhQ11lM1KgaD1PoPSBpuLZIJfqdyIdw9GoBwhPFRZHeIgeFiNZljbpd&slice_id=56`,
   );
   expect(getParsedExploreURLParams().toString()).toEqual(
-    'slice_id=56&form_data_key=yrLXmyE9fmhQ11lM1KgaD1PoPSBpuLZIJfqdyIdw9GoBwhPFRZHeIgeFiNZljbpd',
+    'form_data_key=yrLXmyE9fmhQ11lM1KgaD1PoPSBpuLZIJfqdyIdw9GoBwhPFRZHeIgeFiNZljbpd&slice_id=56',
   );
 });

@@ -49,6 +49,15 @@ test('get datasource and viz type from form_data search param - url when creatin
   );
 });
 
+test('parse legacy url where datasource_id instead of dataset_id is used', () => {
+  setupLocation(
+    `${EXPLORE_BASE_URL}?viz_type=big_number&datasource_id=2&datasource_type=query`,
+  );
+  expect(getParsedExploreURLParams().toString()).toEqual(
+    'viz_type=big_number&dataset_id=2&dataset_type=query',
+  );
+});
+
 test('get permalink key from path params', () => {
   setupLocation(`${EXPLORE_BASE_URL}p/kpOqweaMY9R/`);
   expect(getParsedExploreURLParams().toString()).toEqual(
@@ -17,15 +17,29 @@
  * under the License.
  */
 
+import { URL_PARAMS } from 'src/constants';
+
 export interface Location {
   search: string;
   pathname: string;
 }
 
-// mapping { url_param: v1_explore_request_param }
 const EXPLORE_URL_SEARCH_PARAMS = {
+  // Current supported URL params
+  [URL_PARAMS.formDataKey.name]: {},
+  [URL_PARAMS.sliceId.name]: {},
+  [URL_PARAMS.vizType.name]: {},
+  [URL_PARAMS.datasetId.name]: {},
+  [URL_PARAMS.datasetType.name]: {},
+  [URL_PARAMS.datasourceId.name]: {
+    aliasOf: URL_PARAMS.datasetId.name,
+  },
+  [URL_PARAMS.datasourceType.name]: {
+    aliasOf: URL_PARAMS.datasetType.name,
+  },
+
+  // legacy URL params
   form_data: {
-    name: 'form_data',
     parser: (formData: string) => {
       const formDataObject = JSON.parse(formData);
       if (formDataObject.datasource) {
@@ -38,34 +52,16 @@ const EXPLORE_URL_SEARCH_PARAMS = {
       return formDataObject;
     },
   },
-  slice_id: {
-    name: 'slice_id',
-  },
-  dataset_id: {
-    name: 'dataset_id',
-  },
-  dataset_type: {
-    name: 'dataset_type',
-  },
   datasource: {
-    name: 'datasource',
     parser: (datasource: string) => {
       const [dataset_id, dataset_type] = datasource.split('__');
       return { dataset_id, dataset_type };
     },
   },
-  form_data_key: {
-    name: 'form_data_key',
-  },
-  permalink_key: {
-    name: 'permalink_key',
-  },
-  viz_type: {
-    name: 'viz_type',
-  },
-  dashboard_id: {
-    name: 'dashboard_id',
-  },
+
+  // URL path params
+  permalink_key: {},
+  dashboard_id: {},
 };
 
 const EXPLORE_URL_PATH_PARAMS = {
@@ -95,7 +91,8 @@ const getParsedExploreURLSearchParams = (search: string) => {
     }
     return {
       ...acc,
-      [EXPLORE_URL_SEARCH_PARAMS[currentParam].name]: parsedParamValue,
+      [EXPLORE_URL_SEARCH_PARAMS[currentParam].aliasOf || currentParam]:
+        parsedParamValue,
     };
   }, {});
 };
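To make the alias mechanism in the URL-parsing util above concrete, here is a standalone sketch with illustrative names (it is not the Superset module itself): entries carrying aliasOf are written under their canonical key, so a legacy datasource_id/datasource_type URL still populates dataset_id/dataset_type in the parsed params.

const SEARCH_PARAMS: Record<string, { aliasOf?: string }> = {
  form_data_key: {},
  slice_id: {},
  viz_type: {},
  dataset_id: {},
  dataset_type: {},
  datasource_id: { aliasOf: 'dataset_id' },
  datasource_type: { aliasOf: 'dataset_type' },
};

function parseExploreSearch(search: string): Record<string, string> {
  const urlParams = new URLSearchParams(search);
  return Object.keys(SEARCH_PARAMS).reduce<Record<string, string>>(
    (acc, param) => {
      const value = urlParams.get(param);
      if (value === null) return acc;
      // Resolve the canonical key, mirroring `aliasOf || currentParam` above.
      return { ...acc, [SEARCH_PARAMS[param].aliasOf || param]: value };
    },
    {},
  );
}

parseExploreSearch('?viz_type=big_number&datasource_id=2&datasource_type=query');
// -> { viz_type: 'big_number', dataset_id: '2', dataset_type: 'query' }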
superset-frontend/src/explore/fixtures.tsx (2 additions, 2 deletions)

@@ -159,8 +159,8 @@ export const fallbackExploreInitialData: ExplorePageInitialData = {
     verbose_map: {},
     main_dttm_col: '',
     owners: [],
-    datasource_name: 'missing_datasource',
-    name: 'missing_datasource',
+    datasource_name: '[Missing Dataset]',
+    name: '[Missing Dataset]',
     description: null,
   },
   slice: null,
superset/connectors/sqla/models.py (2 additions, 1 deletion)

@@ -126,6 +126,7 @@
 from superset.tables.models import Table as NewTable
 from superset.utils import core as utils
 from superset.utils.core import (
+    DatasourceType,
     GenericDataType,
     get_column_name,
     get_username,

@@ -676,7 +677,7 @@ def _process_sql_expression(
 class SqlaTable(Model, BaseDatasource):  # pylint: disable=too-many-public-methods
     """An ORM object for SqlAlchemy table references"""
 
-    type = "table"
+    type = DatasourceType.TABLE
     query_language = "sql"
     is_rls_supported = True
     columns: List[TableColumn] = []
superset/explore/commands/get.py (10 additions, 7 deletions)

@@ -38,7 +38,11 @@
 )
 from superset.explore.permalink.commands.get import GetExplorePermalinkCommand
 from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError
-from superset.utils import core as utils
+from superset.utils.core import (
+    DatasourceType,
+    convert_legacy_filters_into_adhoc,
+    merge_extra_filters,
+)
 from superset.views.utils import (
     get_datasource_info,
     get_form_data,

@@ -79,7 +83,6 @@ def run(self) -> Optional[Dict[str, Any]]:
             initial_form_data = json.loads(value) if value else {}
 
         message = None
-
         if not initial_form_data:
             if self._slice_id:
                 initial_form_data["slice_id"] = self._slice_id

@@ -90,7 +93,7 @@
             elif self._dataset_id:
                 initial_form_data[
                     "datasource"
-                ] = f"{self._dataset_id}__{self._dataset_type}"
+                ] = f"{self._dataset_id}__{self._dataset_type or DatasourceType.TABLE}"
                 if self._form_data_key:
                     message = _(
                         "Form data not found in cache, reverting to dataset metadata."

@@ -105,8 +108,8 @@
                 )
             except SupersetException:
                 self._dataset_id = None
-                # fallback unkonw datasource to table type
-                self._dataset_type = SqlaTable.type
+                # fallback unknown datasource to table type
+                self._dataset_type = DatasourceType.TABLE
 
         dataset: Optional[BaseDatasource] = None
         if self._dataset_id is not None:

@@ -138,8 +141,8 @@
             )
 
         # On explore, merge legacy and extra filters into the form data
-        utils.convert_legacy_filters_into_adhoc(form_data)
-        utils.merge_extra_filters(form_data)
+        convert_legacy_filters_into_adhoc(form_data)
+        merge_extra_filters(form_data)
 
         dummy_dataset_data: Dict[str, Any] = {
             "type": self._dataset_type,