diff --git a/UPDATING.md b/UPDATING.md
index c66bd04f43140..d53f7cae94792 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -27,8 +27,6 @@ assists people when migrating to a new version.
 - [20606](https://github.com/apache/superset/pull/20606): When user clicks on chart title or "Edit chart" button in Dashboard page, Explore opens in the same tab. Clicking while holding cmd/ctrl opens Explore in a new tab. To bring back the old behaviour (always opening Explore in a new tab), flip feature flag `DASHBOARD_EDIT_CHART_IN_NEW_TAB` to `True`.
 - [20799](https://github.com/apache/superset/pull/20799): Presto and Trino engine will now display tracking URL for running queries in SQL Lab. If for some reason you don't want to show the tracking URL (for example, when your data warehouse hasn't enabled access to the Presto or Trino UI), update `TRACKING_URL_TRANSFORMER` in `config.py` to return `None`.
 - [21002](https://github.com/apache/superset/pull/21002): Support Python 3.10 and bump pandas 1.4 and pyarrow 6.
-- [21163](https://github.com/apache/superset/pull/21163): When `GENERIC_CHART_AXES` feature flags set to `True`, the Time Grain control will move below the X-Axis control.
-
 
 ### Breaking Changes
 
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/sections/echartsTimeSeriesQuery.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/sections/echartsTimeSeriesQuery.tsx
index 6fe14d3457f67..66d9fb7682dbf 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/sections/echartsTimeSeriesQuery.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/sections/echartsTimeSeriesQuery.tsx
@@ -30,11 +30,6 @@ export const echartsTimeSeriesQuery: ControlPanelSectionConfig = {
   expanded: true,
   controlSetRows: [
     [isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES) ? 'x_axis' : null],
-    [
-      isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)
-        ? 'time_grain_sqla'
-        : null,
-    ],
     ['metrics'],
     ['groupby'],
     [
diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/sections/sections.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/sections/sections.tsx
index 4f4efdb82ff34..fee5f990b6bc9 100644
--- a/superset-frontend/packages/superset-ui-chart-controls/src/sections/sections.tsx
+++ b/superset-frontend/packages/superset-ui-chart-controls/src/sections/sections.tsx
@@ -16,7 +16,7 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-import { FeatureFlag, isFeatureEnabled, t } from '@superset-ui/core';
+import { t } from '@superset-ui/core';
 import { ControlPanelSectionConfig } from '../types';
 
 // A few standard controls sections that are used internally.
@@ -38,19 +38,6 @@ export const legacyTimeseriesTime: ControlPanelSectionConfig = {
   ],
 };
 
-export const genericTime: ControlPanelSectionConfig = {
-  ...baseTimeSection,
-  controlSetRows: [
-    ['granularity_sqla'],
-    [
-      isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)
-        ?
null - : 'time_grain_sqla', - ], - ['time_range'], - ], -}; - export const legacyRegularTime: ControlPanelSectionConfig = { ...baseTimeSection, controlSetRows: [['granularity_sqla'], ['time_range']], diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx index cc808381f9764..55b4326791ea8 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/index.tsx @@ -47,8 +47,6 @@ import { ComparisionType, QueryResponse, QueryColumn, - isAdhocColumn, - isPhysicalColumn, } from '@superset-ui/core'; import { @@ -325,21 +323,6 @@ const time_grain_sqla: SharedControlConfig<'SelectControl'> = { mapStateToProps: ({ datasource }) => ({ choices: (datasource as Dataset)?.time_grain_sqla || null, }), - visibility: ({ controls }) => { - if (!isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)) { - return true; - } - - const xAxis = controls?.x_axis; - const xAxisValue = xAxis?.value; - if (xAxisValue === undefined || isAdhocColumn(xAxisValue)) { - return true; - } - if (isPhysicalColumn(xAxisValue)) { - return !!xAxis?.options?.[xAxisValue]?.is_dttm; - } - return false; - }, }; const time_range: SharedControlConfig<'DateFilterControl'> = { diff --git a/superset-frontend/packages/superset-ui-core/src/query/buildQueryContext.ts b/superset-frontend/packages/superset-ui-core/src/query/buildQueryContext.ts index 4ab69ab40e8b6..dbc1289c5460d 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/buildQueryContext.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/buildQueryContext.ts @@ -23,8 +23,6 @@ import { QueryFieldAliases, QueryFormData } from './types/QueryFormData'; import { QueryContext, QueryObject } from './types/Query'; import { SetDataMaskHook } from '../chart'; import { JsonObject } from '../connection'; -import { isFeatureEnabled, FeatureFlag } from '../utils'; -import { normalizeTimeColumn } from './normalizeTimeColumn'; const WRAP_IN_ARRAY = (baseQueryObject: QueryObject) => [baseQueryObject]; @@ -47,16 +45,13 @@ export default function buildQueryContext( typeof options === 'function' ? 
{ buildQuery: options, queryFields: {} } : options || {}; - let queries = buildQuery(buildQueryObject(formData, queryFields)); + const queries = buildQuery(buildQueryObject(formData, queryFields)); queries.forEach(query => { if (Array.isArray(query.post_processing)) { // eslint-disable-next-line no-param-reassign query.post_processing = query.post_processing.filter(Boolean); } }); - if (isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES)) { - queries = queries.map(query => normalizeTimeColumn(formData, query)); - } return { datasource: new DatasourceKey(formData.datasource).toObject(), force: formData.force || false, diff --git a/superset-frontend/packages/superset-ui-core/src/query/index.ts b/superset-frontend/packages/superset-ui-core/src/query/index.ts index a539267f9d56b..9bbfbc59fba86 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/index.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/index.ts @@ -28,7 +28,6 @@ export { default as getColumnLabel } from './getColumnLabel'; export { default as getMetricLabel } from './getMetricLabel'; export { default as DatasourceKey } from './DatasourceKey'; export { default as normalizeOrderBy } from './normalizeOrderBy'; -export { normalizeTimeColumn } from './normalizeTimeColumn'; export * from './types/AnnotationLayer'; export * from './types/QueryFormData'; diff --git a/superset-frontend/packages/superset-ui-core/src/query/normalizeTimeColumn.ts b/superset-frontend/packages/superset-ui-core/src/query/normalizeTimeColumn.ts deleted file mode 100644 index 95a06fdf6c0bb..0000000000000 --- a/superset-frontend/packages/superset-ui-core/src/query/normalizeTimeColumn.ts +++ /dev/null @@ -1,83 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -import omit from 'lodash/omit'; - -import { - AdhocColumn, - isAdhocColumn, - isPhysicalColumn, - QueryFormColumn, - QueryFormData, - QueryObject, -} from './types'; -import { FeatureFlag, isFeatureEnabled } from '../utils'; - -export function normalizeTimeColumn( - formData: QueryFormData, - queryObject: QueryObject, -): QueryObject { - if (!(isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES) && formData.x_axis)) { - return queryObject; - } - - const { columns: _columns, extras: _extras } = queryObject; - const mutatedColumns: QueryFormColumn[] = [...(_columns || [])]; - const axisIdx = _columns?.findIndex( - col => - (isPhysicalColumn(col) && - isPhysicalColumn(formData.x_axis) && - col === formData.x_axis) || - (isAdhocColumn(col) && - isAdhocColumn(formData.x_axis) && - col.sqlExpression === formData.x_axis.sqlExpression), - ); - if ( - axisIdx !== undefined && - axisIdx > -1 && - formData.x_axis && - Array.isArray(_columns) - ) { - if (isAdhocColumn(_columns[axisIdx])) { - mutatedColumns[axisIdx] = { - timeGrain: _extras?.time_grain_sqla, - columnType: 'BASE_AXIS', - ...(_columns[axisIdx] as AdhocColumn), - }; - } else { - mutatedColumns[axisIdx] = { - timeGrain: _extras?.time_grain_sqla, - columnType: 'BASE_AXIS', - sqlExpression: formData.x_axis, - label: formData.x_axis, - expressionType: 'SQL', - }; - } - - const newQueryObject = omit(queryObject, [ - 'extras.time_grain_sqla', - 'is_timeseries', - ]); - newQueryObject.columns = mutatedColumns; - - return newQueryObject; - } - - // fallback, return original queryObject - return queryObject; -} diff --git a/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts b/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts index 693bf5e54aace..7afe031bcfe3e 100644 --- a/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts +++ b/superset-frontend/packages/superset-ui-core/src/query/types/Column.ts @@ -27,8 +27,6 @@ export interface AdhocColumn { optionName?: string; sqlExpression: string; expressionType: 'SQL'; - columnType?: 'BASE_AXIS' | 'SERIES'; - timeGrain?: string; } /** diff --git a/superset-frontend/packages/superset-ui-core/test/query/buildQueryContext.test.ts b/superset-frontend/packages/superset-ui-core/test/query/buildQueryContext.test.ts index 3cfbb4e3bbd1c..baae438321aec 100644 --- a/superset-frontend/packages/superset-ui-core/test/query/buildQueryContext.test.ts +++ b/superset-frontend/packages/superset-ui-core/test/query/buildQueryContext.test.ts @@ -17,7 +17,6 @@ * under the License. 
*/ import { buildQueryContext } from '@superset-ui/core'; -import * as queryModule from '../../src/query/normalizeTimeColumn'; describe('buildQueryContext', () => { it('should build datasource for table sources and apply defaults', () => { @@ -123,50 +122,4 @@ describe('buildQueryContext', () => { }, ]); }); - it('should call normalizeTimeColumn if GENERIC_CHART_AXES is enabled', () => { - // @ts-ignore - const spy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({ - featureFlags: { - GENERIC_CHART_AXES: true, - }, - })); - const spyNormalizeTimeColumn = jest.spyOn( - queryModule, - 'normalizeTimeColumn', - ); - - buildQueryContext( - { - datasource: '5__table', - viz_type: 'table', - }, - () => [{}], - ); - expect(spyNormalizeTimeColumn).toBeCalled(); - spy.mockRestore(); - spyNormalizeTimeColumn.mockRestore(); - }); - it("shouldn't call normalizeTimeColumn if GENERIC_CHART_AXES is disabled", () => { - // @ts-ignore - const spy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({ - featureFlags: { - GENERIC_CHART_AXES: false, - }, - })); - const spyNormalizeTimeColumn = jest.spyOn( - queryModule, - 'normalizeTimeColumn', - ); - - buildQueryContext( - { - datasource: '5__table', - viz_type: 'table', - }, - () => [{}], - ); - expect(spyNormalizeTimeColumn).not.toBeCalled(); - spy.mockRestore(); - spyNormalizeTimeColumn.mockRestore(); - }); }); diff --git a/superset-frontend/packages/superset-ui-core/test/query/normalizeTimeColumn.test.ts b/superset-frontend/packages/superset-ui-core/test/query/normalizeTimeColumn.test.ts deleted file mode 100644 index 1466d10619835..0000000000000 --- a/superset-frontend/packages/superset-ui-core/test/query/normalizeTimeColumn.test.ts +++ /dev/null @@ -1,247 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -import { - normalizeTimeColumn, - QueryObject, - SqlaFormData, -} from '@superset-ui/core'; - -describe('disabled GENERIC_CHART_AXES', () => { - let windowSpy: any; - - beforeAll(() => { - // @ts-ignore - windowSpy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({ - featureFlags: { - GENERIC_CHART_AXES: false, - }, - })); - }); - - afterAll(() => { - windowSpy.mockRestore(); - }); - - it('should return original QueryObject if disabled GENERIC_CHART_AXES', () => { - const formData: SqlaFormData = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - time_grain_sqla: 'P1Y', - time_range: '1 year ago : 2013', - columns: ['col1'], - metrics: ['count(*)'], - x_axis: 'time_column', - }; - const query: QueryObject = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - extras: { - time_grain_sqla: 'P1Y', - }, - time_range: '1 year ago : 2013', - orderby: [['count(*)', true]], - columns: ['col1'], - metrics: ['count(*)'], - is_timeseries: true, - }; - expect(normalizeTimeColumn(formData, query)).toEqual(query); - }); -}); - -describe('enabled GENERIC_CHART_AXES', () => { - let windowSpy: any; - - beforeAll(() => { - // @ts-ignore - windowSpy = jest.spyOn(window, 'window', 'get').mockImplementation(() => ({ - featureFlags: { - GENERIC_CHART_AXES: true, - }, - })); - }); - - afterAll(() => { - windowSpy.mockRestore(); - }); - - it('should return original QueryObject if x_axis is empty', () => { - const formData: SqlaFormData = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - time_grain_sqla: 'P1Y', - time_range: '1 year ago : 2013', - columns: ['col1'], - metrics: ['count(*)'], - }; - const query: QueryObject = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - extras: { - time_grain_sqla: 'P1Y', - }, - time_range: '1 year ago : 2013', - orderby: [['count(*)', true]], - columns: ['col1'], - metrics: ['count(*)'], - is_timeseries: true, - }; - expect(normalizeTimeColumn(formData, query)).toEqual(query); - }); - - it('should support different columns for x-axis and granularity', () => { - const formData: SqlaFormData = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - time_grain_sqla: 'P1Y', - time_range: '1 year ago : 2013', - x_axis: 'time_column_in_x_axis', - columns: ['col1'], - metrics: ['count(*)'], - }; - const query: QueryObject = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - extras: { - time_grain_sqla: 'P1Y', - where: '', - having: '', - }, - time_range: '1 year ago : 2013', - orderby: [['count(*)', true]], - columns: ['time_column_in_x_axis', 'col1'], - metrics: ['count(*)'], - is_timeseries: true, - }; - expect(normalizeTimeColumn(formData, query)).toEqual({ - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - extras: { where: '', having: '' }, - time_range: '1 year ago : 2013', - orderby: [['count(*)', true]], - columns: [ - { - timeGrain: 'P1Y', - columnType: 'BASE_AXIS', - sqlExpression: 'time_column_in_x_axis', - label: 'time_column_in_x_axis', - expressionType: 'SQL', - }, - 'col1', - ], - metrics: ['count(*)'], - }); - }); - - it('should support custom SQL in x-axis', () => { - const formData: SqlaFormData = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - time_grain_sqla: 'P1Y', - time_range: '1 year ago : 2013', - x_axis: { - expressionType: 'SQL', - label: 'Order Data + 1 year', - sqlExpression: '"Order Date" + interval \'1 
year\'', - }, - columns: ['col1'], - metrics: ['count(*)'], - }; - const query: QueryObject = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - extras: { - time_grain_sqla: 'P1Y', - where: '', - having: '', - }, - time_range: '1 year ago : 2013', - orderby: [['count(*)', true]], - columns: [ - { - expressionType: 'SQL', - label: 'Order Data + 1 year', - sqlExpression: '"Order Date" + interval \'1 year\'', - }, - 'col1', - ], - metrics: ['count(*)'], - is_timeseries: true, - }; - expect(normalizeTimeColumn(formData, query)).toEqual({ - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - extras: { where: '', having: '' }, - time_range: '1 year ago : 2013', - orderby: [['count(*)', true]], - columns: [ - { - timeGrain: 'P1Y', - columnType: 'BASE_AXIS', - expressionType: 'SQL', - label: 'Order Data + 1 year', - sqlExpression: `"Order Date" + interval '1 year'`, - }, - 'col1', - ], - metrics: ['count(*)'], - }); - }); - - it('fallback and invalid columns value', () => { - const formData: SqlaFormData = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - time_grain_sqla: 'P1Y', - time_range: '1 year ago : 2013', - x_axis: { - expressionType: 'SQL', - label: 'Order Data + 1 year', - sqlExpression: '"Order Date" + interval \'1 year\'', - }, - columns: ['col1'], - metrics: ['count(*)'], - }; - const query: QueryObject = { - datasource: '5__table', - viz_type: 'table', - granularity: 'time_column', - extras: { - time_grain_sqla: 'P1Y', - where: '', - having: '', - }, - time_range: '1 year ago : 2013', - orderby: [['count(*)', true]], - metrics: ['count(*)'], - is_timeseries: true, - }; - expect(normalizeTimeColumn(formData, query)).toEqual(query); - }); -}); diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx index 00f2d9ed8f924..7f5e1c8c28536 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/MixedTimeseries/controlPanel.tsx @@ -291,12 +291,12 @@ function createAdvancedAnalyticsSection( const config: ControlPanelConfig = { controlPanelSections: [ - sections.genericTime, + sections.legacyTimeseriesTime, isFeatureEnabled(FeatureFlag.GENERIC_CHART_AXES) ? 
{ label: t('Shared query fields'), expanded: true, - controlSetRows: [['x_axis'], ['time_grain_sqla']], + controlSetRows: [['x_axis']], } : null, createQuerySection(t('Query A'), ''), diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx index 66bc2d0e79bc8..520772375e273 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Area/controlPanel.tsx @@ -52,7 +52,7 @@ const { } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ - sections.genericTime, + sections.legacyTimeseriesTime, sections.echartsTimeSeriesQuery, sections.advancedAnalyticsControls, sections.annotationsAndLayersControls, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx index 160f981084b84..d5e8fcdee90af 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Bar/controlPanel.tsx @@ -259,7 +259,7 @@ function createAxisControl(axis: 'x' | 'y'): ControlSetRow[] { const config: ControlPanelConfig = { controlPanelSections: [ - sections.genericTime, + sections.legacyTimeseriesTime, sections.echartsTimeSeriesQuery, sections.advancedAnalyticsControls, sections.annotationsAndLayersControls, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx index 1b7357eaaa184..736d8b1054a97 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Line/controlPanel.tsx @@ -51,7 +51,7 @@ const { } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ - sections.genericTime, + sections.legacyTimeseriesTime, sections.echartsTimeSeriesQuery, sections.advancedAnalyticsControls, sections.annotationsAndLayersControls, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx index 379840ae3cbf3..3ffdb4489e2cd 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/Scatter/controlPanel.tsx @@ -47,7 +47,7 @@ const { } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ - sections.genericTime, + sections.legacyTimeseriesTime, sections.echartsTimeSeriesQuery, sections.advancedAnalyticsControls, sections.annotationsAndLayersControls, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx index f758249572f12..88a8b1a2fefee 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Regular/SmoothLine/controlPanel.tsx @@ -47,7 +47,7 @@ const { } = DEFAULT_FORM_DATA; 
const config: ControlPanelConfig = { controlPanelSections: [ - sections.genericTime, + sections.legacyTimeseriesTime, sections.echartsTimeSeriesQuery, sections.advancedAnalyticsControls, sections.annotationsAndLayersControls, diff --git a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx index 8001acc5220ce..5e02cbc59b5d3 100644 --- a/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx +++ b/superset-frontend/plugins/plugin-chart-echarts/src/Timeseries/Step/controlPanel.tsx @@ -50,7 +50,7 @@ const { } = DEFAULT_FORM_DATA; const config: ControlPanelConfig = { controlPanelSections: [ - sections.genericTime, + sections.legacyTimeseriesTime, sections.echartsTimeSeriesQuery, sections.advancedAnalyticsControls, sections.annotationsAndLayersControls, diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index a72d07f2d0741..a7ce51a344923 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -83,7 +83,6 @@ from superset.connectors.base.models import BaseColumn, BaseDatasource, BaseMetric from superset.connectors.sqla.utils import ( find_cached_objects_in_session, - get_columns_description, get_physical_table_metadata, get_virtual_table_metadata, validate_adhoc_subquery, @@ -1125,29 +1124,7 @@ def adhoc_column_to_sqla( schema=self.schema, template_processor=template_processor, ) - col_in_metadata = self.get_column(expression) - if col_in_metadata: - sqla_column = col_in_metadata.get_sqla_col() - is_dttm = col_in_metadata.is_temporal - else: - sqla_column = literal_column(expression) - # probe adhoc column type - tbl, _ = self.get_from_clause(template_processor) - qry = sa.select([sqla_column]).limit(1).select_from(tbl) - sql = self.database.compile_sqla_query(qry) - col_desc = get_columns_description(self.database, sql) - is_dttm = col_desc[0]["is_dttm"] - - if ( - is_dttm - and col.get("columnType") == "BASE_AXIS" - and (time_grain := col.get("timeGrain")) - ): - sqla_column = self.db_engine_spec.get_timestamp_expr( - sqla_column, - None, - time_grain, - ) + sqla_column = literal_column(expression) return self.make_sqla_column_compatible(sqla_column, label) def make_sqla_column_compatible( diff --git a/superset/connectors/sqla/utils.py b/superset/connectors/sqla/utils.py index 8151bfd44b03b..e58cf797a9931 100644 --- a/superset/connectors/sqla/utils.py +++ b/superset/connectors/sqla/utils.py @@ -14,8 +14,6 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
-from __future__ import annotations - import logging from contextlib import closing from typing import ( @@ -104,7 +102,7 @@ def get_physical_table_metadata( return cols -def get_virtual_table_metadata(dataset: SqlaTable) -> List[ResultSetColumnType]: +def get_virtual_table_metadata(dataset: "SqlaTable") -> List[ResultSetColumnType]: """Use SQLparser to get virtual dataset metadata""" if not dataset.sql: raise SupersetGenericDBErrorException( @@ -139,7 +137,7 @@ def get_virtual_table_metadata(dataset: SqlaTable) -> List[ResultSetColumnType]: try: with closing(engine.raw_connection()) as conn: cursor = conn.cursor() - query = dataset.database.apply_limit_to_sql(statements[0], limit=1) + query = dataset.database.apply_limit_to_sql(statements[0]) db_engine_spec.execute(cursor, query) result = db_engine_spec.fetch_data(cursor, limit=1) result_set = SupersetResultSet(result, cursor.description, db_engine_spec) @@ -149,24 +147,6 @@ def get_virtual_table_metadata(dataset: SqlaTable) -> List[ResultSetColumnType]: return cols -def get_columns_description( - database: Database, - query: str, -) -> List[ResultSetColumnType]: - db_engine_spec = database.db_engine_spec - try: - with closing(database.get_sqla_engine().raw_connection()) as conn: - cursor = conn.cursor() - query = database.apply_limit_to_sql(query, limit=1) - cursor.execute(query) - db_engine_spec.execute(cursor, query) - result = db_engine_spec.fetch_data(cursor, limit=1) - result_set = SupersetResultSet(result, cursor.description, db_engine_spec) - return result_set.columns - except Exception as ex: - raise SupersetGenericDBErrorException(message=str(ex)) from ex - - def validate_adhoc_subquery( sql: str, database_id: int, @@ -204,12 +184,12 @@ def validate_adhoc_subquery( @memoized def get_dialect_name(drivername: str) -> str: - return SqlaURL.create(drivername).get_dialect().name + return SqlaURL(drivername).get_dialect().name @memoized def get_identifier_quoter(drivername: str) -> Dict[str, Callable[[str], str]]: - return SqlaURL.create(drivername).get_dialect()().identifier_preparer.quote + return SqlaURL(drivername).get_dialect()().identifier_preparer.quote DeclarativeModel = TypeVar("DeclarativeModel", bound=DeclarativeMeta) diff --git a/superset/superset_typing.py b/superset/superset_typing.py index eeaca0dd511aa..ae8787d1c6913 100644 --- a/superset/superset_typing.py +++ b/superset/superset_typing.py @@ -55,8 +55,6 @@ class AdhocColumn(TypedDict, total=False): hasCustomLabel: Optional[bool] label: Optional[str] sqlExpression: Optional[str] - columnType: Optional[Literal["BASE_AXIS", "SERIES"]] - timeGrain: Optional[str] class ResultSetColumnType(TypedDict): diff --git a/superset/utils/core.py b/superset/utils/core.py index 4131ede785a9a..319069e2d41e6 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -1271,17 +1271,6 @@ def is_adhoc_column(column: Column) -> TypeGuard[AdhocColumn]: return isinstance(column, dict) -def get_base_axis_column(columns: Optional[List[Column]]) -> Optional[AdhocColumn]: - if columns is None: - return None - axis_cols = [ - col - for col in columns - if is_adhoc_column(col) and col.get("columnType") == "BASE_AXIS" - ] - return axis_cols[0] if axis_cols else None - - def get_column_name( column: Column, verbose_map: Optional[Dict[str, Any]] = None ) -> str: diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py index 606bfe4377450..aaa156b5b4ff7 100644 --- a/tests/integration_tests/conftest.py +++ b/tests/integration_tests/conftest.py @@ -18,7 +18,7 @@ 
 import contextlib
 import functools
-import os
 from typing import Any, Callable, Optional, TYPE_CHECKING
 from unittest.mock import patch
 
@@ -303,38 +303,34 @@ def virtual_dataset():
 
 @pytest.fixture
 def physical_dataset():
     from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
-    from superset.connectors.sqla.utils import get_identifier_quoter
 
     example_database = get_example_database()
     engine = example_database.get_sqla_engine()
-    quoter = get_identifier_quoter(engine.name)
     # sqlite can only execute one statement at a time
     engine.execute(
-        f"""
+        """
         CREATE TABLE IF NOT EXISTS physical_dataset(
           col1 INTEGER,
           col2 VARCHAR(255),
           col3 DECIMAL(4,2),
           col4 VARCHAR(255),
-          col5 TIMESTAMP DEFAULT '1970-01-01 00:00:01',
-          col6 TIMESTAMP DEFAULT '1970-01-01 00:00:01',
-          {quoter('time column with spaces')} TIMESTAMP DEFAULT '1970-01-01 00:00:01'
+          col5 TIMESTAMP
         );
         """
     )
     engine.execute(
         """
         INSERT INTO physical_dataset values
-        (0, 'a', 1.0, NULL, '2000-01-01 00:00:00', '2002-01-03 00:00:00', '2002-01-03 00:00:00'),
-        (1, 'b', 1.1, NULL, '2000-01-02 00:00:00', '2002-02-04 00:00:00', '2002-02-04 00:00:00'),
-        (2, 'c', 1.2, NULL, '2000-01-03 00:00:00', '2002-03-07 00:00:00', '2002-03-07 00:00:00'),
-        (3, 'd', 1.3, NULL, '2000-01-04 00:00:00', '2002-04-12 00:00:00', '2002-04-12 00:00:00'),
-        (4, 'e', 1.4, NULL, '2000-01-05 00:00:00', '2002-05-11 00:00:00', '2002-05-11 00:00:00'),
-        (5, 'f', 1.5, NULL, '2000-01-06 00:00:00', '2002-06-13 00:00:00', '2002-06-13 00:00:00'),
-        (6, 'g', 1.6, NULL, '2000-01-07 00:00:00', '2002-07-15 00:00:00', '2002-07-15 00:00:00'),
-        (7, 'h', 1.7, NULL, '2000-01-08 00:00:00', '2002-08-18 00:00:00', '2002-08-18 00:00:00'),
-        (8, 'i', 1.8, NULL, '2000-01-09 00:00:00', '2002-09-20 00:00:00', '2002-09-20 00:00:00'),
-        (9, 'j', 1.9, NULL, '2000-01-10 00:00:00', '2002-10-22 00:00:00', '2002-10-22 00:00:00');
+        (0, 'a', 1.0, NULL, '2000-01-01 00:00:00'),
+        (1, 'b', 1.1, NULL, '2000-01-02 00:00:00'),
+        (2, 'c', 1.2, NULL, '2000-01-03 00:00:00'),
+        (3, 'd', 1.3, NULL, '2000-01-04 00:00:00'),
+        (4, 'e', 1.4, NULL, '2000-01-05 00:00:00'),
+        (5, 'f', 1.5, NULL, '2000-01-06 00:00:00'),
+        (6, 'g', 1.6, NULL, '2000-01-07 00:00:00'),
+        (7, 'h', 1.7, NULL, '2000-01-08 00:00:00'),
+        (8, 'i', 1.8, NULL, '2000-01-09 00:00:00'),
+        (9, 'j', 1.9, NULL, '2000-01-10 00:00:00');
         """
     )
@@ -347,13 +343,6 @@ def physical_dataset():
     TableColumn(column_name="col3", type="DECIMAL(4,2)", table=dataset)
     TableColumn(column_name="col4", type="VARCHAR(255)", table=dataset)
     TableColumn(column_name="col5", type="TIMESTAMP", is_dttm=True, table=dataset)
-    TableColumn(column_name="col6", type="TIMESTAMP", is_dttm=True, table=dataset)
-    TableColumn(
-        column_name="time column with spaces",
-        type="TIMESTAMP",
-        is_dttm=True,
-        table=dataset,
-    )
     SqlMetric(metric_name="count", expression="count(*)", table=dataset)
     db.session.merge(dataset)
     db.session.commit()
@@ -396,9 +385,3 @@ def virtual_dataset_comma_in_column_value():
 
     db.session.delete(dataset)
     db.session.commit()
-
-
-only_postgresql = pytest.mark.skipif(
-    "postgresql" not in os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", ""),
-    reason="Only run test case in Postgresql",
-)
diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py
index cb2b24e9cc6ea..b17072f6bc568 100644
--- a/tests/integration_tests/query_context_tests.py
+++ b/tests/integration_tests/query_context_tests.py
@@ -30,7 +30,6 @@
 from superset.connectors.sqla.models import SqlMetric
 from
superset.datasource.dao import DatasourceDAO from superset.extensions import cache_manager -from superset.superset_typing import AdhocColumn from superset.utils.core import ( AdhocMetricExpressionType, backend, @@ -39,7 +38,6 @@ ) from superset.utils.pandas_postprocessing.utils import FLAT_COLUMN_SEPARATOR from tests.integration_tests.base_tests import SupersetTestCase -from tests.integration_tests.conftest import only_postgresql from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, load_birth_names_data, @@ -730,183 +728,3 @@ def test_get_label_map(app_context, virtual_dataset_comma_in_column_value): "count, col2, row2": ["count", "col2, row2"], "count, col2, row3": ["count", "col2, row3"], } - - -def test_time_column_with_time_grain(app_context, physical_dataset): - column_on_axis: AdhocColumn = { - "label": "I_AM_AN_ORIGINAL_COLUMN", - "sqlExpression": "col5", - "timeGrain": "P1Y", - } - adhoc_column: AdhocColumn = { - "label": "I_AM_A_TRUNC_COLUMN", - "sqlExpression": "col6", - "columnType": "BASE_AXIS", - "timeGrain": "P1Y", - } - qc = QueryContextFactory().create( - datasource={ - "type": physical_dataset.type, - "id": physical_dataset.id, - }, - queries=[ - { - "columns": ["col1", column_on_axis, adhoc_column], - "metrics": ["count"], - "orderby": [["col1", True]], - } - ], - result_type=ChartDataResultType.FULL, - force=True, - ) - query_object = qc.queries[0] - df = qc.get_df_payload(query_object)["df"] - if query_object.datasource.database.backend == "sqlite": - # sqlite returns string as timestamp column - assert df["I_AM_AN_ORIGINAL_COLUMN"][0] == "2000-01-01 00:00:00" - assert df["I_AM_AN_ORIGINAL_COLUMN"][1] == "2000-01-02 00:00:00" - assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00" - assert df["I_AM_A_TRUNC_COLUMN"][1] == "2002-01-01 00:00:00" - else: - assert df["I_AM_AN_ORIGINAL_COLUMN"][0].strftime("%Y-%m-%d") == "2000-01-01" - assert df["I_AM_AN_ORIGINAL_COLUMN"][1].strftime("%Y-%m-%d") == "2000-01-02" - assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01" - assert df["I_AM_A_TRUNC_COLUMN"][1].strftime("%Y-%m-%d") == "2002-01-01" - - -def test_non_time_column_with_time_grain(app_context, physical_dataset): - qc = QueryContextFactory().create( - datasource={ - "type": physical_dataset.type, - "id": physical_dataset.id, - }, - queries=[ - { - "columns": [ - "col1", - { - "label": "COL2 ALIAS", - "sqlExpression": "col2", - "columnType": "BASE_AXIS", - "timeGrain": "P1Y", - }, - ], - "metrics": ["count"], - "orderby": [["col1", True]], - "row_limit": 1, - } - ], - result_type=ChartDataResultType.FULL, - force=True, - ) - - query_object = qc.queries[0] - df = qc.get_df_payload(query_object)["df"] - assert df["COL2 ALIAS"][0] == "a" - - -def test_special_chars_in_column_name(app_context, physical_dataset): - qc = QueryContextFactory().create( - datasource={ - "type": physical_dataset.type, - "id": physical_dataset.id, - }, - queries=[ - { - "columns": [ - "col1", - "time column with spaces", - { - "label": "I_AM_A_TRUNC_COLUMN", - "sqlExpression": "time column with spaces", - "columnType": "BASE_AXIS", - "timeGrain": "P1Y", - }, - ], - "metrics": ["count"], - "orderby": [["col1", True]], - "row_limit": 1, - } - ], - result_type=ChartDataResultType.FULL, - force=True, - ) - - query_object = qc.queries[0] - df = qc.get_df_payload(query_object)["df"] - if query_object.datasource.database.backend == "sqlite": - # sqlite returns string as timestamp column - assert df["time column with spaces"][0] 
== "2002-01-03 00:00:00" - assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00" - else: - assert df["time column with spaces"][0].strftime("%Y-%m-%d") == "2002-01-03" - assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01" - - -@only_postgresql -def test_date_adhoc_column(app_context, physical_dataset): - # sql expression returns date type - column_on_axis: AdhocColumn = { - "label": "ADHOC COLUMN", - "sqlExpression": "col6 + interval '20 year'", - "columnType": "BASE_AXIS", - "timeGrain": "P1Y", - } - qc = QueryContextFactory().create( - datasource={ - "type": physical_dataset.type, - "id": physical_dataset.id, - }, - queries=[ - { - "columns": [column_on_axis], - "metrics": ["count"], - } - ], - result_type=ChartDataResultType.FULL, - force=True, - ) - query_object = qc.queries[0] - df = qc.get_df_payload(query_object)["df"] - # ADHOC COLUMN count - # 0 2022-01-01 10 - assert df["ADHOC COLUMN"][0].strftime("%Y-%m-%d") == "2022-01-01" - assert df["count"][0] == 10 - - -@only_postgresql -def test_non_date_adhoc_column(app_context, physical_dataset): - # sql expression returns non-date type - column_on_axis: AdhocColumn = { - "label": "ADHOC COLUMN", - "sqlExpression": "col1 * 10", - "columnType": "BASE_AXIS", - "timeGrain": "P1Y", - } - qc = QueryContextFactory().create( - datasource={ - "type": physical_dataset.type, - "id": physical_dataset.id, - }, - queries=[ - { - "columns": [column_on_axis], - "metrics": ["count"], - "orderby": [ - [ - { - "expressionType": "SQL", - "sqlExpression": '"ADHOC COLUMN"', - }, - True, - ] - ], - } - ], - result_type=ChartDataResultType.FULL, - force=True, - ) - query_object = qc.queries[0] - df = qc.get_df_payload(query_object)["df"] - assert df["ADHOC COLUMN"][0] == 0 - assert df["ADHOC COLUMN"][1] == 10 diff --git a/tests/unit_tests/core_tests.py b/tests/unit_tests/core_tests.py index 1473f18382950..bd151011a48f6 100644 --- a/tests/unit_tests/core_tests.py +++ b/tests/unit_tests/core_tests.py @@ -30,6 +30,7 @@ get_metric_names, get_time_filter_status, is_adhoc_metric, + NO_TIME_RANGE, ) from tests.unit_tests.fixtures.datasets import get_dataset_mock