From 53bb43b0fe6f773e7ef3782593749f403a3a449f Mon Sep 17 00:00:00 2001 From: saghatelian <43491361+saghatelian@users.noreply.github.com> Date: Mon, 23 Oct 2023 17:20:40 +0400 Subject: [PATCH] 3.0 (#2) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: is_select with UNION (#25290) (cherry picked from commit bb002d6147c82de692f6692e77f59bd729953495) * fix: Add explicit ON DELETE CASCADE for dashboard_roles (#25320) (cherry picked from commit d54e827bb9f8eab8a7734bf7bdb8a5fdb2ae0c79) * fix(chart): Supporting custom SQL as temporal x-axis column with filter (#25126) Co-authored-by: Kamil Gabryjelski * fix: Use RLS clause instead of ID for cache key (#25229) (cherry picked from commit fba66c6250c38944639cfc1f95a67ef00c66629c) * fix: Improve the reliability of alerts & reports (#25239) (cherry picked from commit f672d5da5cb9390b83176bb12c27ce7eeea3e8ae) * fix: DashboardRoles cascade operation (#25349) (cherry picked from commit a971a28a3450b28151bbad3632ce2364c87df3fc) * fix: datetime with timezone excel export (#25318) Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com> (cherry picked from commit 5ebcd2a5f69d2691f1e6c0ffc4a611c728cf4354) * fix: Workaround for Cypress ECONNRESET error (#25399) (cherry picked from commit d76ff39766409d9240191b58b699e5f4ec9afa2f) * fix(sqllab): invalid persisted tab state (#25308) (#25398) * fix: Rename on_delete parameter to ondelete (#25424) (cherry picked from commit 893b45feef306cb59409702bfd39cfcf3589f6ef) * fix: preventing save button from flickering in SQL Lab (#25106) (cherry picked from commit 296ff17f196084dbfe1fc5745c2f0e429325aa11) * fix: chart import (#25425) (cherry picked from commit a4d8f36863e16a8c75aec2a75f2a185b6ca1d3c5) * fix: swagger UI CSP error (#25368) (cherry picked from commit 1716b9f8f68c7abe4c1a082e11ccdb26dbe6a3db) * fix: smarter date formatter (#25404) (cherry picked from commit f0080f9c559c407c5d06e03db27f2cc40fb227e2) * fix(sqllab): invalid start date (#25437) * fix(nativeFilters): Speed up native filters by removing unnecessary rerenders (#25282) Co-authored-by: JUST.in DO IT (cherry picked from commit a0eeb4d767df9f573d80b520cf8afe42013616bb) * fix(SqlLab): make icon placement even (#25372) (cherry picked from commit 11b49a6ceb7b258766d7fe4642808509ccf83317) * fix: Duplicate items when pasting into Select (#25447) (cherry picked from commit 7cf96cd8436c782090336ae8a9581bd3898a4e6a) * fix: update the SQLAlchemy model definition at json column for Log table (#25445) (cherry picked from commit e83a76a58642018aa93ae5bef509a42cabdec980) * fix(helm chart): set chart appVersion to 3.0.0 (#25373) * fix(mysql): handle string typed decimal results (#24241) (cherry picked from commit 7eab59af513ccccb3b1fed7aca5798c98c35fdb8) * fix: Styles not loading because of faulty CSP setting (#25468) (cherry picked from commit 0cebffd59a45bb7256e1817d9792dbe2793fba72) * fix(sqllab): error with lazy_gettext for tab titles (#25469) (cherry picked from commit ddde178e3bf2d73811e3e39dbb79a9a86f1e0970) * fix: Address Mypy issue which is causing CI to fail (#25494) (cherry picked from commit 36ed617090b72ad3cb7b587daa05f9d0dd984e7b) * chore: Adds 3.0.1 CHANGELOG * fix: Unable to sync columns when database or dataset name contains `+` (#25390) (cherry picked from commit dbe0838f8f446b94568644bdf68b86f75a87baf1) * fix(sqllab): Broken query containing 'children' (#25490) (cherry picked from commit b92957e510ade609b3a89ac342af466591aa1a2d) * chore: Expand error detail on 
screencapture (#25519) (cherry picked from commit ba541e802278bde10f77a543b66a9b4da3bc15cf) * fix: tags permissions error message (#25516) (cherry picked from commit 50b0816e375123f3609f80f7e13555665cba7a69) * fix: Apply normalization to all dttm columns (#25147) (cherry picked from commit 58fcd292a979212a3d6f636917021c12c299fd93) * fix: REST API CSRF exempt list (#25590) (cherry picked from commit 549abb542b5d541b4960386d774d13dc74d72347) * fix(RLS): Fix Info Tooltip + Button Alignment on RLS Modal (#25400) (cherry picked from commit a6d0e6f37a0713e1dfa8a943e3c8e8e68a4d2032) * fix: thumbnails loading - Talisman default config (#25486) (cherry picked from commit 52f631a038dae9d353bae6e0f4cde1f96b1899f1) * fix(Presto): catch DatabaseError when testing Presto views (#25559) Co-authored-by: Rui Zhao (cherry picked from commit be3714e1314df69627614c5229bacaa7839ccfc6) * fix(Charts): Set max row limit + removed the option to use an empty row limit value (#25579) (cherry picked from commit f556ef53f3177746ec2526b4b963da4ef00c2d58) * fix(window): unavailable localStorage and sessionStorage (#25599) * fix: finestTemporalGrainFormatter (#25618) (cherry picked from commit 62bffaf935e6745dc4a122c4f4f71ef548511d31) * fix: revert fix(sqllab): Force trino client async execution (#24859) (#25541) (cherry picked from commit e56e0de45880c20b0eb51d84bc7e5b8898f61c94) * chore: Updates 3.0.1 CHANGELOG * fix(sqllab): Mistitled for new tab after rename (#25523) (cherry picked from commit a520124a78286aea0f9a7ad491d041bbca2c3596) * fix(sqllab): template validation error within comments (#25626) (cherry picked from commit b370c66308e1bc84031ed7aae855aa72c20fbd11) * fix: avoid 500 errors with SQLLAB_BACKEND_PERSISTENCE (#25553) (cherry picked from commit 99f79f5143c417497ffde326a8393ab60aa71e7e) * fix(import): Make sure query context is overwritten for overwriting imports (#25493) (cherry picked from commit a0a0d8043fe7004134bf89a05e6b5f6ee41399e5) * fix: permalink save/overwrites in explore (#25112) Co-authored-by: Elizabeth Thompson (cherry picked from commit e58a3aba545fd03f2af33b0075c4cacf09f776a3) * fix(header navlinks): link navlinks to path prefix (#25495) (cherry picked from commit 51c56dd2a0f52fa092862f8bc5833749f9adc1ba) * fix: improve upload ZIP file validation (#25658) * fix: warning of nth-child (#23638) (cherry picked from commit 16cc089b198dcdebc2422845aa08d18233c6b3a4) * fix(dremio): Fixes issue with Dremio SQL generation for Charts with Series Limit (#25657) (cherry picked from commit be8265794059d8bbe216a4cb22c7a3f6adf4bcb3) --------- Co-authored-by: Beto Dealmeida Co-authored-by: John Bodley <4567245+john-bodley@users.noreply.github.com> Co-authored-by: Zef Lin Co-authored-by: Kamil Gabryjelski Co-authored-by: Jack Fragassi Co-authored-by: Michael S. Molina <70410625+michael-s-molina@users.noreply.github.com> Co-authored-by: JUST.in DO IT Co-authored-by: Jack <41238731+fisjac@users.noreply.github.com> Co-authored-by: Daniel Vaz Gaspar Co-authored-by: Stepan <66589759+Always-prog@users.noreply.github.com> Co-authored-by: Corbin Bullard Co-authored-by: Gyuil Han Co-authored-by: Celalettin Calis Co-authored-by: Ville Brofeldt <33317356+villebro@users.noreply.github.com> Co-authored-by: ʈᵃᵢ Co-authored-by: Michael S. Molina Co-authored-by: mapledan Co-authored-by: Igor Khrol Co-authored-by: Rui Zhao <105950525+zhaorui2022@users.noreply.github.com> Co-authored-by: Fabien <18534166+frassinier@users.noreply.github.com> Co-authored-by: Hugh A.
Miles II Co-authored-by: OskarNS --- CHANGELOG.md | 46 ++++++ helm/superset/Chart.yaml | 4 +- helm/superset/README.md | 2 +- requirements/base.txt | 2 +- setup.py | 2 +- .../cypress-base/cypress.config.ts | 17 ++ superset-frontend/jest.config.js | 3 + superset-frontend/package.json | 2 +- .../src/shared-controls/sharedControls.tsx | 9 +- .../superset-ui-core/src/chart/types/Base.ts | 1 + .../formatters/finestTemporalGrain.test.ts | 63 ++++++++ .../formatters/finestTemporalGrain.ts | 80 ++++++++++ .../superset-ui-core/src/time-format/index.ts | 1 + .../superset-ui-core/src/validator/index.ts | 1 + .../src/validator/validateMaxValue.ts | 8 + .../test/validator/validateMaxValue.test.ts | 38 +++++ .../src/ReactParallelCoordinates.jsx | 2 +- .../src/ReactNVD3.jsx | 2 +- .../src/SqlLab/actions/sqlLab.js | 17 +- .../src/SqlLab/actions/sqlLab.test.js | 22 ++- .../SaveDatasetActionButton/index.tsx | 2 +- .../components/SaveQuery/SaveQuery.test.tsx | 21 ++- .../src/SqlLab/components/SaveQuery/index.tsx | 12 +- .../components/SqlEditorLeftBar/index.tsx | 4 +- .../SqlLab/components/TableElement/index.tsx | 5 + .../src/SqlLab/reducers/getInitialState.js | 132 ++++++++-------- .../SqlLab/reducers/getInitialState.test.ts | 53 ++++++- .../src/SqlLab/reducers/sqlLab.js | 49 +++--- .../src/components/ButtonGroup/index.tsx | 6 +- .../Datasource/DatasourceEditor.jsx | 6 +- .../Datasource/DatasourceEditor.test.jsx | 2 +- .../src/components/DropdownButton/index.tsx | 2 +- .../DropdownSelectableIcon/index.tsx | 2 +- .../ErrorMessage/ErrorAlert.test.tsx | 24 +++ .../components/ErrorMessage/ErrorAlert.tsx | 6 +- .../FilterableTable/FilterableTable.test.tsx | 8 +- .../src/components/FilterableTable/index.tsx | 1 + .../components/Select/AsyncSelect.test.tsx | 39 +++++ .../src/components/Select/AsyncSelect.tsx | 26 +++- .../src/components/Select/Select.test.tsx | 39 +++++ .../src/components/Select/Select.tsx | 33 +++- .../src/components/Select/utils.tsx | 28 ++-- .../src/components/Table/index.tsx | 10 ++ .../src/components/Tags/utils.tsx | 2 +- .../FilterBar/FilterControls/state.ts | 3 +- .../nativeFilters/FilterBar/index.tsx | 16 +- superset-frontend/src/dataMask/reducer.ts | 1 + .../explore/actions/exploreActions.test.js | 13 +- .../components/DatasourcePanel/index.tsx | 14 +- .../src/explore/components/SaveModal.test.jsx | 35 ++++- .../src/explore/components/SaveModal.tsx | 42 +++-- .../src/explore/reducers/exploreReducer.js | 18 ++- .../src/features/home/Menu.test.tsx | 6 +- superset-frontend/src/features/home/Menu.tsx | 30 +++- .../features/rls/RowLevelSecurityModal.tsx | 4 + .../components/Select/SelectFilterPlugin.tsx | 6 +- superset-frontend/src/hooks/useTabId.ts | 23 ++- .../charts/commands/importers/v1/__init__.py | 2 +- .../charts/commands/importers/v1/utils.py | 4 +- superset/commands/importers/v1/assets.py | 2 +- superset/commands/importers/v1/utils.py | 2 + superset/common/query_context_factory.py | 8 +- superset/common/query_context_processor.py | 5 +- superset/common/query_object_factory.py | 67 +++++++- superset/config.py | 26 +++- superset/connectors/sqla/models.py | 2 + .../commands/importers/v1/__init__.py | 2 +- superset/db_engine_specs/base.py | 47 +++--- superset/db_engine_specs/dremio.py | 2 - superset/db_engine_specs/mysql.py | 6 +- superset/db_engine_specs/presto.py | 6 +- superset/db_engine_specs/trino.py | 66 +------- ..._on_delete_cascade_for_dashboard_slices.py | 2 +- ...lete_cascade_for_embedded_dashboards.py.py | 2 +- ...d_on_delete_cascade_for_dashboard_roles.py | 55 +++++++ 
superset/models/core.py | 2 +- superset/models/dashboard.py | 14 +- superset/security/manager.py | 26 ++-- superset/sql_lab.py | 7 +- superset/sql_parse.py | 10 +- superset/sqllab/query_render.py | 4 +- superset/tasks/cron_util.py | 14 +- superset/tasks/scheduler.py | 9 +- superset/utils/core.py | 19 +++ superset/utils/excel.py | 5 + superset/views/base_api.py | 2 +- superset/views/core.py | 31 ++-- superset/views/sql_lab/views.py | 11 +- .../charts/data/api_tests.py | 66 ++++++++ tests/integration_tests/core_tests.py | 62 ++++++++ .../db_engine_specs/presto_tests.py | 4 +- .../integration_tests/query_context_tests.py | 8 +- .../security/row_level_security_tests.py | 15 ++ tests/integration_tests/sqllab_tests.py | 7 + .../commands/importers/v1/utils_test.py | 1 + .../common/test_query_object_factory.py | 90 ++++++++++- tests/unit_tests/conftest.py | 9 ++ .../db_engine_specs/test_clickhouse.py | 6 +- .../unit_tests/db_engine_specs/test_mysql.py | 40 +++++ .../unit_tests/db_engine_specs/test_trino.py | 31 +--- tests/unit_tests/security/api_test.py | 31 ++++ tests/unit_tests/sql_lab_test.py | 10 +- tests/unit_tests/sql_parse_tests.py | 10 ++ tests/unit_tests/tasks/test_cron_util.py | 145 +++++++++++------- tests/unit_tests/utils/excel_tests.py | 31 ++++ tests/unit_tests/utils/test_core.py | 57 +++++++ 106 files changed, 1677 insertions(+), 451 deletions(-) create mode 100644 superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.test.ts create mode 100644 superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.ts create mode 100644 superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts create mode 100644 superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts create mode 100644 superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py create mode 100644 tests/unit_tests/security/api_test.py create mode 100644 tests/unit_tests/utils/excel_tests.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 55656a16ab6ac..3e150d476b76e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ under the License. ## Change Log +- [3.0.1](#301-tue-oct-13-103221-2023--0700) - [3.0.0](#300-thu-aug-24-133627-2023--0600) - [2.1.1](#211-sun-apr-23-154421-2023-0100) - [2.1.0](#210-thu-mar-16-211305-2023--0700) @@ -31,6 +32,51 @@ under the License. 
- [1.4.2](#142-sat-mar-19-000806-2022-0200) - [1.4.1](#141) +### 3.0.1 (Tue Oct 13 10:32:21 2023 -0700) + +**Database Migrations** + +- [#25320](https://github.com/apache/superset/pull/25320) fix: Add explicit ON DELETE CASCADE for dashboard_roles (@john-bodley) + +**Fixes** + +- [#25541](https://github.com/apache/superset/pull/25541) fix: revert fix(sqllab): Force trino client async execution (#24859) (@villebro) +- [#25618](https://github.com/apache/superset/pull/25618) fix: finestTemporalGrainFormatter (@betodealmeida) +- [#25599](https://github.com/apache/superset/pull/25599) fix(window): unavailable localStorage and sessionStorage (@frassinier) +- [#25579](https://github.com/apache/superset/pull/25579) fix(Charts): Set max row limit + removed the option to use an empty row limit value (@CorbinBullard) +- [#25559](https://github.com/apache/superset/pull/25559) fix(Presto): catch DatabaseError when testing Presto views (@zhaorui2022) +- [#25486](https://github.com/apache/superset/pull/25486) fix: thumbnails loading - Talisman default config (@Khrol) +- [#25400](https://github.com/apache/superset/pull/25400) fix(RLS): Fix Info Tooltip + Button Alignment on RLS Modal (@CorbinBullard) +- [#25590](https://github.com/apache/superset/pull/25590) fix: REST API CSRF exempt list (@dpgaspar) +- [#25147](https://github.com/apache/superset/pull/25147) fix: Apply normalization to all dttm columns (@kgabryje) +- [#25516](https://github.com/apache/superset/pull/25516) fix: tags permissions error message (@Khrol) +- [#25519](https://github.com/apache/superset/pull/25519) fix: Expand error detail on screencapture (@justinpark) +- [#25490](https://github.com/apache/superset/pull/25490) fix(sqllab): Broken query containing 'children' (@justinpark) +- [#25390](https://github.com/apache/superset/pull/25390) fix: Unable to sync columns when database or dataset name contains `+` (@mapledan) +- [#25494](https://github.com/apache/superset/pull/25494) fix: Address Mypy issue which is causing CI to fail (@john-bodley) +- [#25469](https://github.com/apache/superset/pull/25469) fix(sqllab): error with lazy_gettext for tab titles (@nytai) +- [#25468](https://github.com/apache/superset/pull/25468) fix: Styles not loading because of faulty CSP setting (@kgabryje) +- [#24241](https://github.com/apache/superset/pull/24241) fix(mysql): handle string typed decimal results (@villebro) +- [#25373](https://github.com/apache/superset/pull/25373) fix(helm chart): set chart appVersion to 3.0.0 (@celalettin1286) +- [#25445](https://github.com/apache/superset/pull/25445) fix: update the SQLAlchemy model definition at json column for Log table (@cnabro) +- [#25447](https://github.com/apache/superset/pull/25447) fix: Duplicate items when pasting into Select (@michael-s-molina) +- [#25372](https://github.com/apache/superset/pull/25372) fix(SqlLab): make icon placement even (@CorbinBullard) +- [#25282](https://github.com/apache/superset/pull/25282) fix(nativeFilters): Speed up native filters by removing unnecessary rerenders (@Always-prog) +- [#25437](https://github.com/apache/superset/pull/25437) fix(sqllab): invalid start date (@justinpark) +- [#25404](https://github.com/apache/superset/pull/25404) fix: smarter date formatter (@betodealmeida) +- [#25368](https://github.com/apache/superset/pull/25368) fix: swagger UI CSP error (@dpgaspar) +- [#25425](https://github.com/apache/superset/pull/25425) fix: chart import (@betodealmeida) +- [#25106](https://github.com/apache/superset/pull/25106) fix: preventing save button from flickering
in SQL Lab (@fisjac) +- [#25424](https://github.com/apache/superset/pull/25424) fix: Rename on_delete parameter to ondelete (@john-bodley) +- [#25398](https://github.com/apache/superset/pull/25398) fix(sqllab): invalid persisted tab state (#25308) (@justinpark) +- [#25399](https://github.com/apache/superset/pull/25399) fix: Workaround for Cypress ECONNRESET error (@michael-s-molina) +- [#25318](https://github.com/apache/superset/pull/25318) fix: datetime with timezone excel export (@betodealmeida) +- [#25349](https://github.com/apache/superset/pull/25349) fix: DashboardRoles cascade operation (@michael-s-molina) +- [#25239](https://github.com/apache/superset/pull/25239) fix: Improve the reliability of alerts & reports (@jfrag1) +- [#25229](https://github.com/apache/superset/pull/25229) fix: Use RLS clause instead of ID for cache key (@jfrag1) +- [#25126](https://github.com/apache/superset/pull/25126) fix(chart): Supporting custom SQL as temporal x-axis column with filter (@zephyring) +- [#25290](https://github.com/apache/superset/pull/25290) fix: is_select with UNION (@betodealmeida) + ### 3.0.0 (Thu Aug 24 13:36:27 2023 -0600) **Database Migrations** diff --git a/helm/superset/Chart.yaml b/helm/superset/Chart.yaml index 8af3ae4acf579..2aa2bc49a3178 100644 --- a/helm/superset/Chart.yaml +++ b/helm/superset/Chart.yaml @@ -15,7 +15,7 @@ # limitations under the License. # apiVersion: v2 -appVersion: "2.1.0" +appVersion: "3.0.0" description: Apache Superset is a modern, enterprise-ready business intelligence web application name: superset icon: https://artifacthub.io/image/68c1d717-0e97-491f-b046-754e46f46922@2x @@ -29,7 +29,7 @@ maintainers: - name: craig-rueda email: craig@craigrueda.com url: https://github.com/craig-rueda -version: 0.10.6 +version: 0.10.9 dependencies: - name: postgresql version: 12.1.6 diff --git a/helm/superset/README.md b/helm/superset/README.md index c3a46fbec4d6e..38c69c38b524d 100644 --- a/helm/superset/README.md +++ b/helm/superset/README.md @@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs # superset -![Version: 0.10.6](https://img.shields.io/badge/Version-0.10.6-informational?style=flat-square) +![Version: 0.10.9](https://img.shields.io/badge/Version-0.10.9-informational?style=flat-square) Apache Superset is a modern, enterprise-ready business intelligence web application diff --git a/requirements/base.txt b/requirements/base.txt index 1a971fdab4910..d6ee2e6a6b9ef 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -88,7 +88,7 @@ flask==2.2.5 # flask-migrate # flask-sqlalchemy # flask-wtf -flask-appbuilder==4.3.6 +flask-appbuilder==4.3.7 # via apache-superset flask-babel==1.0.0 # via flask-appbuilder diff --git a/setup.py b/setup.py index 060ea19732b9b..3cb0c144b2f58 100644 --- a/setup.py +++ b/setup.py @@ -80,7 +80,7 @@ def get_git_sha() -> str: "cryptography>=39.0.1, <40", "deprecation>=2.1.0, <2.2.0", "flask>=2.2.5, <3.0.0", - "flask-appbuilder>=4.3.6, <5.0.0", + "flask-appbuilder>=4.3.7, <5.0.0", "flask-caching>=1.11.1, <2.0", "flask-compress>=1.13, <2.0", "flask-talisman>=1.0.0, <2.0", diff --git a/superset-frontend/cypress-base/cypress.config.ts b/superset-frontend/cypress-base/cypress.config.ts index 1d2c3baf49600..7340830bf0016 100644 --- a/superset-frontend/cypress-base/cypress.config.ts +++ b/superset-frontend/cypress-base/cypress.config.ts @@ -38,6 +38,23 @@ export default defineConfig({ // We've imported your old cypress plugins here. 
// You may want to clean this up later by importing these. setupNodeEvents(on, config) { + // ECONNRESET on Chrome/Chromium 117.0.5851.0 when using Cypress <12.15.0 + // Check https://github.com/cypress-io/cypress/issues/27804 for context + // TODO: This workaround should be removed when upgrading Cypress + on('before:browser:launch', (browser, launchOptions) => { + if (browser.name === 'chrome' && browser.isHeadless) { + // eslint-disable-next-line no-param-reassign + launchOptions.args = launchOptions.args.map(arg => { + if (arg === '--headless') { + return '--headless=new'; + } + + return arg; + }); + } + return launchOptions; + }); + // eslint-disable-next-line global-require,import/extensions return require('./cypress/plugins/index.js')(on, config); }, diff --git a/superset-frontend/jest.config.js b/superset-frontend/jest.config.js index 24e4886ecda43..316102c5c20ff 100644 --- a/superset-frontend/jest.config.js +++ b/superset-frontend/jest.config.js @@ -17,6 +17,9 @@ * under the License. */ +// timezone for unit tests +process.env.TZ = 'America/New_York'; + module.exports = { testRegex: '\\/superset-frontend\\/(spec|src|plugins|packages|tools)\\/.*(_spec|\\.test)\\.[jt]sx?$', diff --git a/superset-frontend/package.json b/superset-frontend/package.json index ca68f91dee904..0bd3e5509444a 100644 --- a/superset-frontend/package.json +++ b/superset-frontend/package.json @@ -1,6 +1,6 @@ { "name": "superset", - "version": "3.0.0", + "version": "3.0.1", "description": "Superset is a data exploration platform designed to be visual, intuitive, and interactive.", "keywords": [ "big", diff --git a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx index abf5153bb0d51..69fa8a6864909 100644 --- a/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx +++ b/superset-frontend/packages/superset-ui-chart-controls/src/shared-controls/sharedControls.tsx @@ -47,6 +47,8 @@ import { isDefined, hasGenericChartAxes, NO_TIME_RANGE, + validateNonEmpty, + validateMaxValue, } from '@superset-ui/core'; import { @@ -245,7 +247,12 @@ const row_limit: SharedControlConfig<'SelectControl'> = { type: 'SelectControl', freeForm: true, label: t('Row limit'), - validators: [legacyValidateInteger], + clearable: false, + validators: [ + validateNonEmpty, + legacyValidateInteger, + v => validateMaxValue(v, 100000), + ], default: 10000, choices: formatSelectOptions(ROW_LIMIT_OPTIONS), description: t('Limits the number of rows that get displayed.'), diff --git a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts index b3884a8488013..1c4d278f6cc46 100644 --- a/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts +++ b/superset-frontend/packages/superset-ui-core/src/chart/types/Base.ts @@ -58,6 +58,7 @@ export enum AppSection { export type FilterState = { value?: any; [key: string]: any }; export type DataMask = { + __cache?: FilterState; extraFormData?: ExtraFormData; filterState?: FilterState; ownState?: JsonObject; diff --git a/superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.test.ts b/superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.test.ts new file mode 100644 index 0000000000000..6e4f07df4b8bf --- /dev/null +++ 
b/superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.test.ts @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0, + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import finestTemporalGrain from './finestTemporalGrain'; + +test('finestTemporalGrain', () => { + const monthFormatter = finestTemporalGrain([ + new Date('2003-01-01 00:00:00Z').getTime(), + new Date('2003-02-01 00:00:00Z').getTime(), + ]); + expect(monthFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe( + '2003-01-01', + ); + expect(monthFormatter(new Date('2003-02-01 00:00:00Z').getTime())).toBe( + '2003-02-01', + ); + + const yearFormatter = finestTemporalGrain([ + new Date('2003-01-01 00:00:00Z').getTime(), + new Date('2004-01-01 00:00:00Z').getTime(), + ]); + expect(yearFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe( + '2003', + ); + expect(yearFormatter(new Date('2004-01-01 00:00:00Z').getTime())).toBe( + '2004', + ); + + const milliSecondFormatter = finestTemporalGrain([ + new Date('2003-01-01 00:00:00Z').getTime(), + new Date('2003-04-05 06:07:08.123Z').getTime(), + ]); + expect(milliSecondFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe( + '2003-01-01 00:00:00.000', + ); + + const localTimeFormatter = finestTemporalGrain( + [ + new Date('2003-01-01 00:00:00Z').getTime(), + new Date('2003-02-01 00:00:00Z').getTime(), + ], + true, + ); + expect(localTimeFormatter(new Date('2003-01-01 00:00:00Z').getTime())).toBe( + '2002-12-31 19:00', + ); +}); diff --git a/superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.ts b/superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.ts new file mode 100644 index 0000000000000..c03b7ec1593cf --- /dev/null +++ b/superset-frontend/packages/superset-ui-core/src/time-format/formatters/finestTemporalGrain.ts @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import { utcFormat, timeFormat } from 'd3-time-format'; +import { utcUtils, localTimeUtils } from '../utils/d3Time'; +import TimeFormatter from '../TimeFormatter'; + +/* + * A formatter that examines all the values, and uses the finest temporal grain. + */ +export default function finestTemporalGrain( + values: any[], + useLocalTime = false, +) { + const format = useLocalTime ? timeFormat : utcFormat; + + const formatMillisecond = format('%Y-%m-%d %H:%M:%S.%L'); + const formatSecond = format('%Y-%m-%d %H:%M:%S'); + const formatMinute = format('%Y-%m-%d %H:%M'); + const formatHour = format('%Y-%m-%d %H:%M'); + const formatDay = format('%Y-%m-%d'); + const formatMonth = format('%Y-%m-%d'); + const formatYear = format('%Y'); + + const { + hasMillisecond, + hasSecond, + hasMinute, + hasHour, + isNotFirstDayOfMonth, + isNotFirstMonth, + } = useLocalTime ? localTimeUtils : utcUtils; + + let formatFunc = formatYear; + values.forEach((value: any) => { + if (formatFunc === formatYear && isNotFirstMonth(value)) { + formatFunc = formatMonth; + } + if (formatFunc === formatMonth && isNotFirstDayOfMonth(value)) { + formatFunc = formatDay; + } + if (formatFunc === formatDay && hasHour(value)) { + formatFunc = formatHour; + } + if (formatFunc === formatHour && hasMinute(value)) { + formatFunc = formatMinute; + } + if (formatFunc === formatMinute && hasSecond(value)) { + formatFunc = formatSecond; + } + if (formatFunc === formatSecond && hasMillisecond(value)) { + formatFunc = formatMillisecond; + } + }); + + return new TimeFormatter({ + description: + 'Use the finest grain in an array of dates to format all dates in the array', + formatFunc, + id: 'finest_temporal_grain', + label: 'Format temporal columns with the finest grain', + useLocalTime, + }); +} diff --git a/superset-frontend/packages/superset-ui-core/src/time-format/index.ts b/superset-frontend/packages/superset-ui-core/src/time-format/index.ts index 53f23f36431cf..b0d95c1433940 100644 --- a/superset-frontend/packages/superset-ui-core/src/time-format/index.ts +++ b/superset-frontend/packages/superset-ui-core/src/time-format/index.ts @@ -35,6 +35,7 @@ export { default as createMultiFormatter } from './factories/createMultiFormatte export { default as smartDateFormatter } from './formatters/smartDate'; export { default as smartDateDetailedFormatter } from './formatters/smartDateDetailed'; export { default as smartDateVerboseFormatter } from './formatters/smartDateVerbose'; +export { default as finestTemporalGrainFormatter } from './formatters/finestTemporalGrain'; export { default as normalizeTimestamp } from './utils/normalizeTimestamp'; export { default as denormalizeTimestamp } from './utils/denormalizeTimestamp'; diff --git a/superset-frontend/packages/superset-ui-core/src/validator/index.ts b/superset-frontend/packages/superset-ui-core/src/validator/index.ts index 532efcc959116..fb37328c02290 100644 --- a/superset-frontend/packages/superset-ui-core/src/validator/index.ts +++ b/superset-frontend/packages/superset-ui-core/src/validator/index.ts @@ -22,3 +22,4 @@ export { default as legacyValidateNumber } from './legacyValidateNumber'; export { default as validateInteger } from './validateInteger'; export { default as validateNumber } from './validateNumber'; export { default as validateNonEmpty } from './validateNonEmpty'; +export { default as validateMaxValue } from './validateMaxValue'; diff --git a/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts 
b/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts new file mode 100644 index 0000000000000..24c1da1c79dde --- /dev/null +++ b/superset-frontend/packages/superset-ui-core/src/validator/validateMaxValue.ts @@ -0,0 +1,8 @@ +import { t } from '../translation'; + +export default function validateMaxValue(v: unknown, max: number) { + if (Number(v) > +max) { + return t('Value cannot exceed %s', max); + } + return false; +} diff --git a/superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts b/superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts new file mode 100644 index 0000000000000..70f3d332c52e3 --- /dev/null +++ b/superset-frontend/packages/superset-ui-core/test/validator/validateMaxValue.test.ts @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import { validateMaxValue } from '@superset-ui/core'; +import './setup'; + +describe('validateMaxValue()', () => { + it('returns the warning message if invalid', () => { + expect(validateMaxValue(10.1, 10)).toBeTruthy(); + expect(validateMaxValue(1, 0)).toBeTruthy(); + expect(validateMaxValue('2', 1)).toBeTruthy(); + }); + it('returns false if the input is valid', () => { + expect(validateMaxValue(0, 1)).toBeFalsy(); + expect(validateMaxValue(10, 10)).toBeFalsy(); + expect(validateMaxValue(undefined, 1)).toBeFalsy(); + expect(validateMaxValue(NaN, NaN)).toBeFalsy(); + expect(validateMaxValue(null, 1)).toBeFalsy(); + expect(validateMaxValue('1', 1)).toBeFalsy(); + expect(validateMaxValue('a', 1)).toBeFalsy(); + }); +}); diff --git a/superset-frontend/plugins/legacy-plugin-chart-parallel-coordinates/src/ReactParallelCoordinates.jsx b/superset-frontend/plugins/legacy-plugin-chart-parallel-coordinates/src/ReactParallelCoordinates.jsx index 4a7675d555cd6..7f30716057604 100644 --- a/superset-frontend/plugins/legacy-plugin-chart-parallel-coordinates/src/ReactParallelCoordinates.jsx +++ b/superset-frontend/plugins/legacy-plugin-chart-parallel-coordinates/src/ReactParallelCoordinates.jsx @@ -106,7 +106,7 @@ export default styled(ParallelCoordinates)` height: 18px; margin: 0px; } - .parcoords .row:nth-child(odd) { + .parcoords .row:nth-of-type(odd) { background: ${addAlpha(theme.colors.grayscale.dark2, 0.05)}; } .parcoords .header { diff --git a/superset-frontend/plugins/legacy-preset-chart-nvd3/src/ReactNVD3.jsx b/superset-frontend/plugins/legacy-preset-chart-nvd3/src/ReactNVD3.jsx index 9a9962e8aaca4..f7f219a05d219 100644 --- a/superset-frontend/plugins/legacy-preset-chart-nvd3/src/ReactNVD3.jsx +++ b/superset-frontend/plugins/legacy-preset-chart-nvd3/src/ReactNVD3.jsx @@ -152,7 +152,7 @@ export default styled(NVD3)` white-space: nowrap; font-weight: ${({ theme }) =>
theme.typography.weights.bold}; } - tbody tr:not(.tooltip-header) td:nth-child(2) { + tbody tr:not(.tooltip-header) td:nth-of-type(2) { word-break: break-word; } } diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.js b/superset-frontend/src/SqlLab/actions/sqlLab.js index 5d9ecdacdf6c3..fbfba6783e8e4 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.js @@ -596,7 +596,12 @@ export function addNewQueryEditor() { '-- Note: Unless you save your query, these tabs will NOT persist if you clear your cookies or change browsers.\n\n', ); - const name = newQueryTabName(queryEditors || []); + const name = newQueryTabName( + queryEditors?.map(qe => ({ + ...qe, + ...(qe.id === unsavedQueryEditor.id && unsavedQueryEditor), + })) || [], + ); return dispatch( addQueryEditor({ @@ -614,10 +619,12 @@ export function addNewQueryEditor() { export function cloneQueryToNewTab(query, autorun) { return function (dispatch, getState) { const state = getState(); - const { queryEditors, tabHistory } = state.sqlLab; - const sourceQueryEditor = queryEditors.find( - qe => qe.id === tabHistory[tabHistory.length - 1], - ); + const { queryEditors, unsavedQueryEditor, tabHistory } = state.sqlLab; + const sourceQueryEditor = { + ...queryEditors.find(qe => qe.id === tabHistory[tabHistory.length - 1]), + ...(tabHistory[tabHistory.length - 1] === unsavedQueryEditor.id && + unsavedQueryEditor), + }; const queryEditor = { name: t('Copy of %s', sourceQueryEditor.name), dbId: query.dbId ? query.dbId : null, diff --git a/superset-frontend/src/SqlLab/actions/sqlLab.test.js b/superset-frontend/src/SqlLab/actions/sqlLab.test.js index fc94a44645c7e..25f80aa1c386a 100644 --- a/superset-frontend/src/SqlLab/actions/sqlLab.test.js +++ b/superset-frontend/src/SqlLab/actions/sqlLab.test.js @@ -389,8 +389,11 @@ describe('async actions', () => { const state = { sqlLab: { tabHistory: [id], - queryEditors: [{ id, name: 'Dummy query editor' }], - unsavedQueryEditor: {}, + queryEditors: [{ id, name: 'out of updated title' }], + unsavedQueryEditor: { + id, + name: 'Dummy query editor', + }, }, }; const store = mockStore(state); @@ -444,16 +447,23 @@ describe('async actions', () => { describe('addNewQueryEditor', () => { it('creates new query editor with new tab name', () => { - const store = mockStore(initialState); + const store = mockStore({ + ...initialState, + sqlLab: { + ...initialState.sqlLab, + unsavedQueryEditor: { + id: defaultQueryEditor.id, + name: 'Untitled Query 6', + }, + }, + }); const expectedActions = [ { type: actions.ADD_QUERY_EDITOR, queryEditor: { id: 'abcd', sql: expect.stringContaining('SELECT ...'), - name: `Untitled Query ${ - store.getState().sqlLab.queryEditors.length + 1 - }`, + name: `Untitled Query 7`, dbId: defaultQueryEditor.dbId, schema: defaultQueryEditor.schema, autorun: false, diff --git a/superset-frontend/src/SqlLab/components/SaveDatasetActionButton/index.tsx b/superset-frontend/src/SqlLab/components/SaveDatasetActionButton/index.tsx index dbb25b138a58e..79a3bf0b8ee9c 100644 --- a/superset-frontend/src/SqlLab/components/SaveDatasetActionButton/index.tsx +++ b/superset-frontend/src/SqlLab/components/SaveDatasetActionButton/index.tsx @@ -44,7 +44,7 @@ const SaveDatasetActionButton = ({ font-weight: ${theme.gridUnit * 150}; background-color: ${theme.colors.primary.light4}; color: ${theme.colors.primary.dark1}; - &:nth-child(2) { + &:nth-of-type(2) { &:before, &:hover:before { border-left: 2px solid ${theme.colors.primary.dark2}; diff --git 
a/superset-frontend/src/SqlLab/components/SaveQuery/SaveQuery.test.tsx b/superset-frontend/src/SqlLab/components/SaveQuery/SaveQuery.test.tsx index f321a54ec4dbe..54b81df96013d 100644 --- a/superset-frontend/src/SqlLab/components/SaveQuery/SaveQuery.test.tsx +++ b/superset-frontend/src/SqlLab/components/SaveQuery/SaveQuery.test.tsx @@ -27,7 +27,7 @@ import { initialState, databases } from 'src/SqlLab/fixtures'; const mockedProps = { queryEditorId: '123', animation: false, - database: databases.result[0], + database: { ...databases.result[0], allows_virtual_table_explore: false }, onUpdate: () => {}, onSave: () => {}, saveQueryWarning: null, @@ -61,6 +61,25 @@ const middlewares = [thunk]; const mockStore = configureStore(middlewares); describe('SavedQuery', () => { + it("doesn't render save button when allows_virtual_table_explore is undefined", async () => { + const noRenderProps = { + ...mockedProps, + database: { + ...mockedProps.database, + allows_virtual_table_explore: undefined, + }, + }; + render(<SaveQuery {...noRenderProps} />, { + useRedux: true, + store: mockStore(mockState), + }); + expect(() => { + screen.getByRole('button', { name: /save/i }); + }).toThrow( + 'Unable to find an accessible element with the role "button" and name `/save/i`', + ); + }); + it('renders a non-split save button when allows_virtual_table_explore is not enabled', () => { render(<SaveQuery {...mockedProps} />, { useRedux: true, diff --git a/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx b/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx index 4071b9e2d71d4..6ed0f4c668e7e 100644 --- a/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx +++ b/superset-frontend/src/SqlLab/components/SaveQuery/index.tsx @@ -98,6 +98,8 @@ const SaveQuery = ({ const [showSaveDatasetModal, setShowSaveDatasetModal] = useState(false); const isSaved = !!query.remoteId; const canExploreDatabase = !!database?.allows_virtual_table_explore; + const shouldShowSaveButton = + database?.allows_virtual_table_explore !== undefined; const overlayMenu = ( @@ -180,10 +182,12 @@ const SaveQuery = ({ return ( - + {shouldShowSaveButton && ( + + )} setShowSaveDatasetModal(false)} diff --git a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx index e89bdc57b7fb3..4c062308bd7b0 100644 --- a/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx +++ b/superset-frontend/src/SqlLab/components/SqlEditorLeftBar/index.tsx @@ -84,7 +84,9 @@ const collapseStyles = (theme: SupersetTheme) => css` padding: 0px ${theme.gridUnit * 4}px 0px 0px !important; } .ant-collapse-arrow { - top: ${theme.gridUnit * 2}px !important; + padding: 0 !important; + bottom: ${theme.gridUnit}px !important; + right: ${theme.gridUnit * 4}px !important; color: ${theme.colors.primary.dark1} !important; &:hover { color: ${theme.colors.primary.dark2} !important; diff --git a/superset-frontend/src/SqlLab/components/TableElement/index.tsx b/superset-frontend/src/SqlLab/components/TableElement/index.tsx index c47d1d86d3e1e..2c5f68c5bf959 100644 --- a/superset-frontend/src/SqlLab/components/TableElement/index.tsx +++ b/superset-frontend/src/SqlLab/components/TableElement/index.tsx @@ -264,6 +264,11 @@ const TableElement = ({ table, ...props }: TableElementProps) => { display: flex; column-gap: ${theme.gridUnit * 1.5}px; margin-right: ${theme.gridUnit}px; + & span { + display: flex; + justify-content: center; + width: ${theme.gridUnit * 4}px; + } `} > {keyLink} diff --git
a/superset-frontend/src/SqlLab/reducers/getInitialState.js b/superset-frontend/src/SqlLab/reducers/getInitialState.js index b6fdbe14910ba..00b0128148e0b 100644 --- a/superset-frontend/src/SqlLab/reducers/getInitialState.js +++ b/superset-frontend/src/SqlLab/reducers/getInitialState.js @@ -135,72 +135,63 @@ export default function getInitialState({ }); } - const queries = Object.fromEntries( - Object.entries(queries_ || {}).map(([queryId, query]) => [ - queryId, - { - ...query, - ...(query.startDttm && { - startDttm: Number(query.startDttm), - }), - ...(query.endDttm && { - endDttm: Number(query.endDttm), - }), - }, - ]), - ); + const queries = { ...queries_ }; - /** - * If the `SQLLAB_BACKEND_PERSISTENCE` feature flag is off, or if the user - * hasn't used SQL Lab after it has been turned on, the state will be stored - * in the browser's local storage. - */ - if ( - localStorage.getItem('redux') && - JSON.parse(localStorage.getItem('redux')).sqlLab - ) { - const { sqlLab } = JSON.parse(localStorage.getItem('redux')); + try { + /** + * If the `SQLLAB_BACKEND_PERSISTENCE` feature flag is off, or if the user + * hasn't used SQL Lab after it has been turned on, the state will be stored + * in the browser's local storage. + */ + if ( + localStorage.getItem('redux') && + JSON.parse(localStorage.getItem('redux')).sqlLab + ) { + const { sqlLab } = JSON.parse(localStorage.getItem('redux')); - if (sqlLab.queryEditors.length === 0) { - // migration was successful - localStorage.removeItem('redux'); - } else { - unsavedQueryEditor = sqlLab.unsavedQueryEditor || {}; - // add query editors and tables to state with a special flag so they can - // be migrated if the `SQLLAB_BACKEND_PERSISTENCE` feature flag is on - sqlLab.queryEditors.forEach(qe => { - queryEditors = { - ...queryEditors, - [qe.id]: { - ...queryEditors[qe.id], - ...qe, - name: qe.title || qe.name, - ...(unsavedQueryEditor.id === qe.id && unsavedQueryEditor), - inLocalStorage: true, - loaded: true, - }, - }; - }); - const expandedTables = new Set(); - tables = sqlLab.tables.reduce((merged, table) => { - const expanded = !expandedTables.has(table.queryEditorId); - if (expanded) { - expandedTables.add(table.queryEditorId); - } - return { - ...merged, - [table.id]: { - ...tables[table.id], - ...table, - expanded, - }, - }; - }, tables); - Object.values(sqlLab.queries).forEach(query => { - queries[query.id] = { ...query, inLocalStorage: true }; - }); - tabHistory.push(...sqlLab.tabHistory); + if (sqlLab.queryEditors.length === 0) { + // migration was successful + localStorage.removeItem('redux'); + } else { + unsavedQueryEditor = sqlLab.unsavedQueryEditor || {}; + // add query editors and tables to state with a special flag so they can + // be migrated if the `SQLLAB_BACKEND_PERSISTENCE` feature flag is on + sqlLab.queryEditors.forEach(qe => { + queryEditors = { + ...queryEditors, + [qe.id]: { + ...queryEditors[qe.id], + ...qe, + name: qe.title || qe.name, + ...(unsavedQueryEditor.id === qe.id && unsavedQueryEditor), + inLocalStorage: true, + loaded: true, + }, + }; + }); + const expandedTables = new Set(); + tables = sqlLab.tables.reduce((merged, table) => { + const expanded = !expandedTables.has(table.queryEditorId); + if (expanded) { + expandedTables.add(table.queryEditorId); + } + return { + ...merged, + [table.id]: { + ...tables[table.id], + ...table, + expanded, + }, + }; + }, tables); + Object.values(sqlLab.queries).forEach(query => { + queries[query.id] = { ...query, inLocalStorage: true }; + }); + tabHistory.push(...sqlLab.tabHistory); 
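+ // the `inLocalStorage` flag set above lets the MIGRATE_* reducer
+ // actions move these restored entries to backend persistence later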
+ } } + } catch (error) { + // continue regardless of error } return { @@ -209,7 +200,20 @@ export default function getInitialState({ alerts: [], databases, offline: false, - queries, + queries: Object.fromEntries( + Object.entries(queries).map(([queryId, query]) => [ + queryId, + { + ...query, + ...(query.startDttm && { + startDttm: Number(query.startDttm), + }), + ...(query.endDttm && { + endDttm: Number(query.endDttm), + }), + }, + ]), + ), queryEditors: Object.values(queryEditors), tabHistory: dedupeTabHistory(tabHistory), tables: Object.values(tables), diff --git a/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts b/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts index c06633c6f5c7f..420ef69c7bd87 100644 --- a/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts +++ b/superset-frontend/src/SqlLab/reducers/getInitialState.test.ts @@ -16,7 +16,7 @@ * specific language governing permissions and limitations * under the License. */ - +import { runningQuery, successfulQuery } from 'src/SqlLab/fixtures'; import getInitialState, { dedupeTabHistory } from './getInitialState'; const apiData = { @@ -121,5 +121,56 @@ describe('getInitialState', () => { }).sqlLab.tables; expect(initializedTables.map(({ id }) => id)).toEqual([1, 2, 6]); }); + + it('should parse the float dttm value', () => { + const startDttmInStr = '1693433503447.166992'; + const endDttmInStr = '1693433503500.23132'; + + localStorage.setItem( + 'redux', + JSON.stringify({ + sqlLab: { + tables: [ + { id: 1, name: 'test1' }, + { id: 6, name: 'test6' }, + ], + queryEditors: [{ id: 1, title: 'editor1' }], + queries: { + localStoragePersisted: { + ...successfulQuery, + id: 'localStoragePersisted', + startDttm: startDttmInStr, + endDttm: endDttmInStr, + }, + }, + tabHistory: [], + }, + }), + ); + + const initializedQueries = getInitialState({ + ...apiData, + queries: { + backendPersisted: { + ...runningQuery, + id: 'backendPersisted', + startDttm: startDttmInStr, + endDttm: endDttmInStr, + }, + }, + }).sqlLab.queries; + expect(initializedQueries.backendPersisted).toEqual( + expect.objectContaining({ + startDttm: Number(startDttmInStr), + endDttm: Number(endDttmInStr), + }), + ); + expect(initializedQueries.localStoragePersisted).toEqual( + expect.objectContaining({ + startDttm: Number(startDttmInStr), + endDttm: Number(endDttmInStr), + }), + ); + }); }); }); diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js index 66850ce079505..e6e0a54ed949e 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js @@ -424,13 +424,16 @@ export default function sqlLabReducer(state = {}, action) { return { ...state, activeSouthPaneTab: action.tabId }; }, [actions.MIGRATE_QUERY_EDITOR]() { - // remove migrated query editor from localStorage - const { sqlLab } = JSON.parse(localStorage.getItem('redux')); - sqlLab.queryEditors = sqlLab.queryEditors.filter( - qe => qe.id !== action.oldQueryEditor.id, - ); - localStorage.setItem('redux', JSON.stringify({ sqlLab })); - + try { + // remove migrated query editor from localStorage + const { sqlLab } = JSON.parse(localStorage.getItem('redux')); + sqlLab.queryEditors = sqlLab.queryEditors.filter( + qe => qe.id !== action.oldQueryEditor.id, + ); + localStorage.setItem('redux', JSON.stringify({ sqlLab })); + } catch (error) { + // continue regardless of error + } // replace localStorage query editor with the server backed one return addToArr( removeFromArr(state, 
'queryEditors', action.oldQueryEditor), @@ -439,12 +442,16 @@ export default function sqlLabReducer(state = {}, action) { ); }, [actions.MIGRATE_TABLE]() { - // remove migrated table from localStorage - const { sqlLab } = JSON.parse(localStorage.getItem('redux')); - sqlLab.tables = sqlLab.tables.filter( - table => table.id !== action.oldTable.id, - ); - localStorage.setItem('redux', JSON.stringify({ sqlLab })); + try { + // remove migrated table from localStorage + const { sqlLab } = JSON.parse(localStorage.getItem('redux')); + sqlLab.tables = sqlLab.tables.filter( + table => table.id !== action.oldTable.id, + ); + localStorage.setItem('redux', JSON.stringify({ sqlLab })); + } catch (error) { + // continue regardless of error + } // replace localStorage table with the server backed one return addToArr( @@ -454,12 +461,16 @@ export default function sqlLabReducer(state = {}, action) { ); }, [actions.MIGRATE_TAB_HISTORY]() { - // remove migrated tab from localStorage tabHistory - const { sqlLab } = JSON.parse(localStorage.getItem('redux')); - sqlLab.tabHistory = sqlLab.tabHistory.filter( - tabId => tabId !== action.oldId, - ); - localStorage.setItem('redux', JSON.stringify({ sqlLab })); + try { + // remove migrated tab from localStorage tabHistory + const { sqlLab } = JSON.parse(localStorage.getItem('redux')); + sqlLab.tabHistory = sqlLab.tabHistory.filter( + tabId => tabId !== action.oldId, + ); + localStorage.setItem('redux', JSON.stringify({ sqlLab })); + } catch (error) { + // continue regardless of error + } const tabHistory = state.tabHistory.filter( tabId => tabId !== action.oldId, ); diff --git a/superset-frontend/src/components/ButtonGroup/index.tsx b/superset-frontend/src/components/ButtonGroup/index.tsx index 463a042066585..78e29714cd3f2 100644 --- a/superset-frontend/src/components/ButtonGroup/index.tsx +++ b/superset-frontend/src/components/ButtonGroup/index.tsx @@ -30,18 +30,18 @@ export default function ButtonGroup(props: ButtonGroupProps) { role="group" className={className} css={{ - '& :nth-child(1):not(:nth-last-child(1))': { + '& :nth-of-type(1):not(:nth-last-of-type(1))': { borderTopRightRadius: 0, borderBottomRightRadius: 0, borderRight: 0, marginLeft: 0, }, - '& :not(:nth-child(1)):not(:nth-last-child(1))': { + '& :not(:nth-of-type(1)):not(:nth-last-of-type(1))': { borderRadius: 0, borderRight: 0, marginLeft: 0, }, - '& :nth-last-child(1):not(:nth-child(1))': { + '& :nth-last-of-type(1):not(:nth-of-type(1))': { borderTopLeftRadius: 0, borderBottomLeftRadius: 0, marginLeft: 0, diff --git a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx index 3d0cc7da57a71..ffbba4c7db1f8 100644 --- a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx +++ b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx @@ -762,9 +762,7 @@ class DatasourceEditor extends React.PureComponent { database_name: datasource.database.database_name || datasource.database.name, schema_name: datasource.schema, - table_name: datasource.table_name - ? 
encodeURIComponent(datasource.table_name) : datasource.table_name, + table_name: datasource.table_name, normalize_columns: datasource.normalize_columns, }; Object.entries(params).forEach(([key, value]) => { @@ -773,7 +771,7 @@ params[key] = null; } }); - const endpoint = `/datasource/external_metadata_by_name/?q=${rison.encode( + const endpoint = `/datasource/external_metadata_by_name/?q=${rison.encode_uri( params, )}`; this.setState({ metadataLoading: true }); diff --git a/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx b/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx index ae35ebc139c54..9100eb6c86b2d 100644 --- a/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx +++ b/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx @@ -75,7 +75,7 @@ describe('DatasourceEditor', () => { setTimeout(() => { expect(fetchMock.calls(DATASOURCE_ENDPOINT)).toHaveLength(1); expect(fetchMock.calls(DATASOURCE_ENDPOINT)[0][0]).toContain( - 'Vehicle%20Sales%20%2B%27', + 'Vehicle+Sales%20%2B', ); fetchMock.reset(); done(); diff --git a/superset-frontend/src/components/DropdownButton/index.tsx b/superset-frontend/src/components/DropdownButton/index.tsx index c6293f66a3fbd..a35e66b5d7329 100644 --- a/superset-frontend/src/components/DropdownButton/index.tsx +++ b/superset-frontend/src/components/DropdownButton/index.tsx @@ -42,7 +42,7 @@ const StyledDropdownButton = styled.div` background-color: ${({ theme }) => theme.colors.grayscale.light2}; color: ${({ theme }) => theme.colors.grayscale.base}; } - &:nth-child(2) { + &:nth-of-type(2) { margin: 0; border-radius: ${({ theme }) => `0 ${theme.gridUnit}px ${theme.gridUnit}px 0`}; diff --git a/superset-frontend/src/components/DropdownSelectableIcon/index.tsx b/superset-frontend/src/components/DropdownSelectableIcon/index.tsx index 582bc182e3c15..f668aa720d162 100644 --- a/superset-frontend/src/components/DropdownSelectableIcon/index.tsx +++ b/superset-frontend/src/components/DropdownSelectableIcon/index.tsx @@ -46,7 +46,7 @@ const StyledDropdownButton = styled( button.ant-btn:first-of-type { display: none; } - > button.ant-btn:nth-child(2) { + > button.ant-btn:nth-of-type(2) { display: inline-flex; background-color: transparent !important; height: unset; diff --git a/superset-frontend/src/components/ErrorMessage/ErrorAlert.test.tsx b/superset-frontend/src/components/ErrorMessage/ErrorAlert.test.tsx index 38006c2ec409e..2c2f1c2349877 100644 --- a/superset-frontend/src/components/ErrorMessage/ErrorAlert.test.tsx +++ b/superset-frontend/src/components/ErrorMessage/ErrorAlert.test.tsx @@ -21,6 +21,7 @@ import React from 'react'; import userEvent from '@testing-library/user-event'; import { render, screen } from 'spec/helpers/testing-library'; import { supersetTheme } from '@superset-ui/core'; +import { isCurrentUserBot } from 'src/utils/isBot'; import ErrorAlert from './ErrorAlert'; import { ErrorLevel, ErrorSource } from './types'; @@ -31,6 +32,10 @@ , ); +jest.mock('src/utils/isBot', () => ({ + isCurrentUserBot: jest.fn(), +})); const mockedProps = { body: 'Error body', level: 'warning' as ErrorLevel, source: 'dashboard' as ErrorSource, subtitle: 'Error subtitle', title: 'Error title', description: 'we are unable to connect db.', }; +beforeEach(() => { (isCurrentUserBot as jest.Mock).mockReturnValue(false); }); + +afterEach(() => { jest.clearAllMocks(); }); + test('should render', () => { const { container } = render(<ErrorAlert {...mockedProps} />); expect(container).toBeInTheDocument(); @@
-100,6 +113,17 @@ expect(screen.getByText('See more')).toBeInTheDocument(); }); +test('should render the error subtitle and body by default for the screen capture request', () => { + const seemoreProps = { + ...mockedProps, + source: 'explore' as ErrorSource, + }; + (isCurrentUserBot as jest.Mock).mockReturnValue(true); + render(<ErrorAlert {...seemoreProps} />); + expect(screen.getByText('Error subtitle')).toBeInTheDocument(); + expect(screen.getByText('Error body')).toBeInTheDocument(); +}); + test('should render the modal', () => { render(<ErrorAlert {...mockedProps} />, { useRedux: true }); const button = screen.getByText('See more'); diff --git a/superset-frontend/src/components/ErrorMessage/ErrorAlert.tsx b/superset-frontend/src/components/ErrorMessage/ErrorAlert.tsx index cf2522b4e43ce..d61abea5976b4 100644 --- a/superset-frontend/src/components/ErrorMessage/ErrorAlert.tsx +++ b/superset-frontend/src/components/ErrorMessage/ErrorAlert.tsx @@ -21,6 +21,7 @@ import { styled, useTheme, t } from '@superset-ui/core'; import { noOp } from 'src/utils/common'; import Modal from 'src/components/Modal'; import Button from 'src/components/Button'; +import { isCurrentUserBot } from 'src/utils/isBot'; import Icons from 'src/components/Icons'; import { ErrorLevel, ErrorSource } from './types'; @@ -102,9 +103,10 @@ export default function ErrorAlert({ const theme = useTheme(); const [isModalOpen, setIsModalOpen] = useState(false); - const [isBodyExpanded, setIsBodyExpanded] = useState(false); + const [isBodyExpanded, setIsBodyExpanded] = useState(isCurrentUserBot()); - const isExpandable = ['explore', 'sqllab'].includes(source); + const isExpandable = + isCurrentUserBot() || ['explore', 'sqllab'].includes(source); const iconColor = theme.colors[level].base; return ( diff --git a/superset-frontend/src/components/FilterableTable/FilterableTable.test.tsx b/superset-frontend/src/components/FilterableTable/FilterableTable.test.tsx index 17e9cad2faa98..aebf2c44b2319 100644 --- a/superset-frontend/src/components/FilterableTable/FilterableTable.test.tsx +++ b/superset-frontend/src/components/FilterableTable/FilterableTable.test.tsx @@ -25,11 +25,11 @@ import userEvent from '@testing-library/user-event'; describe('FilterableTable', () => { const mockedProps = { - orderedColumnKeys: ['a', 'b', 'c'], + orderedColumnKeys: ['a', 'b', 'c', 'children'], data: [ - { a: 'a1', b: 'b1', c: 'c1', d: 0 }, - { a: 'a2', b: 'b2', c: 'c2', d: 100 }, - { a: null, b: 'b3', c: 'c3', d: 50 }, + { a: 'a1', b: 'b1', c: 'c1', d: 0, children: 0 }, + { a: 'a2', b: 'b2', c: 'c2', d: 100, children: 2 }, + { a: null, b: 'b3', c: 'c3', d: 50, children: 1 }, ], height: 500, }; diff --git a/superset-frontend/src/components/FilterableTable/index.tsx b/superset-frontend/src/components/FilterableTable/index.tsx index 91fc1f4477f2c..01dd31be72012 100644 --- a/superset-frontend/src/components/FilterableTable/index.tsx +++ b/superset-frontend/src/components/FilterableTable/index.tsx @@ -391,6 +391,7 @@ const FilterableTable = ({ usePagination={false} columns={columns} data={filteredList} + childrenColumnName="" virtualize bordered /> diff --git a/superset-frontend/src/components/Select/AsyncSelect.test.tsx b/superset-frontend/src/components/Select/AsyncSelect.test.tsx index b964f48ee78ab..e49f00be537aa 100644 --- a/superset-frontend/src/components/Select/AsyncSelect.test.tsx +++ b/superset-frontend/src/components/Select/AsyncSelect.test.tsx @@ -858,6 +858,45 @@ test('does not duplicate options when using numeric values', async () => { await waitFor(() =>
expect(getAllSelectOptions().length).toBe(1)); }); +test('pasting an existing option does not duplicate it', async () => { + const options = jest.fn(async () => ({ + data: [OPTIONS[0]], + totalCount: 1, + })); + render(); + await open(); + const input = getElementByClassName('.ant-select-selection-search-input'); + const paste = createEvent.paste(input, { + clipboardData: { + getData: () => OPTIONS[0].label, + }, + }); + fireEvent(input, paste); + expect(await findAllSelectOptions()).toHaveLength(1); +}); + +test('pasting an existing option does not duplicate it in multiple mode', async () => { + const options = jest.fn(async () => ({ + data: [ + { label: 'John', value: 1 }, + { label: 'Liam', value: 2 }, + { label: 'Olivia', value: 3 }, + ], + totalCount: 3, + })); + render(); + await open(); + const input = getElementByClassName('.ant-select-selection-search-input'); + const paste = createEvent.paste(input, { + clipboardData: { + getData: () => 'John,Liam,Peter', + }, + }); + fireEvent(input, paste); + // Only Peter should be added + expect(await findAllSelectOptions()).toHaveLength(4); +}); + /* TODO: Add tests that require scroll interaction. Needs further investigation. - Fetches more data when scrolling and more data is available diff --git a/superset-frontend/src/components/Select/AsyncSelect.tsx b/superset-frontend/src/components/Select/AsyncSelect.tsx index 320d6ec3bdb47..20de7bb5911c0 100644 --- a/superset-frontend/src/components/Select/AsyncSelect.tsx +++ b/superset-frontend/src/components/Select/AsyncSelect.tsx @@ -49,6 +49,8 @@ import { dropDownRenderHelper, handleFilterOptionHelper, mapOptions, + getOption, + isObject, } from './utils'; import { AsyncSelectProps, @@ -523,19 +525,33 @@ const AsyncSelect = forwardRef( [ref], ); + const getPastedTextValue = useCallback( + (text: string) => { + const option = getOption(text, fullSelectOptions, true); + const value: AntdLabeledValue = { + label: text, + value: text, + }; + if (option) { + value.label = isObject(option) ? option.label : option; + value.value = isObject(option) ? option.value! : option; + } + return value; + }, + [fullSelectOptions], + ); + const onPaste = (e: ClipboardEvent) => { const pastedText = e.clipboardData.getData('text'); if (isSingleMode) { - setSelectValue({ label: pastedText, value: pastedText }); + setSelectValue(getPastedTextValue(pastedText)); } else { const token = tokenSeparators.find(token => pastedText.includes(token)); const array = token ? 
uniq(pastedText.split(token)) : [pastedText]; + const values = array.map(item => getPastedTextValue(item)); setSelectValue(previous => [ ...((previous || []) as AntdLabeledValue[]), - ...array.map(value => ({ - label: value, - value, - })), + ...values, ]); } }; diff --git a/superset-frontend/src/components/Select/Select.test.tsx b/superset-frontend/src/components/Select/Select.test.tsx index 2b204cec1cd52..52e566df177ad 100644 --- a/superset-frontend/src/components/Select/Select.test.tsx +++ b/superset-frontend/src/components/Select/Select.test.tsx @@ -972,6 +972,45 @@ test('does not duplicate options when using numeric values', async () => { await waitFor(() => expect(getAllSelectOptions().length).toBe(1)); }); +test('pasting an existing option does not duplicate it', async () => { + render(, + ); + await open(); + const input = getElementByClassName('.ant-select-selection-search-input'); + const paste = createEvent.paste(input, { + clipboardData: { + getData: () => 'John,Liam,Peter', + }, + }); + fireEvent(input, paste); + // Only Peter should be added + expect(await findAllSelectOptions()).toHaveLength(4); +}); + /* TODO: Add tests that require scroll interaction. Needs further investigation. - Fetches more data when scrolling and more data is available diff --git a/superset-frontend/src/components/Select/Select.tsx b/superset-frontend/src/components/Select/Select.tsx index 89c62ef8bd98f..907850a456fe5 100644 --- a/superset-frontend/src/components/Select/Select.tsx +++ b/superset-frontend/src/components/Select/Select.tsx @@ -51,6 +51,8 @@ import { mapValues, mapOptions, hasCustomLabels, + getOption, + isObject, } from './utils'; import { RawValue, SelectOptionsType, SelectProps } from './types'; import { @@ -530,27 +532,42 @@ const Select = forwardRef( actualMaxTagCount -= 1; } + const getPastedTextValue = useCallback( + (text: string) => { + const option = getOption(text, fullSelectOptions, true); + if (labelInValue) { + const value: AntdLabeledValue = { + label: text, + value: text, + }; + if (option) { + value.label = isObject(option) ? option.label : option; + value.value = isObject(option) ? option.value! : option; + } + return value; + } + return option ? (isObject(option) ? option.value! : option) : text; + }, + [fullSelectOptions, labelInValue], + ); + const onPaste = (e: ClipboardEvent) => { const pastedText = e.clipboardData.getData('text'); if (isSingleMode) { - setSelectValue( - labelInValue ? { label: pastedText, value: pastedText } : pastedText, - ); + setSelectValue(getPastedTextValue(pastedText)); } else { const token = tokenSeparators.find(token => pastedText.includes(token)); const array = token ? uniq(pastedText.split(token)) : [pastedText]; + const values = array.map(item => getPastedTextValue(item)); if (labelInValue) { setSelectValue(previous => [ ...((previous || []) as AntdLabeledValue[]), - ...array.map(value => ({ - label: value, - value, - })), + ...(values as AntdLabeledValue[]), ]); } else { setSelectValue(previous => [ ...((previous || []) as string[]), - ...array, + ...(values as string[]), ]); } } diff --git a/superset-frontend/src/components/Select/utils.tsx b/superset-frontend/src/components/Select/utils.tsx index 7de201caeb6a7..0b638f4f0128f 100644 --- a/superset-frontend/src/components/Select/utils.tsx +++ b/superset-frontend/src/components/Select/utils.tsx @@ -49,27 +49,33 @@ export function getValue( return isLabeledValue(option) ? 
option.value : option; } -export function hasOption( +export function getOption( value: V, options?: V | LabeledValue | (V | LabeledValue)[], checkLabel = false, -): boolean { +): V | LabeledValue { const optionsArray = ensureIsArray(options); // When comparing the values we use the equality // operator to automatically convert different types - return ( - optionsArray.find( - x => + return optionsArray.find( + x => + // eslint-disable-next-line eqeqeq + x == value || + (isObject(x) && // eslint-disable-next-line eqeqeq - x == value || - (isObject(x) && - // eslint-disable-next-line eqeqeq - (('value' in x && x.value == value) || - (checkLabel && 'label' in x && x.label === value))), - ) !== undefined + (('value' in x && x.value == value) || + (checkLabel && 'label' in x && x.label === value))), ); } +export function hasOption( + value: V, + options?: V | LabeledValue | (V | LabeledValue)[], + checkLabel = false, +): boolean { + return getOption(value, options, checkLabel) !== undefined; +} + /** * It creates a comparator to check for a specific property. * Can be used with string and number property values. diff --git a/superset-frontend/src/components/Table/index.tsx b/superset-frontend/src/components/Table/index.tsx index 84ca7883f5757..12fdcec1d8e7d 100644 --- a/superset-frontend/src/components/Table/index.tsx +++ b/superset-frontend/src/components/Table/index.tsx @@ -150,6 +150,12 @@ export interface TableProps { * only supported for virtualize == true */ allowHTML?: boolean; + + /** + * The column that contains children to display. + * Check https://ant.design/components/table#table for more details. + */ + childrenColumnName?: string; } const defaultRowSelection: React.Key[] = []; @@ -259,6 +265,7 @@ export function Table( recordCount, onRow, allowHTML = false, + childrenColumnName, } = props; const wrapperRef = useRef(null); @@ -392,6 +399,9 @@ export function Table( theme, height: bodyHeight, bordered, + expandable: { + childrenColumnName, + }, }; return ( diff --git a/superset-frontend/src/components/Tags/utils.tsx b/superset-frontend/src/components/Tags/utils.tsx index 690a9b44066d0..48bd979046d5b 100644 --- a/superset-frontend/src/components/Tags/utils.tsx +++ b/superset-frontend/src/components/Tags/utils.tsx @@ -66,7 +66,7 @@ export const loadTags = async ( const getErrorMessage = ({ error, message }: ClientErrorObject) => { let errorText = message || error || t('An error has occurred'); if (message === 'Forbidden') { - errorText = t('You do not have permission to edit this dashboard'); + errorText = t('You do not have permission to read tags'); } return errorText; }; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/state.ts b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/state.ts index a71028145969b..bb5b5c2672e28 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/state.ts +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/FilterControls/state.ts @@ -17,7 +17,7 @@ * under the License. 
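An aside on the `getOption`/`hasOption` refactor above: the matching deliberately keeps loose equality so a pasted string like `'1'` resolves to the numeric option `1`, and can also match on label so pasted display text maps back to the canonical option instead of creating a duplicate. A rough rendering of that matching rule in Python, for illustration only (the TypeScript util above is the real implementation; these names are hypothetical):

```python
def get_option(value, options, check_label=False):
    """Return the first option matching `value` by value or label.

    Mirrors the loose (==-style) comparison of the TypeScript util:
    '1' should match the numeric option 1, so stringified forms are
    compared as well.
    """
    for option in options or []:
        if isinstance(option, dict):
            if str(option.get("value")) == str(value):
                return option
            if check_label and option.get("label") == value:
                return option
        elif option == value or str(option) == str(value):
            return option
    return None


def has_option(value, options, check_label=False):
    # boolean wrapper, analogous to the refactored hasOption above
    return get_option(value, options, check_label) is not None
```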
*/ import { useMemo } from 'react'; -import { useSelector } from 'react-redux'; +import { shallowEqual, useSelector } from 'react-redux'; import { DataMaskStateWithId, ensureIsArray, @@ -32,6 +32,7 @@ export function useFilterDependencies( ): ExtraFormData { const dependencyIds = useSelector( state => state.nativeFilters.filters[id]?.cascadeParentIds, + shallowEqual, ); return useMemo(() => { let dependencies = {}; diff --git a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx index fe3a8e21d22b6..546742c6dd73b 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx +++ b/superset-frontend/src/dashboard/components/nativeFilters/FilterBar/index.tsx @@ -18,7 +18,13 @@ */ /* eslint-disable no-param-reassign */ -import React, { useEffect, useState, useCallback, createContext } from 'react'; +import React, { + useEffect, + useState, + useCallback, + createContext, + useRef, +} from 'react'; import { useDispatch, useSelector } from 'react-redux'; import { DataMaskStateWithId, @@ -144,6 +150,8 @@ const FilterBar: React.FC = ({ const [filtersInScope] = useSelectFiltersInScope(nativeFilterValues); + const dataMaskSelectedRef = useRef(dataMaskSelected); + dataMaskSelectedRef.current = dataMaskSelected; const handleFilterSelectionChange = useCallback( ( filter: Pick & Partial, @@ -154,19 +162,19 @@ const FilterBar: React.FC = ({ if ( // filterState.value === undefined - means that value not initialized dataMask.filterState?.value !== undefined && - dataMaskSelected[filter.id]?.filterState?.value === undefined && + dataMaskSelectedRef.current[filter.id]?.filterState?.value === + undefined && filter.requiredFirst ) { dispatch(updateDataMask(filter.id, dataMask)); } - draft[filter.id] = { ...(getInitialDataMask(filter.id) as DataMaskWithId), ...dataMask, }; }); }, - [dataMaskSelected, dispatch, setDataMaskSelected], + [dispatch, setDataMaskSelected], ); useEffect(() => { diff --git a/superset-frontend/src/dataMask/reducer.ts b/superset-frontend/src/dataMask/reducer.ts index f2163a54a44a0..6e9a5fae5404a 100644 --- a/superset-frontend/src/dataMask/reducer.ts +++ b/superset-frontend/src/dataMask/reducer.ts @@ -56,6 +56,7 @@ export function getInitialDataMask( } return { ...otherProps, + __cache: {}, extraFormData: {}, filterState: {}, ownState: {}, diff --git a/superset-frontend/src/explore/actions/exploreActions.test.js b/superset-frontend/src/explore/actions/exploreActions.test.js index cdf76bdcc6a6f..9dd53756800d1 100644 --- a/superset-frontend/src/explore/actions/exploreActions.test.js +++ b/superset-frontend/src/explore/actions/exploreActions.test.js @@ -22,20 +22,19 @@ import exploreReducer from 'src/explore/reducers/exploreReducer'; import * as actions from 'src/explore/actions/exploreActions'; describe('reducers', () => { - it('sets correct control value given an arbitrary key and value', () => { + it('Does not set a control value if control does not exist', () => { const newState = exploreReducer( defaultState, actions.setControlValue('NEW_FIELD', 'x', []), ); - expect(newState.controls.NEW_FIELD.value).toBe('x'); - expect(newState.form_data.NEW_FIELD).toBe('x'); + expect(newState.controls.NEW_FIELD).toBeUndefined(); }); - it('setControlValue works as expected with a checkbox', () => { + it('setControlValue works as expected with a Select control', () => { const newState = exploreReducer( defaultState, - actions.setControlValue('show_legend', true, []), + 
actions.setControlValue('y_axis_format', '$,.2f', []), ); - expect(newState.controls.show_legend.value).toBe(true); - expect(newState.form_data.show_legend).toBe(true); + expect(newState.controls.y_axis_format.value).toBe('$,.2f'); + expect(newState.form_data.y_axis_format).toBe('$,.2f'); }); }); diff --git a/superset-frontend/src/explore/components/DatasourcePanel/index.tsx b/superset-frontend/src/explore/components/DatasourcePanel/index.tsx index 1d85c8235fd6e..80ed37a30170f 100644 --- a/superset-frontend/src/explore/components/DatasourcePanel/index.tsx +++ b/superset-frontend/src/explore/components/DatasourcePanel/index.tsx @@ -336,7 +336,11 @@ export default function DataSourcePanel({ ); const showInfoboxCheck = () => { - if (sessionStorage.getItem('showInfobox') === 'false') return false; + try { + if (sessionStorage.getItem('showInfobox') === 'false') return false; + } catch (error) { + // continue regardless of error + } return true; }; @@ -366,7 +370,13 @@ export default function DataSourcePanel({ sessionStorage.setItem('showInfobox', 'false')} + onClose={() => { + try { + sessionStorage.setItem('showInfobox', 'false'); + } catch (error) { + // continue regardless of error + } + }} type="info" message="" description={ diff --git a/superset-frontend/src/explore/components/SaveModal.test.jsx b/superset-frontend/src/explore/components/SaveModal.test.jsx index bdb93e542935d..29bb278269e19 100644 --- a/superset-frontend/src/explore/components/SaveModal.test.jsx +++ b/superset-frontend/src/explore/components/SaveModal.test.jsx @@ -27,7 +27,10 @@ import Button from 'src/components/Button'; import fetchMock from 'fetch-mock'; import * as saveModalActions from 'src/explore/actions/saveModalActions'; -import SaveModal, { StyledModal } from 'src/explore/components/SaveModal'; +import SaveModal, { + PureSaveModal, + StyledModal, +} from 'src/explore/components/SaveModal'; import { BrowserRouter } from 'react-router-dom'; const middlewares = [thunk]; @@ -100,8 +103,12 @@ const queryDefaultProps = { }; const fetchDashboardsEndpoint = `glob:*/dashboardasync/api/read?_flt_0_owners=${1}`; +const fetchChartEndpoint = `glob:*/api/v1/chart/${1}*`; -beforeAll(() => fetchMock.get(fetchDashboardsEndpoint, mockDashboardData)); +beforeAll(() => { + fetchMock.get(fetchDashboardsEndpoint, mockDashboardData); + fetchMock.get(fetchChartEndpoint, { id: 1, dashboards: [1] }); +}); afterAll(() => fetchMock.restore()); @@ -226,3 +233,27 @@ test('set dataset name when chart source is query', () => { expect(wrapper.find('[data-test="new-dataset-name"]')).toExist(); expect(wrapper.state().datasetName).toBe('test'); }); + +test('make sure slice_id in the URLSearchParams before the redirect', () => { + const myProps = { + ...defaultProps, + slice: { slice_id: 1, slice_name: 'title', owners: [1] }, + actions: { + setFormData: jest.fn(), + updateSlice: jest.fn(() => Promise.resolve({ id: 1 })), + getSliceDashboards: jest.fn(), + }, + user: { userId: 1 }, + history: { + replace: jest.fn(), + }, + dispatch: jest.fn(), + }; + + const saveModal = new PureSaveModal(myProps); + const result = saveModal.handleRedirect( + 'https://example.com/?name=John&age=30', + { id: 1 }, + ); + expect(result.get('slice_id')).toEqual('1'); +}); diff --git a/superset-frontend/src/explore/components/SaveModal.tsx b/superset-frontend/src/explore/components/SaveModal.tsx index 86ff27bb42e3e..ed5b244cd70cf 100644 --- a/superset-frontend/src/explore/components/SaveModal.tsx +++ b/superset-frontend/src/explore/components/SaveModal.tsx @@ -119,7 
+119,12 @@ class SaveModal extends React.Component { async componentDidMount() { let { dashboardId } = this.props; if (!dashboardId) { - const lastDashboard = sessionStorage.getItem(SK_DASHBOARD_ID); + let lastDashboard = null; + try { + lastDashboard = sessionStorage.getItem(SK_DASHBOARD_ID); + } catch (error) { + // continue regardless of error + } dashboardId = lastDashboard && parseInt(lastDashboard, 10); } if (dashboardId) { @@ -159,6 +164,17 @@ class SaveModal extends React.Component { this.props.dispatch(setSaveChartModalVisibility(false)); } + handleRedirect = (windowLocationSearch: string, chart: any) => { + const searchParams = new URLSearchParams(windowLocationSearch); + searchParams.set('save_action', this.state.action); + if (this.state.action !== 'overwrite') { + searchParams.delete('form_data_key'); + } + + searchParams.set('slice_id', chart.id.toString()); + return searchParams; + }; + async saveOrOverwrite(gotodash: boolean) { this.setState({ isLoading: true }); @@ -249,10 +265,14 @@ class SaveModal extends React.Component { ); } - if (dashboard) { - sessionStorage.setItem(SK_DASHBOARD_ID, `${dashboard.id}`); - } else { - sessionStorage.removeItem(SK_DASHBOARD_ID); + try { + if (dashboard) { + sessionStorage.setItem(SK_DASHBOARD_ID, `${dashboard.id}`); + } else { + sessionStorage.removeItem(SK_DASHBOARD_ID); + } + } catch (error) { + // continue regardless of error + } // Go to new dashboard url @@ -261,14 +281,7 @@ class SaveModal extends React.Component { return; } - const searchParams = new URLSearchParams(window.location.search); - searchParams.set('save_action', this.state.action); - if (this.state.action !== 'overwrite') { - searchParams.delete('form_data_key'); - } - if (this.state.action === 'saveas') { - searchParams.set('slice_id', value.id.toString()); - } + const searchParams = this.handleRedirect(window.location.search, value); this.props.history.replace(`/explore/?${searchParams.toString()}`); this.setState({ isLoading: false }); @@ -518,3 +531,6 @@ function mapStateToProps({ } export default withRouter(connect(mapStateToProps)(SaveModal)); + +// Used for testing purposes; need to revisit once we convert this to a functional component +export { SaveModal as PureSaveModal }; diff --git a/superset-frontend/src/explore/reducers/exploreReducer.js b/superset-frontend/src/explore/reducers/exploreReducer.js index ac451ade3da16..d5565a0dad5eb 100644 --- a/superset-frontend/src/explore/reducers/exploreReducer.js +++ b/superset-frontend/src/explore/reducers/exploreReducer.js @@ -112,7 +112,7 @@ export default function exploreReducer(state = {}, action) { const vizType = new_form_data.viz_type; // if the controlName is metrics, and the metric column name is updated, - // need to update column config as well to keep the previou config. + // need to update column config as well to keep the previous config.
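For clarity, the `handleRedirect` helper introduced above centralizes the post-save URL rewriting: always record `save_action` and the saved chart's `slice_id`, and drop the stale `form_data_key` unless the save was an overwrite. The same rewriting expressed as a small Python sketch, purely for illustration (the real logic is the TypeScript above):

```python
from urllib.parse import parse_qs, urlencode


def handle_redirect(search: str, action: str, chart_id: int) -> str:
    """Rebuild the explore query string after a chart save."""
    params = {k: v[-1] for k, v in parse_qs(search.lstrip("?")).items()}
    params["save_action"] = action
    if action != "overwrite":
        # a freshly saved chart should not reuse the old form_data cache entry
        params.pop("form_data_key", None)
    params["slice_id"] = str(chart_id)
    return urlencode(params)


# handle_redirect("?name=John&age=30", "saveas", 1)
# -> 'name=John&age=30&save_action=saveas&slice_id=1'
```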
if (controlName === 'metrics' && old_metrics_data && new_column_config) { value.forEach((item, index) => { if ( @@ -129,11 +129,11 @@ export default function exploreReducer(state = {}, action) { } // Use the processed control config (with overrides and everything) - // if `controlName` does not existing in current controls, + // if `controlName` does not exist in current controls, const controlConfig = state.controls[action.controlName] || getControlConfig(action.controlName, vizType) || - {}; + null; // will call validators again const control = { @@ -149,7 +149,7 @@ export default function exploreReducer(state = {}, action) { ...state, controls: { ...state.controls, - [controlName]: control, + ...(controlConfig && { [controlName]: control }), ...(controlName === 'metrics' && { column_config }), }, }; @@ -196,10 +196,12 @@ export default function exploreReducer(state = {}, action) { triggerRender: control.renderTrigger && !hasErrors, controls: { ...currentControlsState, - [action.controlName]: { - ...control, - validationErrors: errors, - }, + ...(controlConfig && { + [action.controlName]: { + ...control, + validationErrors: errors, + }, + }), ...rerenderedControls, }, }; diff --git a/superset-frontend/src/features/home/Menu.test.tsx b/superset-frontend/src/features/home/Menu.test.tsx index b40a5ab075252..04a0a9876f0ea 100644 --- a/superset-frontend/src/features/home/Menu.test.tsx +++ b/superset-frontend/src/features/home/Menu.test.tsx @@ -295,7 +295,11 @@ test('should render the environment tag', async () => { const { data: { environment_tag }, } = mockedProps; - render(, { useRedux: true, useQueryParams: true }); + render(, { + useRedux: true, + useQueryParams: true, + useRouter: true, + }); expect(await screen.findByText(environment_tag.text)).toBeInTheDocument(); }); diff --git a/superset-frontend/src/features/home/Menu.tsx b/superset-frontend/src/features/home/Menu.tsx index 92766cfdda3a5..56a2fd611ec95 100644 --- a/superset-frontend/src/features/home/Menu.tsx +++ b/superset-frontend/src/features/home/Menu.tsx @@ -24,7 +24,7 @@ import { getUrlParam } from 'src/utils/urlUtils'; import { Row, Col, Grid } from 'src/components'; import { MainNav as DropdownMenu, MenuMode } from 'src/components/Menu'; import { Tooltip } from 'src/components/Tooltip'; -import { Link } from 'react-router-dom'; +import { Link, useLocation } from 'react-router-dom'; import { GenericLink } from 'src/components/GenericLink/GenericLink'; import Icons from 'src/components/Icons'; import { useUiConfig } from 'src/components/UiConfigContext'; @@ -186,6 +186,33 @@ export function Menu({ return () => window.removeEventListener('resize', windowResize); }, []); + enum paths { + EXPLORE = '/explore', + DASHBOARD = '/dashboard', + CHART = '/chart', + DATASETS = '/tablemodelview', + } + + const defaultTabSelection: string[] = []; + const [activeTabs, setActiveTabs] = useState(defaultTabSelection); + const location = useLocation(); + useEffect(() => { + const path = location.pathname; + switch (true) { + case path.startsWith(paths.DASHBOARD): + setActiveTabs(['Dashboards']); + break; + case path.startsWith(paths.CHART) || path.startsWith(paths.EXPLORE): + setActiveTabs(['Charts']); + break; + case path.startsWith(paths.DATASETS): + setActiveTabs(['Datasets']); + break; + default: + setActiveTabs(defaultTabSelection); + } + }, [location.pathname]); + const standalone = getUrlParam(URL_PARAMS.standalone); if (standalone || uiConfig.hideNav) return <>; @@ -268,6 +295,7 @@ export function Menu({ mode={showMenu} 
data-test="navbar-top" className="main-nav" + selectedKeys={activeTabs} > {menu.map((item, index) => { const props = { diff --git a/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx b/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx index 84d8ca11d0f5d..dac4858e4adb9 100644 --- a/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx +++ b/superset-frontend/src/features/rls/RowLevelSecurityModal.tsx @@ -39,10 +39,14 @@ import { FilterType, RLSObject, RoleObject, TableObject } from './types'; const StyledModal = styled(Modal)` max-width: 1200px; + min-width: min-content; width: 100%; .ant-modal-body { overflow: initial; } + .ant-modal-footer { + white-space: nowrap; + } `; const StyledIcon = (theme: SupersetTheme) => css` margin: auto ${theme.gridUnit * 2}px auto 0; diff --git a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx index 2c5d9191887e7..7d8ab55fb5571 100644 --- a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx +++ b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx @@ -26,7 +26,7 @@ import { GenericDataType, getColumnLabel, JsonObject, - smartDateDetailedFormatter, + finestTemporalGrainFormatter, t, tn, } from '@superset-ui/core'; @@ -117,9 +117,9 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { const labelFormatter = useMemo( () => getDataRecordFormatter({ - timeFormatter: smartDateDetailedFormatter, + timeFormatter: finestTemporalGrainFormatter(data.map(el => el[col])), }), - [], + [data, col], ); const updateDataMask = useCallback( diff --git a/superset-frontend/src/hooks/useTabId.ts b/superset-frontend/src/hooks/useTabId.ts index 4f60763c88875..56857cca64b1a 100644 --- a/superset-frontend/src/hooks/useTabId.ts +++ b/superset-frontend/src/hooks/useTabId.ts @@ -49,16 +49,29 @@ export function useTabId() { } const updateTabId = () => { - const lastTabId = window.localStorage.getItem('last_tab_id'); + let lastTabId; + try { + lastTabId = window.localStorage.getItem('last_tab_id'); + } catch (error) { + // continue regardless of error + } const newTabId = String( lastTabId ? 
Number.parseInt(lastTabId, 10) + 1 : 1, ); - window.sessionStorage.setItem('tab_id', newTabId); - window.localStorage.setItem('last_tab_id', newTabId); + try { + window.sessionStorage.setItem('tab_id', newTabId); + window.localStorage.setItem('last_tab_id', newTabId); + } catch (error) { + // continue regardless of error + } setTabId(newTabId); }; - - const storedTabId = window.sessionStorage.getItem('tab_id'); + let storedTabId; + try { + storedTabId = window.sessionStorage.getItem('tab_id'); + } catch (error) { + // continue regardless of error + } if (storedTabId) { channel.postMessage({ type: 'REQUESTING_TAB_ID', diff --git a/superset/charts/commands/importers/v1/__init__.py b/superset/charts/commands/importers/v1/__init__.py index 2a9c691159b4c..043018fa3b18a 100644 --- a/superset/charts/commands/importers/v1/__init__.py +++ b/superset/charts/commands/importers/v1/__init__.py @@ -95,6 +95,6 @@ def _import( config["params"].update({"datasource": dataset.uid}) if "query_context" in config: - del config["query_context"] + config["query_context"] = None import_chart(session, config, overwrite=overwrite) diff --git a/superset/charts/commands/importers/v1/utils.py b/superset/charts/commands/importers/v1/utils.py index d3f90f7ff4312..3ef0a2ed78b49 100644 --- a/superset/charts/commands/importers/v1/utils.py +++ b/superset/charts/commands/importers/v1/utils.py @@ -99,8 +99,8 @@ def migrate_chart(config: dict[str, Any]) -> dict[str, Any]: # also update `query_context` try: - query_context = json.loads(output.get("query_context", "{}")) - except json.decoder.JSONDecodeError: + query_context = json.loads(output.get("query_context") or "{}") + except (json.decoder.JSONDecodeError, TypeError): query_context = {} if "form_data" in query_context: query_context["form_data"] = output["params"] diff --git a/superset/commands/importers/v1/assets.py b/superset/commands/importers/v1/assets.py index f0720d70b19ad..4c8971315c270 100644 --- a/superset/commands/importers/v1/assets.py +++ b/superset/commands/importers/v1/assets.py @@ -117,7 +117,7 @@ def _import(session: Session, configs: dict[str, Any]) -> None: dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}" config["params"].update({"datasource": dataset_uid}) if "query_context" in config: - del config["query_context"] + config["query_context"] = None chart = import_chart(session, config, overwrite=True) chart_ids[str(chart.uuid)] = chart.id diff --git a/superset/commands/importers/v1/utils.py b/superset/commands/importers/v1/utils.py index 8ca008b3e23bf..8cb0c1b553fe3 100644 --- a/superset/commands/importers/v1/utils.py +++ b/superset/commands/importers/v1/utils.py @@ -26,6 +26,7 @@ from superset.commands.importers.exceptions import IncorrectVersionError from superset.databases.ssh_tunnel.models import SSHTunnel from superset.models.core import Database +from superset.utils.core import check_is_safe_zip METADATA_FILE_NAME = "metadata.yaml" IMPORT_VERSION = "1.0.0" @@ -207,6 +208,7 @@ def is_valid_config(file_name: str) -> bool: def get_contents_from_bundle(bundle: ZipFile) -> dict[str, str]: + check_is_safe_zip(bundle) return { remove_root(file_name): bundle.read(file_name).decode() for file_name in bundle.namelist() diff --git a/superset/common/query_context_factory.py b/superset/common/query_context_factory.py index a6fe549894db0..62e8b79893556 100644 --- a/superset/common/query_context_factory.py +++ b/superset/common/query_context_factory.py @@ -26,7 +26,7 @@ from superset.daos.chart import ChartDAO from 
superset.daos.datasource import DatasourceDAO from superset.models.slice import Slice -from superset.utils.core import DatasourceDict, DatasourceType +from superset.utils.core import DatasourceDict, DatasourceType, is_adhoc_column if TYPE_CHECKING: from superset.connectors.base.models import BaseDatasource @@ -129,6 +129,8 @@ def _apply_granularity( if granularity := query_object.granularity: filter_to_remove = None + if is_adhoc_column(x_axis): # type: ignore + x_axis = x_axis.get("sqlExpression") if x_axis and x_axis in temporal_columns: filter_to_remove = x_axis x_axis_column = next( @@ -176,11 +178,15 @@ def _apply_granularity( # another temporal filter. A new filter based on the value of # the granularity will be added later in the code. # In practice, this is replacing the previous default temporal filter. + if is_adhoc_column(filter_to_remove): # type: ignore + filter_to_remove = filter_to_remove.get("sqlExpression") + if filter_to_remove: query_object.filter = [ filter for filter in query_object.filter if filter["col"] != filter_to_remove + or filter["op"] != "TEMPORAL_RANGE" ] def _apply_filters(self, query_object: QueryObject) -> None: diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py index f6152b232a938..754c9ae91a854 100644 --- a/superset/common/query_context_processor.py +++ b/superset/common/query_context_processor.py @@ -285,10 +285,11 @@ def _get_timestamp_format( datasource = self._qc_datasource labels = tuple( label - for label in [ + for label in { *get_base_axis_labels(query_object.columns), + *[col for col in query_object.columns or [] if isinstance(col, str)], query_object.granularity, - ] + } if datasource # Query datasource didn't support `get_column` and hasattr(datasource, "get_column") diff --git a/superset/common/query_object_factory.py b/superset/common/query_object_factory.py index ae85912cdfe78..a76431122e38c 100644 --- a/superset/common/query_object_factory.py +++ b/superset/common/query_object_factory.py @@ -16,17 +16,24 @@ # under the License. 
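A note on the `_apply_granularity` change above: a custom-SQL x-axis arrives as an adhoc column dict rather than a plain column name, so it must be reduced to its `sqlExpression` before it can be compared against the dataset's temporal columns or an existing `TEMPORAL_RANGE` filter. A minimal sketch of that resolution step, under the assumption that `is_adhoc_column` behaves like the `superset.utils.core` helper imported above; the function names here are hypothetical:

```python
from typing import Any, Optional, Union

AdhocOrPhysical = Union[str, dict[str, Any]]


def resolve_column_name(col: Optional[AdhocOrPhysical]) -> Optional[str]:
    """Reduce either column form to a comparable SQL expression/name."""
    if isinstance(col, dict):
        return col.get("sqlExpression")  # adhoc (custom SQL) column
    return col  # physical column name, or None


def find_temporal_filter_to_remove(x_axis, temporal_columns):
    # hypothetical helper mirroring the x-axis/temporal-column match above
    name = resolve_column_name(x_axis)
    return name if name and name in temporal_columns else None
```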
from __future__ import annotations +from datetime import datetime from typing import Any, TYPE_CHECKING from superset.common.chart_data import ChartDataResultType from superset.common.query_object import QueryObject from superset.common.utils.time_range_utils import get_since_until_from_time_range -from superset.utils.core import apply_max_row_limit, DatasourceDict, DatasourceType +from superset.utils.core import ( + apply_max_row_limit, + DatasourceDict, + DatasourceType, + FilterOperator, + QueryObjectFilterClause, +) if TYPE_CHECKING: from sqlalchemy.orm import sessionmaker - from superset.connectors.base.models import BaseDatasource + from superset.connectors.base.models import BaseColumn, BaseDatasource from superset.daos.datasource import DatasourceDAO @@ -66,6 +73,10 @@ def create( # pylint: disable=too-many-arguments ) kwargs["from_dttm"] = from_dttm kwargs["to_dttm"] = to_dttm + if datasource_model_instance and kwargs.get("filters", []): + kwargs["filters"] = self._process_filters( + datasource_model_instance, kwargs["filters"] + ) return QueryObject( datasource=datasource_model_instance, extras=extras, @@ -102,3 +113,55 @@ def _process_row_limit( # light version of the view.utils.core # import view.utils require application context # Todo: move it and the view.utils.core to utils package + + # pylint: disable=no-self-use + def _process_filters( + self, datasource: BaseDatasource, query_filters: list[QueryObjectFilterClause] + ) -> list[QueryObjectFilterClause]: + def get_dttm_filter_value( + value: Any, col: BaseColumn, date_format: str + ) -> int | str: + if not isinstance(value, int): + return value + if date_format in {"epoch_ms", "epoch_s"}: + if date_format == "epoch_s": + value = str(value) + else: + value = str(value * 1000) + else: + dttm = datetime.utcfromtimestamp(value / 1000) + value = dttm.strftime(date_format) + + if col.type in col.num_types: + value = int(value) + return value + + for query_filter in query_filters: + if query_filter.get("op") == FilterOperator.TEMPORAL_RANGE: + continue + filter_col = query_filter.get("col") + if not isinstance(filter_col, str): + continue + column = datasource.get_column(filter_col) + if not column: + continue + filter_value = query_filter.get("val") + + date_format = column.python_date_format + if not date_format and datasource.db_extra: + date_format = datasource.db_extra.get( + "python_date_format_by_column_name", {} + ).get(column.column_name) + + if column.is_dttm and date_format: + if isinstance(filter_value, list): + query_filter["val"] = [ + get_dttm_filter_value(value, column, date_format) + for value in filter_value + ] + else: + query_filter["val"] = get_dttm_filter_value( + filter_value, column, date_format + ) + + return query_filters diff --git a/superset/config.py b/superset/config.py index 98718e731eeea..27f78832d1e3b 100644 --- a/superset/config.py +++ b/superset/config.py @@ -904,6 +904,10 @@ class D3Format(TypedDict, total=False): [86400, "24 hours"], ] +# This is used as a workaround for the alerts & reports scheduler task to get the time +# celery beat triggered it, see https://github.com/celery/celery/issues/6974 for details +CELERY_BEAT_SCHEDULER_EXPIRES = timedelta(weeks=1) + # Default celery config is to use SQLA as a broker, in a production setting # you'll want to use a proper broker as specified here: # http://docs.celeryproject.org/en/latest/getting-started/brokers/index.html @@ -932,6 +936,7 @@ class CeleryConfig: # pylint: disable=too-few-public-methods "reports.scheduler": { "task": 
"reports.scheduler", "schedule": crontab(minute="*", hour="*"), + "options": {"expires": int(CELERY_BEAT_SCHEDULER_EXPIRES.total_seconds())}, }, "reports.prune_log": { "task": "reports.prune_log", @@ -1402,7 +1407,7 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument TALISMAN_CONFIG = { "content_security_policy": { "default-src": ["'self'"], - "img-src": ["'self'", "data:"], + "img-src": ["'self'", "blob:", "data:"], "worker-src": ["'self'", "blob:"], "connect-src": [ "'self'", @@ -1410,7 +1415,11 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument "https://events.mapbox.com", ], "object-src": "'none'", - "style-src": ["'self'", "'unsafe-inline'"], + "style-src": [ + "'self'", + "'unsafe-inline'", + "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css", + ], "script-src": ["'self'", "'strict-dynamic'"], }, "content_security_policy_nonce_in": ["script-src"], @@ -1420,7 +1429,7 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument TALISMAN_DEV_CONFIG = { "content_security_policy": { "default-src": ["'self'"], - "img-src": ["'self'", "data:"], + "img-src": ["'self'", "blob:", "data:"], "worker-src": ["'self'", "blob:"], "connect-src": [ "'self'", @@ -1428,7 +1437,11 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument "https://events.mapbox.com", ], "object-src": "'none'", - "style-src": ["'self'", "'unsafe-inline'"], + "style-src": [ + "'self'", + "'unsafe-inline'", + "https://cdn.jsdelivr.net/npm/swagger-ui-dist@5/swagger-ui.css", + ], "script-src": ["'self'", "'unsafe-inline'", "'unsafe-eval'"], }, "content_security_policy_nonce_in": ["script-src"], @@ -1566,6 +1579,11 @@ def EMAIL_HEADER_MUTATOR( # pylint: disable=invalid-name,unused-argument Literal["examples", "all"] | tuple[str, list[dict[str, Any]]] ) = "all" +# Max allowed size for a zipped file +ZIPPED_FILE_MAX_SIZE = 100 * 1024 * 1024 # 100MB +# Max allowed compression ratio for a zipped file +ZIP_FILE_MAX_COMPRESS_RATIO = 200.0 + # Configuration for environment tag shown on the navbar. Setting 'text' to '' will hide the tag. # 'color' can either be a hex color code, or a dot-indexed theme color (e.g. 
error.base) ENVIRONMENT_TAG_CONFIG = { diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py index c7ea336dedd63..79203256f1e6b 100644 --- a/superset/connectors/sqla/models.py +++ b/superset/connectors/sqla/models.py @@ -1007,6 +1007,8 @@ def adhoc_column_to_sqla( # pylint: disable=too-many-locals qry = sa.select([sqla_column]).limit(1).select_from(tbl) sql = self.database.compile_sqla_query(qry) col_desc = get_columns_description(self.database, sql) + if not col_desc: + raise SupersetGenericDBErrorException("Column not found") is_dttm = col_desc[0]["is_dttm"] # type: ignore except SupersetGenericDBErrorException as ex: raise ColumnNotFoundException(message=str(ex)) from ex diff --git a/superset/dashboards/commands/importers/v1/__init__.py b/superset/dashboards/commands/importers/v1/__init__.py index e86bddec9fe3e..30e63da4e4a95 100644 --- a/superset/dashboards/commands/importers/v1/__init__.py +++ b/superset/dashboards/commands/importers/v1/__init__.py @@ -118,7 +118,7 @@ def _import( dataset_uid = f"{dataset_dict['datasource_id']}__{dataset_dict['datasource_type']}" config["params"].update({"datasource": dataset_uid}) if "query_context" in config: - del config["query_context"] + config["query_context"] = None chart = import_chart(session, config, overwrite=False) chart_ids[str(chart.uuid)] = chart.id diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py index e7f86754230fc..5836e6163f8d9 100644 --- a/superset/db_engine_specs/base.py +++ b/superset/db_engine_specs/base.py @@ -309,6 +309,10 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods # engine-specific type mappings to check prior to the defaults column_type_mappings: tuple[ColumnTypeMapping, ...] = () + # type-specific functions to mutate values received from the database. + # Needed on certain databases that return values in an unexpected format + column_type_mutators: dict[TypeEngine, Callable[[Any], Any]] = {} + # Does database support join-free timeslot grouping time_groupby_inline = False limit_method = LimitMethod.FORCE_LIMIT @@ -730,7 +734,30 @@ def fetch_data(cls, cursor: Any, limit: int | None = None) -> list[tuple[Any, .. try: if cls.limit_method == LimitMethod.FETCH_MANY and limit: return cursor.fetchmany(limit) - return cursor.fetchall() + data = cursor.fetchall() + description = cursor.description or [] + # Create a mapping between column name and a mutator function to normalize + # values with. The first two items in the description row are + # the column name and type. + column_mutators = { + row[0]: func + for row in description + if ( + func := cls.column_type_mutators.get( + type(cls.get_sqla_column_type(cls.get_datatype(row[1]))) + ) + ) + } + if column_mutators: + indexes = {row[0]: idx for idx, row in enumerate(description)} + for row_idx, row in enumerate(data): + new_row = list(row) + for col, func in column_mutators.items(): + col_idx = indexes[col] + new_row[col_idx] = func(row[col_idx]) + data[row_idx] = tuple(new_row) + + return data except Exception as ex: raise cls.get_dbapi_mapped_exception(ex) from ex @@ -1026,24 +1053,6 @@ def handle_cursor(cls, cursor: Any, query: Query, session: Session) -> None: query object""" # TODO: Fix circular import error caused by importing sql_lab.Query - @classmethod - def execute_with_cursor( - cls, cursor: Any, sql: str, query: Query, session: Session - ) -> None: - """ - Trigger execution of a query and handle the resulting cursor. 
- - For most implementations this just makes calls to `execute` and - `handle_cursor` consecutively, but in some engines (e.g. Trino) we may - need to handle client limitations such as lack of async support and - perform a more complicated operation to get information from the cursor - in a timely manner and facilitate operations such as query stop - """ - logger.debug("Query %d: Running query: %s", query.id, sql) - cls.execute(cursor, sql, async_=True) - logger.debug("Query %d: Handling cursor", query.id) - cls.handle_cursor(cursor, query, session) - @classmethod def extract_error_message(cls, ex: Exception) -> str: return f"{cls.engine} error: {cls._extract_error_message(ex)}" diff --git a/superset/db_engine_specs/dremio.py b/superset/db_engine_specs/dremio.py index 2288c5257248c..c96159f1b8aa4 100644 --- a/superset/db_engine_specs/dremio.py +++ b/superset/db_engine_specs/dremio.py @@ -27,8 +27,6 @@ class DremioEngineSpec(BaseEngineSpec): engine = "dremio" engine_name = "Dremio" - allows_alias_in_select = False - _time_grain_expressions = { None: "{col}", TimeGrain.SECOND: "DATE_TRUNC('second', {col})", diff --git a/superset/db_engine_specs/mysql.py b/superset/db_engine_specs/mysql.py index e83e53e426143..eaa7d9377d82d 100644 --- a/superset/db_engine_specs/mysql.py +++ b/superset/db_engine_specs/mysql.py @@ -16,8 +16,9 @@ # under the License. import re from datetime import datetime +from decimal import Decimal from re import Pattern -from typing import Any, Optional +from typing import Any, Callable, Optional from urllib import parse from flask_babel import gettext as __ @@ -125,6 +126,9 @@ class MySQLEngineSpec(BaseEngineSpec, BasicParametersMixin): GenericDataType.STRING, ), ) + column_type_mutators: dict[types.TypeEngine, Callable[[Any], Any]] = { + DECIMAL: lambda val: Decimal(val) if isinstance(val, str) else val + } _time_grain_expressions = { None: "{col}", diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py index fc10099d85309..561978f61a501 100644 --- a/superset/db_engine_specs/presto.py +++ b/superset/db_engine_specs/presto.py @@ -1269,11 +1269,11 @@ def get_create_view( sql = f"SHOW CREATE VIEW {schema}.{table}" try: cls.execute(cursor, sql) + rows = cls.fetch_data(cursor, 1) + + return rows[0][0] except DatabaseError: # not a VIEW return None - rows = cls.fetch_data(cursor, 1) - - return rows[0][0] @classmethod def get_tracking_url(cls, cursor: Cursor) -> str | None: diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py index f758f1fadd1aa..eff78c4fa4eb5 100644 --- a/superset/db_engine_specs/trino.py +++ b/superset/db_engine_specs/trino.py @@ -17,8 +17,6 @@ from __future__ import annotations import logging -import threading -import time from typing import Any, TYPE_CHECKING import simplejson as json @@ -156,22 +154,15 @@ def get_tracking_url(cls, cursor: Cursor) -> str | None: @classmethod def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None: - """ - Handle a trino client cursor. - - WARNING: if you execute a query, it will block until complete and you - will not be able to handle the cursor until complete. Use - `execute_with_cursor` instead, to handle this asynchronously. 
- """ - - # Adds the executed query id to the extra payload so the query can be cancelled - cancel_query_id = cursor.query_id - logger.debug("Query %d: queryId %s found in cursor", query.id, cancel_query_id) - query.set_extra_json_key(key=QUERY_CANCEL_KEY, value=cancel_query_id) - if tracking_url := cls.get_tracking_url(cursor): query.tracking_url = tracking_url + # Adds the executed query id to the extra payload so the query can be cancelled + query.set_extra_json_key( + key=QUERY_CANCEL_KEY, + value=(cancel_query_id := cursor.stats["queryId"]), + ) + session.commit() # if query cancelation was requested prior to the handle_cursor call, but @@ -185,51 +176,6 @@ def handle_cursor(cls, cursor: Cursor, query: Query, session: Session) -> None: super().handle_cursor(cursor=cursor, query=query, session=session) - @classmethod - def execute_with_cursor( - cls, cursor: Any, sql: str, query: Query, session: Session - ) -> None: - """ - Trigger execution of a query and handle the resulting cursor. - - Trino's client blocks until the query is complete, so we need to run it - in another thread and invoke `handle_cursor` to poll for the query ID - to appear on the cursor in parallel. - """ - execute_result: dict[str, Any] = {} - - def _execute(results: dict[str, Any]) -> None: - logger.debug("Query %d: Running query: %s", query.id, sql) - - # Pass result / exception information back to the parent thread - try: - cls.execute(cursor, sql) - results["complete"] = True - except Exception as ex: # pylint: disable=broad-except - results["complete"] = True - results["error"] = ex - - execute_thread = threading.Thread(target=_execute, args=(execute_result,)) - execute_thread.start() - - # Wait for a query ID to be available before handling the cursor, as - # it's required by that method; it may never become available on error. - while not cursor.query_id and not execute_result.get("complete"): - time.sleep(0.1) - - logger.debug("Query %d: Handling cursor", query.id) - cls.handle_cursor(cursor, query, session) - - # Block until the query completes; same behaviour as the client itself - logger.debug("Query %d: Waiting for query to complete", query.id) - while not execute_result.get("complete"): - time.sleep(0.5) - - # Unfortunately we'll mangle the stack trace due to the thread, but - # throwing the original exception allows mapping database errors as normal - if err := execute_result.get("error"): - raise err - @classmethod def prepare_cancel_query(cls, query: Query, session: Session) -> None: if QUERY_CANCEL_KEY not in query.extra: diff --git a/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py b/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py index caac489bd1b1a..42e3507f7ca83 100644 --- a/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py +++ b/superset/migrations/versions/2023-08-09_14-17_8ace289026f3_add_on_delete_cascade_for_dashboard_slices.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
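To make the new `column_type_mutators` hook in `BaseEngineSpec.fetch_data` above concrete: a spec maps SQLAlchemy column types to callables, and every fetched value in a matching column is passed through its callable, which is how the MySQL spec normalizes string-typed DECIMAL results. A simplified sketch keyed directly on the DB-API `type_code` instead of the resolved SQLAlchemy type (that resolution via `get_sqla_column_type`/`get_datatype` is the part elided here):

```python
from decimal import Decimal

# illustrative stand-in for the spec-level mapping above
COLUMN_TYPE_MUTATORS = {
    "DECIMAL": lambda val: Decimal(val) if isinstance(val, str) else val,
}


def normalize_rows(description, rows):
    """Apply a mutator to every value whose column type registers one."""
    mutators = {
        idx: func
        for idx, (_name, type_code, *_rest) in enumerate(description)
        if (func := COLUMN_TYPE_MUTATORS.get(type_code))
    }
    if not mutators:
        return rows
    return [
        tuple(mutators.get(i, lambda v: v)(v) for i, v in enumerate(row))
        for row in rows
    ]


# normalize_rows([("amount", "DECIMAL", None, None, None, None, None)],
#                [("1.23",)]) == [(Decimal("1.23"),)]
```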
-"""add on delete cascade for dashboard slices +"""add on delete cascade for dashboard_slices Revision ID: 8ace289026f3 Revises: 2e826adca42c diff --git a/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py b/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py index b50f6375147c1..8d298863a6379 100644 --- a/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py +++ b/superset/migrations/versions/2023-08-09_15-39_4448fa6deeb1__dd_on_delete_cascade_for_embedded_dashboards.py.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -"""add on delete cascade for embedded dashboards +"""add on delete cascade for embedded_dashboards Revision ID: 4448fa6deeb1 Revises: 8ace289026f3 diff --git a/superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py b/superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py new file mode 100644 index 0000000000000..8f429ced9f6de --- /dev/null +++ b/superset/migrations/versions/2023-09-15_12-58_4b85906e5b91_add_on_delete_cascade_for_dashboard_roles.py @@ -0,0 +1,55 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""add on delete cascade for dashboard_roles + +Revision ID: 4b85906e5b91 +Revises: ec54aca4c8a2 +Create Date: 2023-09-15 12:58:26.772759 + +""" + +# revision identifiers, used by Alembic. 
+revision = "4b85906e5b91" +down_revision = "ec54aca4c8a2" + + +from superset.migrations.shared.constraints import ForeignKey, redefine + +foreign_keys = [ + ForeignKey( + table="dashboard_roles", + referent_table="dashboards", + local_cols=["dashboard_id"], + remote_cols=["id"], + ), + ForeignKey( + table="dashboard_roles", + referent_table="ab_role", + local_cols=["role_id"], + remote_cols=["id"], + ), +] + + +def upgrade(): + for foreign_key in foreign_keys: + redefine(foreign_key, on_delete="CASCADE") + + +def downgrade(): + for foreign_key in foreign_keys: + redefine(foreign_key) diff --git a/superset/models/core.py b/superset/models/core.py index e3f91e1379437..0581756b818ea 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -995,7 +995,7 @@ class Log(Model): # pylint: disable=too-few-public-methods user_id = Column(Integer, ForeignKey("ab_user.id")) dashboard_id = Column(Integer) slice_id = Column(Integer) - json = Column(Text) + json = Column(utils.MediumText()) user = relationship( security_manager.user_model, backref="logs", foreign_keys=[user_id] ) diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index 0848126012695..789045542f0d8 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -127,8 +127,18 @@ def copy_dashboard(_mapper: Mapper, connection: Connection, target: Dashboard) - "dashboard_roles", metadata, Column("id", Integer, primary_key=True), - Column("dashboard_id", Integer, ForeignKey("dashboards.id"), nullable=False), - Column("role_id", Integer, ForeignKey("ab_role.id"), nullable=False), + Column( + "dashboard_id", + Integer, + ForeignKey("dashboards.id", ondelete="CASCADE"), + nullable=False, + ), + Column( + "role_id", + Integer, + ForeignKey("ab_role.id", ondelete="CASCADE"), + nullable=False, + ), ) diff --git a/superset/security/manager.py b/superset/security/manager.py index 6c47c6e163d6d..47e772d765d58 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -79,7 +79,7 @@ if TYPE_CHECKING: from superset.common.query_context import QueryContext from superset.connectors.base.models import BaseDatasource - from superset.connectors.sqla.models import SqlaTable + from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable from superset.models.core import Database from superset.models.dashboard import Dashboard from superset.models.sql_lab import Query @@ -2091,28 +2091,30 @@ def get_rls_filters(self, table: "BaseDatasource") -> list[SqlaQuery]: ) return query.all() - def get_rls_ids(self, table: "BaseDatasource") -> list[int]: + def get_rls_sorted(self, table: "BaseDatasource") -> list["RowLevelSecurityFilter"]: """ - Retrieves the appropriate row level security filters IDs for the current user - and the passed table. + Retrieves a list RLS filters sorted by ID for + the current user and the passed table. 
:param table: The table to check against - :returns: A list of IDs + :returns: A list of RLS filters """ - ids = [f.id for f in self.get_rls_filters(table)] - ids.sort() # Combinations rather than permutations - return ids + filters = self.get_rls_filters(table) + filters.sort(key=lambda f: f.id) + return filters def get_guest_rls_filters_str(self, table: "BaseDatasource") -> list[str]: return [f.get("clause", "") for f in self.get_guest_rls_filters(table)] def get_rls_cache_key(self, datasource: "BaseDatasource") -> list[str]: - rls_ids = [] + rls_clauses_with_group_key = [] if datasource.is_rls_supported: - rls_ids = self.get_rls_ids(datasource) - rls_str = [str(rls_id) for rls_id in rls_ids] + rls_clauses_with_group_key = [ + f"{f.clause}-{f.group_key or ''}" + for f in self.get_rls_sorted(datasource) + ] guest_rls = self.get_guest_rls_filters_str(datasource) - return guest_rls + rls_str + return guest_rls + rls_clauses_with_group_key @staticmethod def _get_current_epoch_time() -> float: diff --git a/superset/sql_lab.py b/superset/sql_lab.py index ca157b324085d..afc682b10fbcf 100644 --- a/superset/sql_lab.py +++ b/superset/sql_lab.py @@ -191,7 +191,7 @@ def get_sql_results( # pylint: disable=too-many-arguments return handle_query_error(ex, query, session) -def execute_sql_statement( # pylint: disable=too-many-arguments +def execute_sql_statement( # pylint: disable=too-many-arguments,too-many-statements sql_statement: str, query: Query, session: Session, @@ -271,7 +271,10 @@ def execute_sql_statement( # pylint: disable=too-many-arguments ) session.commit() with stats_timing("sqllab.query.time_executing_query", stats_logger): - db_engine_spec.execute_with_cursor(cursor, sql, query, session) + logger.debug("Query %d: Running query: %s", query.id, sql) + db_engine_spec.execute(cursor, sql, async_=True) + logger.debug("Query %d: Handling cursor", query.id) + db_engine_spec.handle_cursor(cursor, query, session) with stats_timing("sqllab.query.time_fetching_results", stats_logger): logger.debug( diff --git a/superset/sql_parse.py b/superset/sql_parse.py index 2d467ec2b362b..c196fdabfa2d5 100644 --- a/superset/sql_parse.py +++ b/superset/sql_parse.py @@ -230,13 +230,17 @@ def _check_cte_is_select(self, oxide_parse: list[dict[str, Any]]) -> bool: :param oxide_parse: parsed CTE :return: True if CTE is a SELECT statement """ + + def is_body_select(body: dict[str, Any]) -> bool: + if op := body.get("SetOperation"): + return is_body_select(op["left"]) and is_body_select(op["right"]) + return all(key == "Select" for key in body.keys()) + for query in oxide_parse: parsed_query = query["Query"] cte_tables = self._get_cte_tables(parsed_query) for cte_table in cte_tables: - is_select = all( - key == "Select" for key in cte_table["query"]["body"].keys() - ) + is_select = is_body_select(cte_table["query"]["body"]) if not is_select: return False return True diff --git a/superset/sqllab/query_render.py b/superset/sqllab/query_render.py index db1adf43bab34..95111276fecae 100644 --- a/superset/sqllab/query_render.py +++ b/superset/sqllab/query_render.py @@ -25,6 +25,7 @@ from superset import is_feature_enabled from superset.errors import SupersetErrorType +from superset.sql_parse import ParsedQuery from superset.sqllab.commands.execute import SqlQueryRender from superset.sqllab.exceptions import SqlLabException from superset.utils import core as utils @@ -57,8 +58,9 @@ def render(self, execution_context: SqlJsonExecutionContext) -> str: database=query_model.database, query=query_model ) + parsed_query =
ParsedQuery(query_model.sql, strip_comments=True) rendered_query = sql_template_processor.process_template( - query_model.sql, **execution_context.template_params + parsed_query.stripped(), **execution_context.template_params ) self._validate(execution_context, rendered_query, sql_template_processor) return rendered_query diff --git a/superset/tasks/cron_util.py b/superset/tasks/cron_util.py index 19d342ebdcf86..329937fb82850 100644 --- a/superset/tasks/cron_util.py +++ b/superset/tasks/cron_util.py @@ -17,7 +17,7 @@ import logging from collections.abc import Iterator -from datetime import datetime, timedelta, timezone as dt_timezone +from datetime import datetime, timedelta from croniter import croniter from pytz import timezone as pytz_timezone, UnknownTimeZoneError @@ -27,10 +27,10 @@ logger = logging.getLogger(__name__) -def cron_schedule_window(cron: str, timezone: str) -> Iterator[datetime]: +def cron_schedule_window( + triggered_at: datetime, cron: str, timezone: str +) -> Iterator[datetime]: window_size = app.config["ALERT_REPORTS_CRON_WINDOW_SIZE"] - # create a time-aware datetime in utc - time_now = datetime.now(tz=dt_timezone.utc) try: tz = pytz_timezone(timezone) except UnknownTimeZoneError: @@ -39,9 +39,9 @@ def cron_schedule_window(cron: str, timezone: str) -> Iterator[datetime]: logger.warning("Timezone %s was invalid. Falling back to 'UTC'", timezone) utc = pytz_timezone("UTC") # convert the current time to the user's local time for comparison - time_now = time_now.astimezone(tz) - start_at = time_now - timedelta(seconds=1) - stop_at = time_now + timedelta(seconds=window_size) + time_now = triggered_at.astimezone(tz) + start_at = time_now - timedelta(seconds=window_size / 2) + stop_at = time_now + timedelta(seconds=window_size / 2) crons = croniter(cron, start_at) for schedule in crons.all_next(datetime): if schedule >= stop_at: diff --git a/superset/tasks/scheduler.py b/superset/tasks/scheduler.py index a84036c340771..9c541605c77ba 100644 --- a/superset/tasks/scheduler.py +++ b/superset/tasks/scheduler.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. import logging +from datetime import datetime from celery import Celery from celery.exceptions import SoftTimeLimitExceeded @@ -43,9 +44,15 @@ def scheduler() -> None: return with session_scope(nullpool=True) as session: active_schedules = ReportScheduleDAO.find_active(session) + triggered_at = ( + datetime.fromisoformat(scheduler.request.expires) + - app.config["CELERY_BEAT_SCHEDULER_EXPIRES"] + if scheduler.request.expires + else datetime.utcnow() + ) for active_schedule in active_schedules: for schedule in cron_schedule_window( - active_schedule.crontab, active_schedule.timezone + triggered_at, active_schedule.crontab, active_schedule.timezone ): logger.info( "Scheduling alert %s eta: %s", active_schedule.name, schedule diff --git a/superset/utils/core.py b/superset/utils/core.py index 58df8b3c0f676..9e3592d22b700 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -1917,6 +1917,25 @@ def create_zip(files: dict[str, Any]) -> BytesIO: return buf +def check_is_safe_zip(zip_file: ZipFile) -> None: + """ + Checks whether a ZIP file is safe, raises SupersetException if not. 
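Pulling the scheduler pieces above together: celery only hands the task its `expires` timestamp, so subtracting the new `CELERY_BEAT_SCHEDULER_EXPIRES` interval recovers the instant beat actually fired, and `cron_schedule_window` now centers a window of `ALERT_REPORTS_CRON_WINDOW_SIZE` seconds on that instant instead of on `datetime.now()`. A condensed sketch of the combined flow, with the config constants inlined for illustration:

```python
from datetime import datetime, timedelta

from croniter import croniter
from pytz import timezone as pytz_timezone

CELERY_BEAT_SCHEDULER_EXPIRES = timedelta(weeks=1)
WINDOW_SIZE = 59  # stand-in for ALERT_REPORTS_CRON_WINDOW_SIZE, in seconds


def schedule_window(cron: str, tz_name: str, expires_iso=None):
    # recover the beat trigger time from the task's expiry, if present
    triggered_at = (
        datetime.fromisoformat(expires_iso) - CELERY_BEAT_SCHEDULER_EXPIRES
        if expires_iso
        else datetime.utcnow()
    )
    time_now = triggered_at.astimezone(pytz_timezone(tz_name))
    # symmetric window around the trigger time, as in cron_schedule_window
    start_at = time_now - timedelta(seconds=WINDOW_SIZE / 2)
    stop_at = time_now + timedelta(seconds=WINDOW_SIZE / 2)
    for schedule in croniter(cron, start_at).all_next(datetime):
        if schedule >= stop_at:
            break
        yield schedule
```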
+ + :param zip_file: + :return: + """ + uncompress_size = 0 + compress_size = 0 + for zip_file_element in zip_file.infolist(): + if zip_file_element.file_size > current_app.config["ZIPPED_FILE_MAX_SIZE"]: + raise SupersetException("Found file with size above allowed threshold") + uncompress_size += zip_file_element.file_size + compress_size += zip_file_element.compress_size + compress_ratio = uncompress_size / compress_size + if compress_ratio > current_app.config["ZIP_FILE_MAX_COMPRESS_RATIO"]: + raise SupersetException("Zip compress ratio above allowed threshold") + + def remove_extra_adhoc_filters(form_data: dict[str, Any]) -> None: """ Remove filters from slice data that originate from a filter box or native filter diff --git a/superset/utils/excel.py b/superset/utils/excel.py index 1f68031b6497b..ccbeadee5ecec 100644 --- a/superset/utils/excel.py +++ b/superset/utils/excel.py @@ -22,6 +22,11 @@ def df_to_excel(df: pd.DataFrame, **kwargs: Any) -> Any: output = io.BytesIO() + + # timezones are not supported + for column in df.select_dtypes(include=["datetimetz"]).columns: + df[column] = df[column].astype(str) + # pylint: disable=abstract-class-instantiated with pd.ExcelWriter(output, engine="xlsxwriter") as writer: df.to_excel(writer, **kwargs) diff --git a/superset/views/base_api.py b/superset/views/base_api.py index dca7a96b1d90c..611aea5a18937 100644 --- a/superset/views/base_api.py +++ b/superset/views/base_api.py @@ -251,7 +251,7 @@ class BaseSupersetApi(BaseSupersetApiMixin, BaseApi): ... -class BaseSupersetModelRestApi(ModelRestApi, BaseSupersetApiMixin): +class BaseSupersetModelRestApi(BaseSupersetApiMixin, ModelRestApi): """ Extends FAB's ModelResApi to implement specific superset generic functionality """ diff --git a/superset/views/core.py b/superset/views/core.py index 1b50a59e362ed..e39edb99af84b 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -1025,21 +1025,8 @@ def profile(self) -> FlaskResponse: @staticmethod def _get_sqllab_tabs(user_id: int | None) -> dict[str, Any]: - # send list of tab state ids - tabs_state = ( - db.session.query(TabState.id, TabState.label) - .filter_by(user_id=user_id) - .all() - ) - tab_state_ids = [str(tab_state[0]) for tab_state in tabs_state] - # return first active tab, or fallback to another one if no tab is active - active_tab = ( - db.session.query(TabState) - .filter_by(user_id=user_id) - .order_by(TabState.active.desc()) - .first() - ) - + tabs_state: list[Any] = [] + active_tab: Any = None databases: dict[int, Any] = {} for database in DatabaseDAO.find_all(): databases[database.id] = { @@ -1050,6 +1037,20 @@ def _get_sqllab_tabs(user_id: int | None) -> dict[str, Any]: # These are unnecessary if sqllab backend persistence is disabled if is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"): + # send list of tab state ids + tabs_state = ( + db.session.query(TabState.id, TabState.label) + .filter_by(user_id=user_id) + .all() + ) + tab_state_ids = [str(tab_state[0]) for tab_state in tabs_state] + # return first active tab, or fallback to another one if no tab is active + active_tab = ( + db.session.query(TabState) + .filter_by(user_id=user_id) + .order_by(TabState.active.desc()) + .first() + ) # return all user queries associated with existing SQL editors user_queries = ( db.session.query(Query) diff --git a/superset/views/sql_lab/views.py b/superset/views/sql_lab/views.py index 0002c3f31cc43..068888353ce5c 100644 --- a/superset/views/sql_lab/views.py +++ b/superset/views/sql_lab/views.py @@ -14,6 +14,7 @@ # KIND, either 
express or implied. See the License for the # specific language governing permissions and limitations # under the License. +# import logging import simplejson as json @@ -21,7 +22,7 @@ from flask_appbuilder import expose from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_appbuilder.security.decorators import has_access, has_access_api -from flask_babel import lazy_gettext as _ +from flask_babel import gettext as __, lazy_gettext as _ from sqlalchemy import and_ from superset import db @@ -145,7 +146,7 @@ def post(self) -> FlaskResponse: # pylint: disable=no-self-use user_id=get_user_id(), # This is for backward compatibility label=query_editor.get("name") - or query_editor.get("title", _("Untitled Query")), + or query_editor.get("title", __("Untitled Query")), active=True, database_id=query_editor["dbId"], schema=query_editor.get("schema"), @@ -213,11 +214,15 @@ def activate( # pylint: disable=no-self-use @has_access_api @expose("", methods=("PUT",)) - def put(self, tab_state_id: int) -> FlaskResponse: # pylint: disable=no-self-use + def put(self, tab_state_id: int) -> FlaskResponse: if _get_owner_id(tab_state_id) != get_user_id(): return Response(status=403) fields = {k: json.loads(v) for k, v in request.form.to_dict().items()} + if client_id := fields.get("latest_query_id"): + query = db.session.query(Query).filter_by(client_id=client_id).one_or_none() + if not query: + return self.json_response({"error": "Bad request"}, status=400) db.session.query(TabState).filter_by(id=tab_state_id).update(fields) db.session.commit() return json_success(json.dumps(tab_state_id)) diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py index dc82026986245..ab91cce55e825 100644 --- a/tests/integration_tests/charts/data/api_tests.py +++ b/tests/integration_tests/charts/data/api_tests.py @@ -51,6 +51,7 @@ from superset.superset_typing import AdhocColumn from superset.utils.core import ( AnnotationType, + backend, get_example_default_schema, AdhocMetricExpressionType, ExtraFiltersReasonType, @@ -943,6 +944,71 @@ def test_chart_data_get(self): assert data["result"][0]["status"] == "success" assert data["result"][0]["rowcount"] == 2 + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_chart_data_get_with_x_axis_using_custom_sql(self): + """ + Chart data API: Test GET endpoint + """ + chart = db.session.query(Slice).filter_by(slice_name="Genders").one() + chart.query_context = json.dumps( + { + "datasource": {"id": chart.table.id, "type": "table"}, + "force": False, + "queries": [ + { + "time_range": "1900-01-01T00:00:00 : 2000-01-01T00:00:00", + "granularity": "ds", + "filters": [ + {"col": "ds", "op": "TEMPORAL_RANGE", "val": "No filter"} + ], + "extras": { + "having": "", + "where": "", + }, + "applied_time_extras": {}, + "columns": [ + { + "columnType": "BASE_AXIS", + "datasourceWarning": False, + "expressionType": "SQL", + "label": "My column", + "sqlExpression": "ds", + "timeGrain": "P1W", + } + ], + "metrics": ["sum__num"], + "orderby": [["sum__num", False]], + "annotation_layers": [], + "row_limit": 50000, + "timeseries_limit": 0, + "order_desc": True, + "url_params": {}, + "custom_params": {}, + "custom_form_data": {}, + } + ], + "form_data": { + "x_axis": { + "datasourceWarning": False, + "expressionType": "SQL", + "label": "My column", + "sqlExpression": "ds", + } + }, + "result_format": "json", + "result_type": "full", + } + ) + rv = self.get_assert_metric(f"api/v1/chart/{chart.id}/data/", 
"get_data") + assert rv.mimetype == "application/json" + data = json.loads(rv.data.decode("utf-8")) + assert data["result"][0]["status"] == "success" + + if backend() == "presto": + assert data["result"][0]["rowcount"] == 41 + else: + assert data["result"][0]["rowcount"] == 40 + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_chart_data_get_forced(self): """ diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py index 63805b6c636d2..191d9dc2d0338 100644 --- a/tests/integration_tests/core_tests.py +++ b/tests/integration_tests/core_tests.py @@ -1091,6 +1091,33 @@ def test_feature_flag_serialization(self): data = self.get_resp(url) self.assertTrue(html_string in data) + @mock.patch.dict( + "superset.extensions.feature_flag_manager._feature_flags", + {"SQLLAB_BACKEND_PERSISTENCE": False}, + clear=True, + ) + def test_get_from_bootstrap_data_for_non_persisted_tab_state(self): + username = "admin" + self.login(username) + user_id = security_manager.find_user(username).id + # create a tab + data = { + "queryEditor": json.dumps( + { + "title": "Untitled Query 1", + "dbId": 1, + "schema": None, + "autorun": False, + "sql": "SELECT ...", + "queryLimit": 1000, + } + ) + } + + payload = views.Superset._get_sqllab_tabs(user_id=user_id) + self.assertEqual(len(payload["queries"]), 0) + self.assertEqual(len(payload["tab_state_ids"]), 0) + @mock.patch.dict( "superset.extensions.feature_flag_manager._feature_flags", {"SQLLAB_BACKEND_PERSISTENCE": True}, @@ -1170,6 +1197,41 @@ def test_tabstate_with_name(self): self.assertEqual(payload["label"], "Untitled Query foo") + def test_tabstate_update(self): + username = "admin" + self.login(username) + # create a tab + data = { + "queryEditor": json.dumps( + { + "name": "Untitled Query foo", + "dbId": 1, + "schema": None, + "autorun": False, + "sql": "SELECT ...", + "queryLimit": 1000, + } + ) + } + resp = self.get_json_resp("/tabstateview/", data=data) + tab_state_id = resp["id"] + # update tab state with non-existing client_id + client_id = "asdfasdf" + data = {"sql": json.dumps("select 1"), "latest_query_id": json.dumps(client_id)} + response = self.client.put(f"/tabstateview/{tab_state_id}", data=data) + self.assertEqual(response.status_code, 400) + self.assertEqual(response.json["error"], "Bad request") + # generate query + db.session.add(Query(client_id=client_id, database_id=1)) + db.session.commit() + # update tab state with a valid client_id + response = self.client.put(f"/tabstateview/{tab_state_id}", data=data) + self.assertEqual(response.status_code, 200) + # nulls should be ok too + data["latest_query_id"] = "null" + response = self.client.put(f"/tabstateview/{tab_state_id}", data=data) + self.assertEqual(response.status_code, 200) + def test_virtual_table_explore_visibility(self): # test that default visibility it set to True database = superset.utils.database.get_example_database() diff --git a/tests/integration_tests/db_engine_specs/presto_tests.py b/tests/integration_tests/db_engine_specs/presto_tests.py index 393f89621c1f7..7e151648a645c 100644 --- a/tests/integration_tests/db_engine_specs/presto_tests.py +++ b/tests/integration_tests/db_engine_specs/presto_tests.py @@ -925,9 +925,11 @@ def test_get_create_view_exception(self): def test_get_create_view_database_error(self): from pyhive.exc import DatabaseError - mock_execute = mock.MagicMock(side_effect=DatabaseError()) + mock_execute = mock.MagicMock() + mock_fetch_data = mock.MagicMock(side_effect=DatabaseError()) database = 
mock.MagicMock() database.get_raw_connection().__enter__().cursor().execute = mock_execute + database.get_raw_connection().__enter__().cursor().fetchall = mock_fetch_data schema = "schema" table = "table" result = PrestoEngineSpec.get_create_view(database, schema=schema, table=table) diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py index 8c2082d1c4b12..00a98b2c21d93 100644 --- a/tests/integration_tests/query_context_tests.py +++ b/tests/integration_tests/query_context_tests.py @@ -836,11 +836,9 @@ def test_special_chars_in_column_name(app_context, physical_dataset): query_object = qc.queries[0] df = qc.get_df_payload(query_object)["df"] - if query_object.datasource.database.backend == "sqlite": - # sqlite returns string as timestamp column - assert df["time column with spaces"][0] == "2002-01-03 00:00:00" - assert df["I_AM_A_TRUNC_COLUMN"][0] == "2002-01-01 00:00:00" - else: + + # sqlite doesn't have timestamp columns + if query_object.datasource.database.backend != "sqlite": assert df["time column with spaces"][0].strftime("%Y-%m-%d") == "2002-01-03" assert df["I_AM_A_TRUNC_COLUMN"][0].strftime("%Y-%m-%d") == "2002-01-01" diff --git a/tests/integration_tests/security/row_level_security_tests.py b/tests/integration_tests/security/row_level_security_tests.py index c29ebe9afef03..41ca0d5e798e9 100644 --- a/tests/integration_tests/security/row_level_security_tests.py +++ b/tests/integration_tests/security/row_level_security_tests.py @@ -305,6 +305,21 @@ def test_rls_filter_doesnt_alter_admin_birth_names_query(self): assert not self.NAMES_Q_REGEX.search(sql) assert not self.BASE_FILTER_REGEX.search(sql) + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_get_rls_cache_key(self): + g.user = self.get_user(username="admin") + tbl = self.get_table(name="birth_names") + clauses = security_manager.get_rls_cache_key(tbl) + assert clauses == [] + + g.user = self.get_user(username="gamma") + clauses = security_manager.get_rls_cache_key(tbl) + assert clauses == [ + "name like 'A%' or name like 'B%'-name", + "name like 'Q%'-name", + "gender = 'boy'-gender", + ] + class TestRowLevelSecurityCreateAPI(SupersetTestCase): @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index fbab4d98d25ec..914c601610e90 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -514,6 +514,13 @@ def test_sql_json_parameter_error(self): ) assert data["status"] == "success" + data = self.run_sql( + "SELECT * FROM birth_names WHERE state = '{{ state }}' -- blabblah {{ extra1 }} {{fake.fn()}}\nLIMIT 10", + "3", + template_params=json.dumps({"state": "CA"}), + ) + assert data["status"] == "success" + data = self.run_sql( "SELECT * FROM birth_names WHERE state = '{{ stat }}' LIMIT 10", "2", diff --git a/tests/unit_tests/charts/commands/importers/v1/utils_test.py b/tests/unit_tests/charts/commands/importers/v1/utils_test.py index c99ecaf6d6da7..77d31e7d776b3 100644 --- a/tests/unit_tests/charts/commands/importers/v1/utils_test.py +++ b/tests/unit_tests/charts/commands/importers/v1/utils_test.py @@ -32,6 +32,7 @@ def test_migrate_chart_area() -> None: "certified_by": None, "certification_details": None, "viz_type": "area", + "query_context": None, "params": json.dumps( { "adhoc_filters": [], diff --git a/tests/unit_tests/common/test_query_object_factory.py 
b/tests/unit_tests/common/test_query_object_factory.py index 02304828dca82..4e8fadfe3e993 100644 --- a/tests/unit_tests/common/test_query_object_factory.py +++ b/tests/unit_tests/common/test_query_object_factory.py @@ -43,9 +43,45 @@ def session_factory() -> Mock: return Mock() +class SimpleDatasetColumn: + def __init__(self, col_params: dict[str, Any]): + self.__dict__.update(col_params) + + +TEMPORAL_COLUMN_NAMES = ["temporal_column", "temporal_column_with_python_date_format"] +TEMPORAL_COLUMNS = { + TEMPORAL_COLUMN_NAMES[0]: SimpleDatasetColumn( + { + "column_name": TEMPORAL_COLUMN_NAMES[0], + "is_dttm": True, + "python_date_format": None, + "type": "string", + "num_types": ["BIGINT"], + } + ), + TEMPORAL_COLUMN_NAMES[1]: SimpleDatasetColumn( + { + "column_name": TEMPORAL_COLUMN_NAMES[1], + "type": "BIGINT", + "is_dttm": True, + "python_date_format": "%Y", + "num_types": ["BIGINT"], + } + ), +} + + @fixture def connector_registry() -> Mock: - return Mock(spec=["get_datasource"]) + datasource_dao_mock = Mock(spec=["get_datasource"]) + datasource_dao_mock.get_datasource.return_value = Mock() + datasource_dao_mock.get_datasource().get_column = Mock( + side_effect=lambda col_name: TEMPORAL_COLUMNS[col_name] + if col_name in TEMPORAL_COLUMN_NAMES + else Mock() + ) + datasource_dao_mock.get_datasource().db_extra = None + return datasource_dao_mock def apply_max_row_limit(limit: int, max_limit: Optional[int] = None) -> int: @@ -112,3 +148,55 @@ def test_query_context_null_post_processing_op( raw_query_context["result_type"], **raw_query_object ) assert query_object.post_processing == [] + + def test_query_context_no_python_date_format_filters( + self, + query_object_factory: QueryObjectFactory, + raw_query_context: dict[str, Any], + ): + raw_query_object = raw_query_context["queries"][0] + raw_query_object["filters"].append( + {"col": TEMPORAL_COLUMN_NAMES[0], "op": "==", "val": 315532800000} + ) + query_object = query_object_factory.create( + raw_query_context["result_type"], + raw_query_context["datasource"], + **raw_query_object + ) + assert query_object.filter[3]["val"] == 315532800000 + + def test_query_context_python_date_format_filters( + self, + query_object_factory: QueryObjectFactory, + raw_query_context: dict[str, Any], + ): + raw_query_object = raw_query_context["queries"][0] + raw_query_object["filters"].append( + {"col": TEMPORAL_COLUMN_NAMES[1], "op": "==", "val": 315532800000} + ) + query_object = query_object_factory.create( + raw_query_context["result_type"], + raw_query_context["datasource"], + **raw_query_object + ) + assert query_object.filter[3]["val"] == 1980 + + def test_query_context_python_date_format_filters_list_of_values( + self, + query_object_factory: QueryObjectFactory, + raw_query_context: dict[str, Any], + ): + raw_query_object = raw_query_context["queries"][0] + raw_query_object["filters"].append( + { + "col": TEMPORAL_COLUMN_NAMES[1], + "op": "==", + "val": [315532800000, 631152000000], + } + ) + query_object = query_object_factory.create( + raw_query_context["result_type"], + raw_query_context["datasource"], + **raw_query_object + ) + assert query_object.filter[3]["val"] == [1980, 1990] diff --git a/tests/unit_tests/conftest.py b/tests/unit_tests/conftest.py index cbf728dfc776b..4444fdc8c7564 100644 --- a/tests/unit_tests/conftest.py +++ b/tests/unit_tests/conftest.py @@ -89,6 +89,15 @@ def app(request: SubRequest) -> Iterator[SupersetApp]: app.config["TESTING"] = True # loop over extra configs passed in by tests + # and update the app config + # to 
override the default configs use: + # + # @pytest.mark.parametrize( + # "app", + # [{"SOME_CONFIG": "SOME_VALUE"}], + # indirect=True, + # ) + # def test_some_test(app_context: None) -> None: if request and hasattr(request, "param"): for key, val in request.param.items(): app.config[key] = val diff --git a/tests/unit_tests/db_engine_specs/test_clickhouse.py b/tests/unit_tests/db_engine_specs/test_clickhouse.py index 6dfeddaf37cfd..3f28341f2643d 100644 --- a/tests/unit_tests/db_engine_specs/test_clickhouse.py +++ b/tests/unit_tests/db_engine_specs/test_clickhouse.py @@ -30,6 +30,8 @@ String, TypeEngine, ) +from urllib3.connection import HTTPConnection +from urllib3.exceptions import NewConnectionError from superset.utils.core import GenericDataType from tests.unit_tests.db_engine_specs.utils import ( @@ -56,14 +58,12 @@ def test_convert_dttm( def test_execute_connection_error() -> None: - from urllib3.exceptions import NewConnectionError - from superset.db_engine_specs.clickhouse import ClickHouseEngineSpec from superset.db_engine_specs.exceptions import SupersetDBAPIDatabaseError cursor = Mock() cursor.execute.side_effect = NewConnectionError( - "Dummypool", "Exception with sensitive data" + HTTPConnection("localhost"), "Exception with sensitive data" ) with pytest.raises(SupersetDBAPIDatabaseError) as ex: ClickHouseEngineSpec.execute(cursor, "SELECT col1 from table1") diff --git a/tests/unit_tests/db_engine_specs/test_mysql.py b/tests/unit_tests/db_engine_specs/test_mysql.py index 89abf2321d79b..ed643470176ec 100644 --- a/tests/unit_tests/db_engine_specs/test_mysql.py +++ b/tests/unit_tests/db_engine_specs/test_mysql.py @@ -16,6 +16,7 @@ # under the License. from datetime import datetime +from decimal import Decimal from typing import Any, Optional from unittest.mock import Mock, patch @@ -220,3 +221,42 @@ def test_get_schema_from_engine_params() -> None: ) == "db1" ) + + +@pytest.mark.parametrize( + "data,description,expected_result", + [ + ( + [("1.23456", "abc")], + [("dec", "decimal(12,6)"), ("str", "varchar(3)")], + [(Decimal("1.23456"), "abc")], + ), + ( + [(Decimal("1.23456"), "abc")], + [("dec", "decimal(12,6)"), ("str", "varchar(3)")], + [(Decimal("1.23456"), "abc")], + ), + ( + [(None, "abc")], + [("dec", "decimal(12,6)"), ("str", "varchar(3)")], + [(None, "abc")], + ), + ( + [("1.23456", "abc")], + [("dec", "varchar(255)"), ("str", "varchar(3)")], + [("1.23456", "abc")], + ), + ], +) +def test_column_type_mutator( + data: list[tuple[Any, ...]], + description: list[Any], + expected_result: list[tuple[Any, ...]], +): + from superset.db_engine_specs.mysql import MySQLEngineSpec as spec + + mock_cursor = Mock() + mock_cursor.fetchall.return_value = data + mock_cursor.description = description + + assert spec.fetch_data(mock_cursor) == expected_result diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py index 1b50a683a0841..963953d18b48e 100644 --- a/tests/unit_tests/db_engine_specs/test_trino.py +++ b/tests/unit_tests/db_engine_specs/test_trino.py @@ -352,7 +352,7 @@ def test_handle_cursor_early_cancel( query_id = "myQueryId" cursor_mock = engine_mock.return_value.__enter__.return_value - cursor_mock.query_id = query_id + cursor_mock.stats = {"queryId": query_id} session_mock = mocker.MagicMock() query = Query() @@ -366,32 +366,3 @@ def test_handle_cursor_early_cancel( assert cancel_query_mock.call_args[1]["cancel_query_id"] == query_id else: assert cancel_query_mock.call_args is None - - -def 
test_execute_with_cursor_in_parallel(mocker: MockerFixture): - """Test that `execute_with_cursor` fetches query ID from the cursor""" - from superset.db_engine_specs.trino import TrinoEngineSpec - - query_id = "myQueryId" - - mock_cursor = mocker.MagicMock() - mock_cursor.query_id = None - - mock_query = mocker.MagicMock() - mock_session = mocker.MagicMock() - - def _mock_execute(*args, **kwargs): - mock_cursor.query_id = query_id - - mock_cursor.execute.side_effect = _mock_execute - - TrinoEngineSpec.execute_with_cursor( - cursor=mock_cursor, - sql="SELECT 1 FROM foo", - query=mock_query, - session=mock_session, - ) - - mock_query.set_extra_json_key.assert_called_once_with( - key=QUERY_CANCEL_KEY, value=query_id - ) diff --git a/tests/unit_tests/security/api_test.py b/tests/unit_tests/security/api_test.py new file mode 100644 index 0000000000000..5d596073e9b27 --- /dev/null +++ b/tests/unit_tests/security/api_test.py @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest + +from superset.extensions import csrf + + +@pytest.mark.parametrize( + "app", + [{"WTF_CSRF_ENABLED": True}], + indirect=True, +) +def test_csrf_not_exempt(app_context: None) -> None: + """ + Test that REST API is not exempt from CSRF. + """ + assert csrf._exempt_blueprints == {"MenuApi", "SecurityApi", "OpenApi"} diff --git a/tests/unit_tests/sql_lab_test.py b/tests/unit_tests/sql_lab_test.py index edc1fd2ec4a5d..29f45eab682a0 100644 --- a/tests/unit_tests/sql_lab_test.py +++ b/tests/unit_tests/sql_lab_test.py @@ -55,8 +55,8 @@ def test_execute_sql_statement(mocker: MockerFixture, app: None) -> None: ) database.apply_limit_to_sql.assert_called_with("SELECT 42 AS answer", 2, force=True) - db_engine_spec.execute_with_cursor.assert_called_with( - cursor, "SELECT 42 AS answer LIMIT 2", query, session + db_engine_spec.execute.assert_called_with( + cursor, "SELECT 42 AS answer LIMIT 2", async_=True ) SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec) @@ -106,8 +106,10 @@ def test_execute_sql_statement_with_rls( 101, force=True, ) - db_engine_spec.execute_with_cursor.assert_called_with( - cursor, "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", query, session + db_engine_spec.execute.assert_called_with( + cursor, + "SELECT * FROM sales WHERE organization_id=42 LIMIT 101", + async_=True, ) SupersetResultSet.assert_called_with([(42,)], cursor.description, db_engine_spec) diff --git a/tests/unit_tests/sql_parse_tests.py b/tests/unit_tests/sql_parse_tests.py index 73074d3df64bf..341ba9d789396 100644 --- a/tests/unit_tests/sql_parse_tests.py +++ b/tests/unit_tests/sql_parse_tests.py @@ -1623,3 +1623,13 @@ def test_is_select() -> None: Test `is_select`. 
""" assert not ParsedQuery("SELECT 1; DROP DATABASE superset").is_select() + assert ParsedQuery( + "with base as(select id from table1 union all select id from table2) select * from base" + ).is_select() + assert ParsedQuery( + """ +WITH t AS ( + SELECT 1 UNION ALL SELECT 2 +) +SELECT * FROM t""" + ).is_select() diff --git a/tests/unit_tests/tasks/test_cron_util.py b/tests/unit_tests/tasks/test_cron_util.py index 5bc22273f544e..56f1258e30b57 100644 --- a/tests/unit_tests/tasks/test_cron_util.py +++ b/tests/unit_tests/tasks/test_cron_util.py @@ -14,11 +14,9 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +from datetime import datetime import pytest -import pytz -from dateutil import parser -from freezegun import freeze_time from freezegun.api import FakeDatetime from superset.tasks.cron_util import cron_schedule_window @@ -27,23 +25,28 @@ @pytest.mark.parametrize( "current_dttm, cron, expected", [ - ("2020-01-01T08:59:01Z", "0 1 * * *", []), + ("2020-01-01T08:59:01+00:00", "0 1 * * *", []), ( - "2020-01-01T08:59:02Z", + "2020-01-01T08:59:32+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 9, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T08:59:59Z", + "2020-01-01T08:59:59+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 9, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T09:00:00Z", + "2020-01-01T09:00:00+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 9, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), - ("2020-01-01T09:00:01Z", "0 1 * * *", []), + ( + "2020-01-01T09:00:01+00:00", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 9, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), + ("2020-01-01T09:00:30+00:00", "0 1 * * *", []), ], ) def test_cron_schedule_window_los_angeles( @@ -53,34 +56,40 @@ def test_cron_schedule_window_los_angeles( Reports scheduler: Test cron schedule window for "America/Los_Angeles" """ - with freeze_time(current_dttm): - datetimes = cron_schedule_window(cron, "America/Los_Angeles") - assert ( - list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) - == expected - ) + datetimes = cron_schedule_window( + datetime.fromisoformat(current_dttm), cron, "America/Los_Angeles" + ) + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) == expected + ) @pytest.mark.parametrize( "current_dttm, cron, expected", [ - ("2020-01-01T00:59:01Z", "0 1 * * *", []), + ("2020-01-01T00:59:01+00:00", "0 1 * * *", []), + ("2020-01-01T00:59:02+00:00", "0 1 * * *", []), + ( + "2020-01-01T00:59:59+00:00", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 1, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), ( - "2020-01-01T00:59:02Z", + "2020-01-01T01:00:00+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 1, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T00:59:59Z", + "2020-01-01T01:00:01+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 1, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T01:00:00Z", + "2020-01-01T01:00:29+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 1, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), - ("2020-01-01T01:00:01Z", "0 1 * * *", []), + ("2020-01-01T01:00:30+00:00", "0 1 * * *", []), ], ) def test_cron_schedule_window_invalid_timezone( @@ -90,35 +99,41 @@ def test_cron_schedule_window_invalid_timezone( Reports scheduler: Test cron schedule window for "invalid timezone" """ - with freeze_time(current_dttm): - datetimes = cron_schedule_window(cron, "invalid timezone") - # it should default to UTC - assert ( - list(cron.strftime("%A, %d 
%B %Y, %H:%M:%S") for cron in datetimes) - == expected - ) + datetimes = cron_schedule_window( + datetime.fromisoformat(current_dttm), cron, "invalid timezone" + ) + # it should default to UTC + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) == expected + ) @pytest.mark.parametrize( "current_dttm, cron, expected", [ - ("2020-01-01T05:59:01Z", "0 1 * * *", []), + ("2020-01-01T05:59:01+00:00", "0 1 * * *", []), + ("2020-01-01T05:59:02+00:00", "0 1 * * *", []), + ( + "2020-01-01T05:59:59+00:00", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), ( - "2020-01-01T05:59:02Z", + "2020-01-01T06:00:00+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T5:59:59Z", + "2020-01-01T06:00:01+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T6:00:00", + "2020-01-01T06:00:29+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), - ("2020-01-01T6:00:01Z", "0 1 * * *", []), + ("2020-01-01T06:00:30+00:00", "0 1 * * *", []), ], ) def test_cron_schedule_window_new_york( @@ -128,34 +143,40 @@ def test_cron_schedule_window_new_york( Reports scheduler: Test cron schedule window for "America/New_York" """ - with freeze_time(current_dttm, tz_offset=0): - datetimes = cron_schedule_window(cron, "America/New_York") - assert ( - list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) - == expected - ) + datetimes = cron_schedule_window( + datetime.fromisoformat(current_dttm), cron, "America/New_York" + ) + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) == expected + ) @pytest.mark.parametrize( "current_dttm, cron, expected", [ - ("2020-01-01T06:59:01Z", "0 1 * * *", []), + ("2020-01-01T06:59:01+00:00", "0 1 * * *", []), + ("2020-01-01T06:59:02+00:00", "0 1 * * *", []), + ( + "2020-01-01T06:59:59+00:00", + "0 1 * * *", + [FakeDatetime(2020, 1, 1, 7, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), ( - "2020-01-01T06:59:02Z", + "2020-01-01T07:00:00+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 7, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T06:59:59Z", + "2020-01-01T07:00:01+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 7, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-01-01T07:00:00", + "2020-01-01T07:00:29+00:00", "0 1 * * *", [FakeDatetime(2020, 1, 1, 7, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), - ("2020-01-01T07:00:01Z", "0 1 * * *", []), + ("2020-01-01T07:00:30+00:00", "0 1 * * *", []), ], ) def test_cron_schedule_window_chicago( @@ -165,34 +186,40 @@ def test_cron_schedule_window_chicago( Reports scheduler: Test cron schedule window for "America/Chicago" """ - with freeze_time(current_dttm, tz_offset=0): - datetimes = cron_schedule_window(cron, "America/Chicago") - assert ( - list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) - == expected - ) + datetimes = cron_schedule_window( + datetime.fromisoformat(current_dttm), cron, "America/Chicago" + ) + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) == expected + ) @pytest.mark.parametrize( "current_dttm, cron, expected", [ - ("2020-07-01T05:59:01Z", "0 1 * * *", []), + ("2020-07-01T05:59:01+00:00", "0 1 * * *", []), + ("2020-07-01T05:59:02+00:00", "0 1 * * *", []), + ( + "2020-07-01T05:59:59+00:00", + "0 1 * * *", + [FakeDatetime(2020, 7, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], + ), ( - "2020-07-01T05:59:02Z", + 
"2020-07-01T06:00:00+00:00", "0 1 * * *", [FakeDatetime(2020, 7, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-07-01T05:59:59Z", + "2020-07-01T06:00:01+00:00", "0 1 * * *", [FakeDatetime(2020, 7, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), ( - "2020-07-01T06:00:00", + "2020-07-01T06:00:29+00:00", "0 1 * * *", [FakeDatetime(2020, 7, 1, 6, 0).strftime("%A, %d %B %Y, %H:%M:%S")], ), - ("2020-07-01T06:00:01Z", "0 1 * * *", []), + ("2020-07-01T06:00:30+00:00", "0 1 * * *", []), ], ) def test_cron_schedule_window_chicago_daylight( @@ -202,9 +229,9 @@ def test_cron_schedule_window_chicago_daylight( Reports scheduler: Test cron schedule window for "America/Chicago" """ - with freeze_time(current_dttm, tz_offset=0): - datetimes = cron_schedule_window(cron, "America/Chicago") - assert ( - list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) - == expected - ) + datetimes = cron_schedule_window( + datetime.fromisoformat(current_dttm), cron, "America/Chicago" + ) + assert ( + list(cron.strftime("%A, %d %B %Y, %H:%M:%S") for cron in datetimes) == expected + ) diff --git a/tests/unit_tests/utils/excel_tests.py b/tests/unit_tests/utils/excel_tests.py new file mode 100644 index 0000000000000..c15f69a0c62a3 --- /dev/null +++ b/tests/unit_tests/utils/excel_tests.py @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from datetime import datetime, timezone + +import pandas as pd + +from superset.utils.excel import df_to_excel + + +def test_timezone_conversion() -> None: + """ + Test that columns with timezones are converted to a string. + """ + df = pd.DataFrame({"dt": [datetime(2023, 1, 1, 0, 0, tzinfo=timezone.utc)]}) + contents = df_to_excel(df) + assert pd.read_excel(contents)["dt"][0] == "2023-01-01 00:00:00+00:00" diff --git a/tests/unit_tests/utils/test_core.py b/tests/unit_tests/utils/test_core.py index 568595517c6bd..bd0cefeaf0f8d 100644 --- a/tests/unit_tests/utils/test_core.py +++ b/tests/unit_tests/utils/test_core.py @@ -15,12 +15,16 @@ # specific language governing permissions and limitations # under the License. 
import os +from dataclasses import dataclass from typing import Any, Optional +from unittest.mock import MagicMock import pytest +from superset.exceptions import SupersetException from superset.utils.core import ( cast_to_boolean, + check_is_safe_zip, is_test, parse_boolean_string, QueryObjectFilterClause, @@ -41,6 +45,12 @@ } +@dataclass +class MockZipInfo: + file_size: int + compress_size: int + + @pytest.mark.parametrize( "original,expected", [ @@ -171,3 +181,50 @@ def test_other_values(): assert cast_to_boolean([]) is False assert cast_to_boolean({}) is False assert cast_to_boolean(object()) is False + + +def test_check_if_safe_zip_success(app_context: None) -> None: + """ + Test if ZIP files are safe + """ + ZipFile = MagicMock() + ZipFile.infolist.return_value = [ + MockZipInfo(file_size=1000, compress_size=10), + MockZipInfo(file_size=1000, compress_size=10), + MockZipInfo(file_size=1000, compress_size=10), + MockZipInfo(file_size=1000, compress_size=10), + MockZipInfo(file_size=1000, compress_size=10), + ] + check_is_safe_zip(ZipFile) + + +def test_check_if_safe_zip_high_rate(app_context: None) -> None: + """ + Test if ZIP files is not highly compressed + """ + ZipFile = MagicMock() + ZipFile.infolist.return_value = [ + MockZipInfo(file_size=1000, compress_size=1), + MockZipInfo(file_size=1000, compress_size=1), + MockZipInfo(file_size=1000, compress_size=1), + MockZipInfo(file_size=1000, compress_size=1), + MockZipInfo(file_size=1000, compress_size=1), + ] + with pytest.raises(SupersetException): + check_is_safe_zip(ZipFile) + + +def test_check_if_safe_zip_hidden_bomb(app_context: None) -> None: + """ + Test if ZIP file does not contain a big file highly compressed + """ + ZipFile = MagicMock() + ZipFile.infolist.return_value = [ + MockZipInfo(file_size=1000, compress_size=100), + MockZipInfo(file_size=1000, compress_size=100), + MockZipInfo(file_size=1000, compress_size=100), + MockZipInfo(file_size=1000, compress_size=100), + MockZipInfo(file_size=1000 * (1024 * 1024), compress_size=100), + ] + with pytest.raises(SupersetException): + check_is_safe_zip(ZipFile)
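
Note on the zip-bomb guard above: a minimal sketch of how the `check_is_safe_zip` helper added to superset/utils/core.py in this patch might be exercised by an upload path. The `open_uploaded_bundle` function below is hypothetical and not part of the patch; the guard itself needs an active Flask app context whose config defines `ZIPPED_FILE_MAX_SIZE` and `ZIP_FILE_MAX_COMPRESS_RATIO`, and it assumes a non-empty archive (an archive whose members all have zero compressed size would make the ratio undefined).

    # Hypothetical caller, not part of this patch. Assumes a Flask app context
    # with ZIPPED_FILE_MAX_SIZE and ZIP_FILE_MAX_COMPRESS_RATIO set in config.
    from io import BytesIO
    from zipfile import ZipFile

    from superset.exceptions import SupersetException
    from superset.utils.core import check_is_safe_zip


    def open_uploaded_bundle(raw_bytes: bytes) -> ZipFile:
        """Open an uploaded ZIP only after the zip-bomb guard passes."""
        bundle = ZipFile(BytesIO(raw_bytes))
        try:
            # Raises SupersetException when any member's uncompressed size
            # exceeds ZIPPED_FILE_MAX_SIZE, or when the aggregate
            # uncompressed/compressed ratio exceeds ZIP_FILE_MAX_COMPRESS_RATIO.
            check_is_safe_zip(bundle)
        except SupersetException:
            bundle.close()
            raise
        return bundle

Because the guard only reads `infolist()` metadata from the central directory, nothing is decompressed before both thresholds pass, which keeps the validation cheap even for hostile inputs.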