diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug-report.md
similarity index 84%
rename from .github/ISSUE_TEMPLATE/bug_report.md
rename to .github/ISSUE_TEMPLATE/bug-report.md
index f821eb35246bc..edb58358a1de7 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug-report.md
@@ -1,8 +1,7 @@
---
name: Bug report
-about: Create a report to help us improve Superset's stability! For feature requests please open a discussion at https://github.com/apache/superset/discussions/categories/ideas
-labels: "#bug"
-
+about: "Create a report to help us improve Superset's stability! For feature requests please open a discussion [here](https://github.com/apache/superset/discussions/categories/ideas)."
+labels: bug
---

A clear and concise description of what the bug is.
diff --git a/.github/ISSUE_TEMPLATE/cosmetic.md b/.github/ISSUE_TEMPLATE/cosmetic.md
index 753c2bc5cd8bf..1a2e6ea2da02b 100644
--- a/.github/ISSUE_TEMPLATE/cosmetic.md
+++ b/.github/ISSUE_TEMPLATE/cosmetic.md
@@ -2,7 +2,6 @@
name: Cosmetic Issue
about: Describe a cosmetic issue with CSS, positioning, layout, labeling, or similar
labels: "cosmetic-issue"
-
---

## Screenshot
diff --git a/.github/ISSUE_TEMPLATE/sip.md b/.github/ISSUE_TEMPLATE/sip.md
index c2b0a14b91400..c01e7b1ab7d77 100644
--- a/.github/ISSUE_TEMPLATE/sip.md
+++ b/.github/ISSUE_TEMPLATE/sip.md
@@ -1,10 +1,9 @@
---
name: SIP
-about: Superset Improvement Proposal (See SIP-0: https://github.com/apache/superset/issues/5602)
-labels: "#SIP"
+about: "Superset Improvement Proposal. See [here](https://github.com/apache/superset/issues/5602) for details."
+labels: sip
title: "[SIP] Your Title Here (do not add SIP number)"
assignees: "apache/superset-committers"
-
---

*Please make sure you are familiar with the SIP process documented*
diff --git a/.github/workflows/docker-ephemeral-env.yml b/.github/workflows/docker-ephemeral-env.yml
index 5159a8b77998b..bc877ae9ae737 100644
--- a/.github/workflows/docker-ephemeral-env.yml
+++ b/.github/workflows/docker-ephemeral-env.yml
@@ -22,6 +22,10 @@ jobs:
          secrets.AWS_SECRET_ACCESS_KEY != '' &&
          secrets.AWS_SECRET_ACCESS_KEY != '') || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
+           echo "has secrets!"
+         else
+           echo "has-secrets=0" >> "$GITHUB_OUTPUT"
+           echo "no secrets!"
          fi
  docker_ephemeral_env:
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index f9c62501f32e9..7c5b50baf430f 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -20,6 +20,10 @@ jobs:
        run: |
          if [ -n "${{ (secrets.DOCKERHUB_USER != '' && secrets.DOCKERHUB_TOKEN != '') || '' }}" ]; then
            echo "has-secrets=1" >> "$GITHUB_OUTPUT"
+           echo "has secrets!"
+         else
+           echo "has-secrets=0" >> "$GITHUB_OUTPUT"
+           echo "no secrets!"
fi docker-build: diff --git a/.github/workflows/superset-frontend.yml b/.github/workflows/superset-frontend.yml index a8045518469fa..aeb258cd268b8 100644 --- a/.github/workflows/superset-frontend.yml +++ b/.github/workflows/superset-frontend.yml @@ -41,7 +41,7 @@ jobs: if: steps.check.outcome == 'failure' working-directory: ./superset-frontend run: | - npm run lint + npm run lint -- --quiet npm run prettier-check - name: Build plugins packages if: steps.check.outcome == 'failure' @@ -60,7 +60,7 @@ jobs: if: steps.check.outcome == 'failure' working-directory: ./superset-frontend run: | - npm run test -- --coverage + npm run test -- --coverage --silent # todo: remove this step when fix generator as a project in root jest.config.js - name: generator-superset unit tests if: steps.check.outcome == 'failure' diff --git a/RELEASING/from_tarball_entrypoint.sh b/RELEASING/from_tarball_entrypoint.sh index 9fe0516abb90b..27b247eb72fe8 100755 --- a/RELEASING/from_tarball_entrypoint.sh +++ b/RELEASING/from_tarball_entrypoint.sh @@ -36,7 +36,7 @@ superset db upgrade superset init # Loading examples -superset load-examples +superset load-examples --force FLASK_ENV=development FLASK_APP="superset.app:create_app()" \ flask run -p 8088 --with-threads --reload --debugger --host=0.0.0.0 diff --git a/RESOURCES/STANDARD_ROLES.md b/RESOURCES/STANDARD_ROLES.md index b247585396280..2d455f1e8706b 100644 --- a/RESOURCES/STANDARD_ROLES.md +++ b/RESOURCES/STANDARD_ROLES.md @@ -61,53 +61,26 @@ |can my queries on SqlLab|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| |can log on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can schemas access for csv upload on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can user slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can favstar on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can estimate query cost on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can import dashboards on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can search queries on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| -|can sqllab viz on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| |can schemas on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can sqllab history on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| -|can copy dash on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can publish on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can csv on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| -|can datasources on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can fave dashboards by username on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can sql json on Superset|:heavy_check_mark:|O|O|:heavy_check_mark:| |can slice on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can sync druid source on Superset|:heavy_check_mark:|O|O|O| |can explore on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can fave slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can tables on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can slice json on 
Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can approve on Superset|:heavy_check_mark:|O|O|O| |can explore json on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can fetch datasource metadata on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can override role permissions on Superset|:heavy_check_mark:|O|O|O| -|can created dashboards on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can extra table metadata on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can csrf token on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can created slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can testconn on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can annotation json on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can add slices on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can fave dashboards on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can save dash on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can sqllab on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| -|can recent activity on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can select star on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can warm up cache on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can sqllab table viz on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| |can profile on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can validate sql json on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can available domains on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can queries on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can stop query on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:| |can request access on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can filter on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can dashboard on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can results on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can post on TableSchemaView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can expanded on TableSchemaView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can delete on TableSchemaView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| @@ -191,12 +164,6 @@ |can show on AlertLogModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can list on AlertObservationModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| |can show on AlertObservationModelView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O| -|can edit on AccessRequestsModelView|:heavy_check_mark:|O|O|O| -|can list on AccessRequestsModelView|:heavy_check_mark:|O|O|O| -|can show on AccessRequestsModelView|:heavy_check_mark:|O|O|O| -|can add on AccessRequestsModelView|:heavy_check_mark:|O|O|O| -|can delete on AccessRequestsModelView|:heavy_check_mark:|O|O|O| -|muldelete on AccessRequestsModelView|:heavy_check_mark:|O|O|O| |menu access on Row Level Security|:heavy_check_mark:|O|O|O| |menu 
access on Access requests|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|menu access on Home|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
@@ -214,7 +181,6 @@
|can edit on FilterSets|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can this form get on ColumnarToDatabaseView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can this form post on ColumnarToDatabaseView|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
-|can schemas access for file upload on Superset|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|menu access on Upload a Columnar file|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can export on Chart|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
|can write on DashboardFilterStateRestApi|:heavy_check_mark:|:heavy_check_mark:|:heavy_check_mark:|O|
diff --git a/UPDATING.md b/UPDATING.md
index e2f45f6937bc3..41a120f31078d 100644
--- a/UPDATING.md
+++ b/UPDATING.md
@@ -24,6 +24,7 @@ assists people when migrating to a new version.

## Next

+- [24335](https://github.com/apache/superset/pull/24335): Removed deprecated API `/superset/filter////`
- [24185](https://github.com/apache/superset/pull/24185): `/api/v1/database/test_connection` and `api/v1/database/validate_parameters` permissions changed from `can_read` to `can_write`. Only Admin users have access.
- [24256](https://github.com/apache/superset/pull/24256): `Flask-Login` session validation is now set to `strong` by default. Previous setting was `basic`.
- [24232](https://github.com/apache/superset/pull/24232): Enables ENABLE_TEMPLATE_REMOVE_FILTERS, DRILL_TO_DETAIL, DASHBOARD_CROSS_FILTERS by default, marks VERSIONED_EXPORT and ENABLE_TEMPLATE_REMOVE_FILTERS as deprecated.
@@ -33,6 +34,22 @@ assists people when migrating to a new version.

### Breaking Changes

+- [24415](https://github.com/apache/superset/pull/24415): Removed the obsolete Druid NoSQL REGEX operator.
+- [24423](https://github.com/apache/superset/pull/24423): Removed deprecated APIs `/superset/slice_json/...`, `/superset/annotation_json/...`
+- [24400](https://github.com/apache/superset/pull/24400): Removed deprecated APIs `/superset/recent_activity/...`, `/superset/fave_dashboards_by_username/...`, `/superset/fave_dashboards/...`, `/superset/created_dashboards/...`, `/superset/user_slices/`, `/superset/created_slices/...`, `/superset/fave_slices/...`, `/superset/favstar/...`
+- [24401](https://github.com/apache/superset/pull/24401): Removes the deprecated `metrics` column (which was introduced in [20732](https://github.com/apache/superset/pull/20732)) from the `/api/v1/dataset/` API.
+- [24375](https://github.com/apache/superset/pull/24375): Removed deprecated APIs `/superset/get_or_create_table/...`, `/superset/sqllab_viz`
+- [24360](https://github.com/apache/superset/pull/24360): Removed deprecated APIs `/superset/stop_query/...`, `/superset/queries/...`, `/superset/search_queries`
+- [24353](https://github.com/apache/superset/pull/24353): Removed deprecated APIs `/copy_dash/int:dashboard_id/`, `/save_dash/int:dashboard_id/`, `/add_slices/int:dashboard_id/`.
+- [24198](https://github.com/apache/superset/pull/24198): The FAB views `User Registrations` and `User's Statistics` have been changed to Admin only. To re-enable them for non-admin users, please add the following perms to your custom role: `menu access on User's Statistics` and `menu access on User Registrations`.
+- [24354](https://github.com/apache/superset/pull/24354): Removed deprecated APIs `/superset/testconn`, `/superset/validate_sql_json/`, `/superset/schemas_access_for_file_upload`, `/superset/extra_table_metadata`
+- [24381](https://github.com/apache/superset/pull/24381): Removed deprecated API `/superset/available_domains/`
+- [24359](https://github.com/apache/superset/pull/24359): Removed deprecated APIs `/superset/estimate_query_cost/..`, `/superset/results/..`, `/superset/sql_json/..`, `/superset/csv/..`
+- [24345](https://github.com/apache/superset/pull/24345): Converts `ENABLE_BROAD_ACTIVITY_ACCESS` and `MENU_HIDE_USER_INFO` into feature flags and changes the value of `ENABLE_BROAD_ACTIVITY_ACCESS` to `False` as it's more secure.
+- [24342](https://github.com/apache/superset/pull/24342): Removed deprecated API `/superset/tables///...`
+- [24335](https://github.com/apache/superset/pull/24335): Removed deprecated API `/superset/filter////`
+- [24333](https://github.com/apache/superset/pull/24333): Removed deprecated API `/superset/datasources`
+- [24266](https://github.com/apache/superset/pull/24266): Removes the `ENABLE_ACCESS_REQUEST` config parameter and the associated request/approval workflows.
- [24330](https://github.com/apache/superset/pull/24330): Removes `getUiOverrideRegistry` from `ExtensionsRegistry`.
- [23933](https://github.com/apache/superset/pull/23933): Removes the deprecated Multiple Line Charts.
- [23741](https://github.com/apache/superset/pull/23741): Migrates the TreeMap chart and removes the legacy Treemap code.
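Several of the entries above flip feature flags rather than remove code outright (for example 24232 and 24345). On the frontend, gated code paths check such flags at runtime. A minimal sketch in TypeScript, assuming the `FeatureFlag` enum and `isFeatureEnabled` helper exported by `@superset-ui/core` (these helpers have moved between packages across versions, so treat the import path as an assumption):

```ts
// Sketch: gating UI on a flag that 24232 now enables by default.
// Assumption: @superset-ui/core exports these helpers in your version.
import { FeatureFlag, isFeatureEnabled } from '@superset-ui/core';

export function shouldShowDrillToDetail(): boolean {
  // Reads the feature flags the server injects into the page bootstrap data.
  return isFeatureEnabled(FeatureFlag.DRILL_TO_DETAIL);
}
```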
diff --git a/docker/docker-bootstrap.sh b/docker/docker-bootstrap.sh
index 0784a0fdf2e5b..6b92e970028b4 100755
--- a/docker/docker-bootstrap.sh
+++ b/docker/docker-bootstrap.sh
@@ -40,6 +40,7 @@ if [[ "${1}" == "worker" ]]; then
  celery --app=superset.tasks.celery_app:app worker -O fair -l INFO
elif [[ "${1}" == "beat" ]]; then
  echo "Starting Celery beat..."
+  rm -f /tmp/celerybeat.pid
  celery --app=superset.tasks.celery_app:app beat --pidfile /tmp/celerybeat.pid -l INFO -s "${SUPERSET_HOME}"/celerybeat-schedule
elif [[ "${1}" == "app" ]]; then
  echo "Starting web app..."
diff --git a/docker/docker-init.sh b/docker/docker-init.sh
index c98f49881ada7..b54f999cb2a85 100755
--- a/docker/docker-init.sh
+++ b/docker/docker-init.sh
@@ -72,7 +72,7 @@ if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
    superset load_test_users
    superset load_examples --load-test-data
  else
-   superset load_examples
+   superset load_examples --force
  fi
  echo_step "4" "Complete" "Loading examples"
fi
diff --git a/docs/docs/databases/mysql.mdx b/docs/docs/databases/mysql.mdx
index e784321515b4c..651721602f0d2 100644
--- a/docs/docs/databases/mysql.mdx
+++ b/docs/docs/databases/mysql.mdx
@@ -22,7 +22,7 @@ Host:
- For Docker running in OSX: `docker.for.mac.host.internal`
Port: `3306` by default

-One problem with `mysqlclient` is that it will fail to connect to newer MySQL databases using `caching_sha2_password` for authentication, since the plugin is not included in the client. In this case, you should use `[mysql-connector-python](https://pypi.org/project/mysql-connector-python/)` instead:
+One problem with `mysqlclient` is that it will fail to connect to newer MySQL databases using `caching_sha2_password` for authentication, since the plugin is not included in the client. In this case, you should use [mysql-connector-python](https://pypi.org/project/mysql-connector-python/) instead:

```
mysql+mysqlconnector://{username}:{password}@{host}/{database}
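The docusaurus.config.js diff that follows spreads the navbar rework across several hunks. Assembled for readability, the two affected items look roughly like the sketch below; the key fix is switching `class` to `className`, the property Docusaurus actually supports for styling a navbar item (the matching `.get-started-button` and `.github-button` rules are added to main.less later in this diff):

```ts
// Rough assembly of the two navbar items touched by the hunks below;
// the rest of the Docusaurus config is omitted.
const navbarItems = [
  {
    href: '/docs/intro',
    position: 'right',
    className: 'default-button-theme get-started-button',
    label: 'Get Started',
  },
  {
    href: 'https://github.com/apache/superset',
    position: 'right',
    className: 'github-button', // was `class`, which Docusaurus does not recognize
  },
];

export default navbarItems;
```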
diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js
index db875088c0ddb..4346191cbd0e1 100644
--- a/docs/docusaurus.config.js
+++ b/docs/docusaurus.config.js
@@ -32,7 +32,7 @@ const config = {
  baseUrl: '/',
  onBrokenLinks: 'throw',
  onBrokenMarkdownLinks: 'throw',
-  favicon: 'img/favicon.ico',
+  favicon: '/img/favicon.ico',
  organizationName: 'apache', // Usually your GitHub org/user name.
  projectName: 'superset', // Usually your repo name.
  themes: ['@saucelabs/theme-github-codeblock'],
@@ -177,8 +177,8 @@ navbar: {
      logo: {
        alt: 'Superset Logo',
-        src: 'img/superset-logo-horiz.svg',
-        srcDark: 'img/superset-logo-horiz-dark.svg',
+        src: '/img/superset-logo-horiz.svg',
+        srcDark: '/img/superset-logo-horiz-dark.svg',
      },
      items: [
        {
@@ -219,28 +219,40 @@
        },
      ],
    },
+      {
+        href: '/docs/intro',
+        position: 'right',
+        className: 'default-button-theme get-started-button',
+        label: 'Get Started',
+      },
      {
        href: 'https://github.com/apache/superset',
        position: 'right',
-        class: 'github-logo-container',
+        className: 'github-button',
      },
    ],
  },
  footer: {
-    style: 'dark',
    links: [],
-    copyright: `Copyright © ${new Date().getFullYear()},
-      The Apache Software Foundation,
-      Licensed under the Apache License.
-      Apache Superset, Apache, Superset, the Superset logo, and the Apache feather logo are either registered trademarks or trademarks of The Apache Software Foundation. All other products or name brands are trademarks of their respective holders, including The Apache Software Foundation.
-      Apache Software Foundation resources
-      Security | Donate | Thanks | Events | License
-    `,
+    copyright: `
+      Copyright © ${new Date().getFullYear()},
+      The Apache Software Foundation,
+      Licensed under the Apache License.
+      Apache Superset, Apache, Superset, the Superset logo, and the Apache feather logo are either registered trademarks or trademarks of The Apache Software Foundation. All other products or name brands are trademarks of their respective holders, including The Apache Software Foundation.
+      Apache Software Foundation resources
+      Divider
+      Security | Donate | Thanks | Events | License
`, }, prism: { theme: lightCodeTheme, @@ -250,8 +262,7 @@ const config = { scripts: [ '/script/matomo.js', { - src: - 'https://www.bugherd.com/sidebarv2.js?apikey=enilpiu7bgexxsnoqfjtxa', + src: 'https://www.bugherd.com/sidebarv2.js?apikey=enilpiu7bgexxsnoqfjtxa', async: true, }, ], diff --git a/docs/src/components/BlurredSection.tsx b/docs/src/components/BlurredSection.tsx new file mode 100644 index 0000000000000..7c8589c684fe4 --- /dev/null +++ b/docs/src/components/BlurredSection.tsx @@ -0,0 +1,53 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React, { ReactNode } from 'react'; +import styled from '@emotion/styled'; +import { mq } from '../utils'; + +const StyledBlurredSection = styled('section')` + text-align: center; + border-bottom: 1px solid var(--ifm-border-color); + overflow: hidden; + .blur { + max-width: 635px; + width: 100%; + margin-top: -70px; + margin-bottom: -35px; + position: relative; + z-index: -1; + ${mq[1]} { + margin-top: -40px; + } + } +`; + +interface BlurredSectionProps { + children: ReactNode; +} + +const BlurredSection = ({ children }: BlurredSectionProps) => { + return ( + + {children} + Blur + + ); +}; + +export default BlurredSection; diff --git a/docs/src/components/SectionHeader.tsx b/docs/src/components/SectionHeader.tsx new file mode 100644 index 0000000000000..c868b4097aa56 --- /dev/null +++ b/docs/src/components/SectionHeader.tsx @@ -0,0 +1,123 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; +import styled from '@emotion/styled'; +import { mq } from '../utils'; + +type StyledSectionHeaderProps = { + dark: boolean; +}; + +const StyledSectionHeader = styled('div')` + display: flex; + flex-direction: column; + align-items: center; + text-align: center; + padding: 75px 20px 0; + max-width: 720px; + margin: 0 auto; + ${mq[1]} { + padding-top: 55px; + } + .title, + .subtitle { + color: ${props => + props.dark + ? 
'var(--ifm-font-base-color-inverse)' + : 'var(--ifm-font-base-color)'}; + } +`; + +const StyledSectionHeaderH1 = styled(StyledSectionHeader)` + .title { + font-size: 96px; + ${mq[1]} { + font-size: 46px; + } + } + .line { + margin-top: -45px; + margin-bottom: 15px; + ${mq[1]} { + margin-top: -20px; + margin-bottom: 30px; + } + } + .subtitle { + font-size: 30px; + line-height: 40px; + ${mq[1]} { + font-size: 25px; + line-height: 29px; + } + } +`; + +const StyledSectionHeaderH2 = styled(StyledSectionHeader)` + .title { + font-size: 48px; + ${mq[1]} { + font-size: 34px; + } + } + .line { + margin-top: -15px; + margin-bottom: 15px; + ${mq[1]} { + margin-top: -5px; + } + } + .subtitle { + font-size: 24px; + line-height: 32px; + ${mq[1]} { + font-size: 18px; + line-height: 26px; + } + } +`; + +interface SectionHeaderProps { + level: any; + title: string; + subtitle?: string; + dark?: boolean; +} + +const SectionHeader = ({ + level, + title, + subtitle, + dark, +}: SectionHeaderProps) => { + const Heading = level; + + const StyledRoot = + level === 'h1' ? StyledSectionHeaderH1 : StyledSectionHeaderH2; + + return ( + + {title} + line + {subtitle &&

{subtitle}

} +
+
+  );
+};
+
+export default SectionHeader;
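BlurredSection and SectionHeader above, and the community and index pages below, all interpolate an `mq` helper imported from '../utils' that never appears in this diff. A minimal sketch of the shape it presumably has, the common emotion pattern of an array of media-query strings; the breakpoint values are placeholders, since the real ones are not shown:

```ts
// docs/src/utils, assumed shape (not part of this diff).
// In the styled blocks, `${mq[1]} { font-size: 46px; }` scopes a rule
// to viewports at or below the second breakpoint.
const breakpoints = [400, 800, 1200]; // placeholder values
export const mq = breakpoints.map(
  (bp: number) => `@media (max-width: ${bp}px)`,
);
```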
diff --git a/docs/src/pages/community.tsx b/docs/src/pages/community.tsx
index 52d8bb33064a5..bf3a7ab04fd46 100644
--- a/docs/src/pages/community.tsx
+++ b/docs/src/pages/community.tsx
@@ -20,103 +20,211 @@ import React from 'react';
import styled from '@emotion/styled';
import { List } from 'antd';
import Layout from '@theme/Layout';
+import { mq } from '../utils';
+import SectionHeader from '../components/SectionHeader';
+import BlurredSection from '../components/BlurredSection';

-const links = [
-  [
-    'http://bit.ly/join-superset-slack',
-    'Slack',
-    'interact with other Superset users and community members',
-  ],
-  [
-    'https://github.com/apache/superset',
-    'GitHub',
-    'create tickets to report issues, report bugs, and suggest new features',
-  ],
-  [
-    'https://lists.apache.org/list.html?dev@superset.apache.org',
-    'dev@ Mailing List',
-    'participate in conversations with committers and contributors',
-  ],
-  [
-    'https://calendar.google.com/calendar/u/2?cid=c3VwZXJzZXQuY29tbWl0dGVyc0BnbWFpbC5jb20',
-    'Superset Community Calendar',
-    'join us for working group sessions and other community gatherings',
-  ],
-  [
-    'https://stackoverflow.com/questions/tagged/superset+apache-superset',
-    'Stack Overflow',
-    'our growing knowledge base',
-  ],
-  [
-    'https://www.meetup.com/Global-Apache-Superset-Community-Meetup/',
-    'Superset Meetup Group',
-    'join our monthly virtual meetups and register for any upcoming events',
-  ],
-  [
-    'https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md',
-    'Organizations',
-    'a list of some of the organizations using Superset in production',
-  ],
-  [
-    'https://github.com/apache-superset/awesome-apache-superset',
-    'Contributors Guide',
-    'Interested in contributing? Learn how to contribute and best practices',
-  ],
+const communityLinks = [
+  {
+    url: 'http://bit.ly/join-superset-slack',
+    title: 'Slack',
+    description: 'Interact with other Superset users and community members.',
+    image: 'slack-symbol.jpg',
+    ariaLabel:
+      'Interact with other Superset users and community members on Slack',
+  },
+  {
+    url: 'https://github.com/apache/superset',
+    title: 'GitHub',
+    description:
+      'Create tickets to report issues, report bugs, and suggest new features.',
+    image: 'github-symbol.jpg',
+    ariaLabel:
+      'Create tickets to report issues, report bugs, and suggest new features on Superset GitHub repo',
+  },
+  {
+    url: 'https://lists.apache.org/list.html?dev@superset.apache.org',
+    title: 'dev@ Mailing List',
+    description:
+      'Participate in conversations with committers and contributors.',
+    image: 'email-symbol.png',
+    ariaLabel:
+      'Participate in conversations with committers and contributors on Superset mailing list',
+  },
+  {
+    url: 'https://stackoverflow.com/questions/tagged/superset+apache-superset',
+    title: 'Stack Overflow',
+    description: 'Our growing knowledge base.',
+    image: 'stackoverflow-symbol.jpg',
+    ariaLabel: 'See Superset issues on Stack Overflow',
+  },
+  {
+    url: 'https://www.meetup.com/Global-Apache-Superset-Community-Meetup/',
+    title: 'Superset Meetup Group',
+    description:
+      'Join our monthly virtual meetups and register for any upcoming events.',
+    image: 'coffee-symbol.png',
+    ariaLabel:
+      'Join our monthly virtual meetups and register for any upcoming events on Meetup',
+  },
+  {
+    url: 'https://github.com/apache/superset/blob/master/RESOURCES/INTHEWILD.md',
+    title: 'Organizations',
+    description:
+      'A list of some of the organizations using Superset in production.',
+    image: 'note-symbol.png',
+    ariaLabel: 'See a list of the organizations using Superset in production',
+  },
+  {
+    url: 'https://github.com/apache-superset/awesome-apache-superset',
+    title: 'Contributors Guide',
+    description:
+      'Interested in contributing?
Learn how to contribute and best practices.', + image: 'writing-symbol.png', + ariaLabel: 'Learn how to contribute and best practices on Superset GitHub', + }, ]; -const StyledMain = styled('main')` - padding-bottom: 60px; - padding-left: 16px; - padding-right: 16px; - section { - width: 100%; - max-width: 800px; +const StyledJoinCommunity = styled('section')` + background-color: var(--ifm-off-section-background); + border-bottom: 1px solid var(--ifm-border-color); + .list { + max-width: 540px; margin: 0 auto; - padding: 60px 0 0 0; - font-size: 17px; - &:first-of-type{ - padding: 40px; - background-image: linear-gradient(120deg, #d6f2f8, #52c6e3); - border-radius: 0 0 10px; + padding: 40px 20px 20px 35px; + } + .item { + padding: 0; + border: 0; + } + .icon { + width: 40px; + margin-top: 5px; + ${mq[1]} { + width: 40px; + margin-top: 0; + } + } + .title { + font-size: 20px; + line-height: 36px; + font-weight: 700; + color: var(--ifm-font-base-color); + ${mq[1]} { + font-size: 23px; + line-height: 26px; + } + } + .description { + font-size: 14px; + line-height: 20px; + color: var(--ifm-secondary-text); + margin-top: -8px; + margin-bottom: 23px; + ${mq[1]} { + font-size: 17px; + line-height: 22px; + color: var(--ifm-primary-text); + margin-bottom: 35px; + margin-top: 0; } } `; -const StyledGetInvolved = styled('div')` - margin-bottom: 25px; +const StyledCalendarIframe = styled('iframe')` + display: block; + margin: 20px auto 30px; + max-width: 800px; + width: 100%; + height: 600px; + border: 0; + ${mq[1]} { + width: calc(100% - 40px); + } +`; + +const StyledNewsletterIframe = styled('iframe')` + display: block; + max-width: 1080px; + width: calc(100% - 40px); + height: 285px; + margin: 30px auto 20px; + border: 0; + @media (max-width: 1200px) { + height: 380px; + } + @media (max-width: 679px) { + height: 680px; + margin-top: 15px; + } `; const Community = () => { return ( - -
-

Community

- Get involved in our welcoming, fast growing community! -
-
- -

Get involved!

- ( - - {link} - {' '} - - - {' '} - {post} - - )} - /> -
-
-
+
+ + + + + ( + + + + + } + title={ + + {title} + + } + description={

{description}

} + role="group" + aria-label="Community link" + /> +
+ )} + /> +
+ + + + + + + + +
); }; diff --git a/docs/src/pages/index.tsx b/docs/src/pages/index.tsx index f3781a4039cca..59e5170cd7410 100644 --- a/docs/src/pages/index.tsx +++ b/docs/src/pages/index.tsx @@ -16,290 +16,506 @@ * specific language governing permissions and limitations * under the License. */ -import React, { useRef, useState } from 'react'; +import React, { useRef, useState, useEffect } from 'react'; import Layout from '@theme/Layout'; import Link from '@docusaurus/Link'; -import { - Button, Col, Row, Carousel, -} from 'antd'; +import { Carousel } from 'antd'; import styled from '@emotion/styled'; -import { supersetTheme } from '@superset-ui/style'; -import '../styles/main.less'; -import { - DeploymentUnitOutlined, - FireOutlined, - DotChartOutlined, - DatabaseOutlined, -} from '@ant-design/icons'; import GitHubButton from 'react-github-btn'; import { mq } from '../utils'; import { Databases } from '../resources/data'; +import SectionHeader from '../components/SectionHeader'; +import BlurredSection from '../components/BlurredSection'; +import '../styles/main.less'; -const { colors } = supersetTheme; +const features = [ + { + image: 'powerful-yet-easy.jpg', + title: 'Powerful yet easy to use', + description: + 'Superset makes it easy to explore your data, using either our simple no-code viz builder or state-of-the-art SQL IDE.', + }, + { + image: 'modern-databases.jpg', + title: 'Integrates with modern databases', + description: + 'Superset can connect to any SQL-based databases including modern cloud-native databases and engines at petabyte scale.', + }, + { + image: 'modern-architecture.jpg', + title: 'Modern architecture', + description: + 'Superset is lightweight and highly scalable, leveraging the power of your existing data infrastructure without requiring yet another ingestion layer.', + }, + { + image: 'rich-visualizations.jpg', + title: 'Rich visualizations and dashboards', + description: + 'Superset ships with 40+ pre-installed visualization types. 
Our plug-in architecture makes it easy to build custom visualizations.', + }, +]; const StyledMain = styled('main')` text-align: center; - .alert-color { - color: ${colors.alert.base}; - } - .error-color { - color: ${colors.error.base}; - } - .warning-color { - color: ${colors.warning.base}; - } - .info-color { - color: ${colors.info.base}; - } - .success-color { - color: ${colors.success.base}; - } - .secondary-color { - color: ${colors.secondary.base}; - } - .info-text { - font-size: 32px; - font-weight: normal; - max-width: 600px; - margin: auto; - } - .info-text-smaller { - font-size: 24px; - max-width: 800px; - } `; const StyledTitleContainer = styled('div')` position: relative; - padding-top: 60px; - padding-bottom: 80px; - padding-left: 20px; - padding-right: 20px; - background-image: url('img/data-point.jpg'); + padding: 130px 20px 0; + margin-bottom: 160px; + background-image: url('/img/grid-background.jpg'); background-size: cover; - background-position-x: right; - .github-section { - margin-bottom: 40px; - margin-top: 40px; - .github-button { - margin: 5px; - } + ${mq[1]} { + margin-bottom: 100px; + } + .info-container { + position: relative; + z-index: 4; } - .logo-horiz { - margin-top: 20px; - margin-bottom: 20px; - width: 600px; - max-width: 100%; - ${[mq[3]]} { - width: 550px; + .superset-mark { + ${mq[1]} { + width: 140px; } - ${[mq[2]]} { - width: 450px; + } + .info-text { + font-size: 30px; + line-height: 37px; + max-width: 720px; + margin: 24px auto 10px; + color: var(--ifm-font-base-color-inverse); + ${mq[1]} { + font-size: 25px; + line-height: 30px; } - ${[mq[1]]} { - width: 425px; + } + .github-section { + margin-top: 9px; + ${mq[1]} { + display: flex; + flex-direction: column; + overflow: hidden; } - ${[mq[0]]} { - width: 400px; + .github-button { + margin: 5px; + ${mq[1]} { + transform: scale(1.25); + margin: 8px; + &:first-of-type { + margin-top: 5px; + } + &:last-of-type { + margin-bottom: 5px; + } + } } } - .alert { - color: #0c5460; - background-color: #d1ecf1; - border-color: #bee5eb; - max-width: 600px; - margin: 0 auto; - padding: 0.75rem 1.25rem; - margin-top: 83px; - border: 1px solid transparent; - border-radius: 0.25rem; - } `; -const StyledHeading = styled('h2')` - font-size: 55px; - text-align: center; +const StyledButton = styled(Link)` + border-radius: 10px; + font-size: 20px; + font-weight: bold; + width: 170px; + padding: 10px 0; + margin: 15px auto 0; + ${mq[1]} { + font-size: 19px; + width: 175px; + padding: 10px 0; + } `; -const StyledFeatures = styled('div')` - background: #fff; - padding: 5vw 0; - margin-top: 0px; - margin-bottom: 10px; - .featureList { - padding: 40px; +const StyledScreenshotContainer = styled('div')` + position: relative; + display: inline-block; + padding-top: 30px; + margin-top: 25px; + margin-bottom: -125px; + max-width: 800px; + ${mq[1]} { + padding-top: 20px; + } + .screenshot { + position: relative; + z-index: 3; + border-radius: 10px; + } + .screenshot-shadow-1 { + position: absolute; + top: 15px; + left: 20px; + width: calc(100% - 40px); + height: calc(100% - 15px); + background-color: #256b7c; + border-radius: 10px; + z-index: 2; + ${mq[1]} { + background-color: #335a64; + top: 10px; + left: 15px; + width: calc(100% - 30px); + height: calc(100% - 10px); + } + } + .screenshot-shadow-2 { + position: absolute; + top: 0; + left: 40px; + width: calc(100% - 80px); + height: 100%; + background-color: #0d5262; + border-radius: 10px; + z-index: 1; + ${mq[1]} { + background-color: #1f4048; + left: 30px; + width: calc(100% - 
60px); + } + } + .screenshotBlur { + display: none; + background-color: #173036; + filter: blur(45px); + position: absolute; + bottom: 0; + left: 50%; width: 100%; - list-style-type: none; - margin: 0 auto; - max-width: 1000px; - .feature { - padding: 20px; + padding-top: 100%; + border-radius: 50%; + transform: translate3d(-50%, 0, 0); + opacity: 0.3; + ${mq[1]} { + display: block; + } + } +`; + +const StyledFeaturesList = styled('ul')` + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: 16px; + width: 100%; + max-width: 1170px; + margin: 15px auto 0; + padding: 0 20px; + ${mq[1]} { + grid-template-columns: repeat(1, minmax(0, 1fr)); + } + .item { + text-align: left; + border: 1px solid var(--ifm-border-color); + background-color: #ffffff; + border-radius: 10px; + overflow: hidden; + display: flex; + align-items: flex-start; + padding: 20px; + ${mq[1]} { + flex-direction: column; + align-items: center; text-align: center; - margin-bottom: 20px; - .imagePlaceHolder { - svg { - width: 70px; - height: 70px; - } - margin-bottom: 15px; + padding: 35px; + } + .image { + flex-shrink: 0; + margin-right: 20px; + width: 140px; + text-align: center; + ${mq[1]} { + width: 115px; } - .featureText { - color: ${colors.grayscale.dark2}; + } + .title { + font-size: 24px; + color: var(--ifm-primary-text); + margin: 10px 0 0; + ${mq[1]} { + font-size: 23px; + margin-top: 20px; + } + } + .description { + font-size: 17px; + line-height: 23px; + color: var(--ifm-secondary-text); + margin: 5px 0 0; + ${mq[1]} { font-size: 16px; - strong { - font-size: 22px; - } + margin-top: 10px; } } } - .heading { - font-size: 22px; - margin: 0 auto; - text-align: center; - } `; -const StyledIntegrations = styled('div')` - background: white; - margin-bottom: 64px; - .databaseSub { - text-align: center; +const StyledSliderSection = styled('div')` + position: relative; + padding: 60px 20px; + ${mq[1]} { + padding-top: 0; + padding-bottom: 50px; + } + &::before { + content: ''; display: block; - margin-bottom: 40px; - font-size: 18px; + width: 100%; + height: calc(100% - 320px); + position: absolute; + top: 0; + left: 0; + background-image: url('/img/grid-background.jpg'); + background-size: cover; + z-index: -1; + ${mq[1]} { + height: 100%; + } } - - .database-list { - margin-top: 100px; - list-style-type: none; - padding: 0px; - max-width: 1000px; - margin: 0 auto; + .toggleBtns { display: flex; - flex-wrap: wrap; - justify-content: space-around; - margin-bottom: 50px; - li { - padding: 15px; + justify-content: space-between; + list-style: none; + max-width: 870px; + width: 100%; + margin: 0 auto 20px; + padding: 0; + ${mq[1]} { + flex-direction: column; + text-align: left; + max-width: 140px; + gap: 10px; + margin-top: 15px; + margin-bottom: 40px; + } + .toggle { + font-size: 24px; + color: #b4c0c7; + position: relative; + padding-left: 32px; + cursor: pointer; + ${mq[1]} { + font-size: 17px; + font-weight: bold; + padding-left: 22px; + } + &::before { + content: ''; + display: block; + width: 12px; + height: 12px; + border-radius: 50%; + background-color: #457f8d; + position: absolute; + top: 50%; + left: 0; + transform: translate3d(0, -50%, 0); + ${mq[1]} { + width: 8px; + height: 8px; + } + } + &.active { + font-weight: 700; + color: var(--ifm-font-base-color-inverse); + } + &.active::before { + background-color: var(--ifm-color-primary); + } + } + } + .slide { + max-width: 920px; + & > p { + max-width: 560px; + margin: 0 auto; + font-size: 24px; + line-height: 32px; + color: 
var(--ifm-font-base-color-inverse); + margin-bottom: 45px; + ${mq[1]} { + font-size: 17px; + line-height: 23px; + } + } + } + video { + width: 100%; + max-width: 920px; + margin-top: 10px; + border-radius: 10px; + ${mq[1]} { + border-radius: 5px; } } `; -const CarouselSection = styled('div')` - .toggleContainer { - display: flex; - flex-direction: column; - margin-bottom: 100px; - position: relative; - .toggleBtns { +const StyledKeyFeatures = styled('div')` + margin-top: 50px; + & > h3 { + font-size: 30px; + } + .grid { + display: grid; + grid-template-columns: repeat(2, minmax(0, 1fr)); + gap: 30px; + max-width: 960px; + margin: 30px auto 0; + padding: 0 20px; + text-align: left; + ${mq[1]} { + grid-template-columns: repeat(1, minmax(0, 1fr)); + } + & > .item { display: flex; - flex-direction: row; - /* ${[mq[0]]} { - flex-direction: column; - } */ - justify-content: center; - .toggle { - margin: 10px; - color: #666; - border: 1px solid #888; - background-color: #20a7c911; - border-radius: 3px; - padding: 16px; - transition: all 0.25s; - overflow: visible; - ${[mq[0]]} { - > span { - display: none; - position: absolute; - bottom: 0px; - left: 50%; - width: 100%; - transform: translate(-50%, 100%); - } - h2 { - font-size: 14px; - margin: 0; - } - } - &:hover { - cursor: pointer; - color: ${colors.primary.base}; - border: 1px solid ${colors.primary.base}; - } - &.active { - background: red; - background: #20a7c933; - ${[mq[0]]} { - > span { - display: block; - } - } + font-size: 17px; + ${mq[1]} { + font-size: 15px; + } + & > img { + width: 20px; + height: 20px; + flex-shrink: 0; + margin-right: 12px; + margin-top: 4px; + ${mq[1]} { + width: 18px; + height: 18px; + margin-top: 2px; } } } - .imageContainer { - img { - margin: 0 auto; - max-width: 800px; - box-shadow: 0 0 3px #aaa; - margin-top: 5px; - margin-bottom: 5px; + } + .row { + display: flex; + max-width: 960px; + margin: 30px auto 0; + & > .column { + width: 50%; + & > ul { + font-size: 17px; + list-style: none; + padding: 0 20px; + text-align: left; + margin: 0; + & > li { + display: flex; + margin-bottom: 20px; + & > img { + width: 20px; + height: 20px; + flex-shrink: 0; + margin-right: 12px; + margin-top: 4px; + } + } } } } `; -const StyledCredits = styled.div` - width: 100%; - height: 60px; - padding: 18px; - background-color: #282E4A; - text-align: center; - color: #FFFFFF; -`; - -const StyledDatabaseImg = styled.img` - width: ${(props) => props.width}; - height: ${(props) => props.height}; +const StyledIntegrations = styled('div')` + padding: 0 20px; + .database-grid { + display: grid; + grid-template-columns: repeat(5, minmax(0, 1fr)); + gap: 14px; + max-width: 1160px; + margin: 25px auto 0; + ${mq[1]} { + grid-template-columns: repeat(4, minmax(0, 1fr)); + } + ${mq[0]} { + grid-template-columns: repeat(1, minmax(0, 1fr)); + } + & > .item { + border: 1px solid var(--ifm-border-color); + border-radius: 10px; + overflow: hidden; + height: 120px; + padding: 25px; + display: flex; + align-items: center; + justify-content: center; + & > a { + height: 100%; + } + & img { + height: 100%; + object-fit: contain; + } + } + } + .database-sub { + display: block; + text-align: center; + font-size: 17px; + margin-top: 50px; + } `; -interface featureProps { - icon: React.ReactNode, - title: string, - descr: string, -} -const Feature = ({ icon, title, descr }: featureProps) => ( -
  • -
    - {icon} -
    -
    -

    {title}

    - {descr} -
    -
  • -); export default function Home(): JSX.Element { const slider = useRef(null); const [slideIndex, setSlideIndex] = useState(0); - const onChange = (index) => { - setSlideIndex(index); + const onChange = (current, next) => { + setSlideIndex(next); + }; + + const changeToDark = () => { + const navbar = document.body.querySelector('.navbar'); + const logo = document.body.querySelector('.navbar__logo img'); + navbar.classList.add('navbar--dark'); + logo.setAttribute('src', '/img/superset-logo-horiz-dark.svg'); + }; + + const changeToLight = () => { + const navbar = document.body.querySelector('.navbar'); + const logo = document.body.querySelector('.navbar__logo img'); + navbar.classList.remove('navbar--dark'); + logo.setAttribute('src', '/img/superset-logo-horiz.svg'); }; + // Set up dark <-> light navbar change + useEffect(() => { + changeToDark(); + + const navbarToggle = document.body.querySelector('.navbar__toggle'); + navbarToggle.addEventListener('click', () => changeToLight()); + + const scrollListener = () => { + if (window.scrollY > 0) { + changeToLight(); + } else { + changeToDark(); + } + }; + + window.addEventListener('scroll', scrollListener); + + return () => { + window.removeEventListener('scroll', scrollListener); + changeToLight(); + }; + }, []); + return ( - logo-horiz +
    + Superset mark
    - Apache Superset is a modern data exploration and visualization - platform + Apache Superset™ is an open-source modern data exploration + and visualization platform.
    + line
    -
    - - - -
    + line + + Get Started + +
    + + hero-screenshot +
    +
    +
    +
    - - - Overview -
    - Superset is fast, lightweight, intuitive, and loaded with options - that make it easy for users of all skill sets to explore and - visualize their data, from simple line charts to highly detailed - geospatial charts. -
    -
      - - - } - title="Powerful yet easy to use" - descr={` - Quickly and easily integrate and explore your data, using - either our simple no-code viz builder or state of the art SQL - IDE. - `} - /> - - - - } - title="Integrates with modern databases" - descr={` - Superset can connect to any SQL based datasource - through SQLAlchemy, including modern cloud native databases - and engines at petabyte scale. - `} - /> - - - - - } - title="Modern architecture" - descr={` - Superset is lightweight and highly scalable, leveraging the - power of your existing data infrastructure without requiring - yet another ingestion layer. - `} - /> - - - } - title="Rich visualizations and dashboards" - descr={` - Superset ships with a wide array of beautiful visualizations. - Our visualization plug-in architecture makes it easy to build - custom visualizations that drop directly into Superset. - `} - /> - - + + + + {features.map(({ image, title, description }) => ( +
    • +
      + +
      +
      +

      {title}

      +

      {description}

      +
      +
    • + ))} +
      +
      + + + +
        +
      • slider.current.goTo(0)} + role="button" + > + Dashboards +
      • +
      • slider.current.goTo(1)} + role="button" + > + Chart Builder +
      • +
      • slider.current.goTo(2)} + role="button" + > + SQL Lab +
      • +
      • slider.current.goTo(3)} + role="button" + > + Datasets +
      - - - - Explore -
      -
      -
      slider.current.goTo(0)} - role="button" - > -

      Explore

      - - Explore your data using the array of data visualizations. - + +
      +

      + Explore data and find insights from interactive dashboards. +

      +
      +
      +

      Drag and drop to create robust charts and tables.

      +
      +
      +

      + Write custom SQL queries, browse database metadata, use Jinja + templating, and more. +

      +
      +
      +

      + Create physical and virtual datasets to scale chart creation + with unified metric definitions. +

      +
      +
      + + + +

      Key features

      +
      +
      + check-icon +
      + 40+ pre-installed visualizations
      - -
      slider.current.goTo(1)} - role="button" - > -

      View

      - View your data through interactive dashboards +
      +
      + check-icon +
      + Support for drag-and-drop and{' '} + SQL queries
      -
      slider.current.goTo(2)} - role="button" - > -

      Investigate

      - Use SQL Lab to write queries to explore your data +
      +
      + check-icon +
+ Data caching for faster loading of + charts and dashboards +
      +
      +
      + check-icon +
      + Jinja templating and dashboard filters for + creating interactive dashboards +
      +
      +
      + check-icon +
      + CSS templates to customize charts and + dashboards to your brand’s look and feel
      - -
      - Explore (chart buider) UI +
      + check-icon +
      + Semantic layer for SQL data transformations
      -
      - Superset Dashboard +
      +
      + check-icon +
      + Cross-filters, drill-to-detail, and drill-by{' '} + features for deeper data analysis
      -
      - SQL Lab +
      +
      + check-icon +
      + Virtual datasets for ad-hoc data exploration
      - +
      +
      + check-icon +
+ Access to new functionality through{' '} + feature flags +
      +
      - - Supported Databases - -
        - {Databases.map( - ({ - title, imgName: imageName, width, height, - }) => ( -
      • - -
      • - ), - )} -
      - - ... and many other - - {' '} - compatible databases - {' '} - - -
      - + + + + + +
      + {Databases.map(({ title, href, imgName }) => ( +
      + {href ? ( + + + + ) : ( + + )} +
      + ))} +
      + + ...and many other{' '} + + compatible databases + + +
      +
      - - We use{' '} - - - - ); } diff --git a/docs/src/resources/data.js b/docs/src/resources/data.js index 79b12017de7f2..a07be552673ef 100644 --- a/docs/src/resources/data.js +++ b/docs/src/resources/data.js @@ -19,34 +19,39 @@ export const Databases = [ { - title: 'Amazon Redshift', - href: 'https://aws.amazon.com/redshift/', - imgName: 'aws-redshift.png', + title: 'PostgreSQL', + href: 'https://www.postgresql.org/', + imgName: 'postgresql.svg', }, { - title: 'Apache Druid', - href: 'http://druid.io/', - imgName: 'druid.png', + title: 'BigQuery', + href: 'https://cloud.google.com/bigquery/', + imgName: 'google-big-query.svg', }, { - title: 'Apache Kylin', - href: 'http://kylin.apache.org/', - imgName: 'apache-kylin.png', + title: 'Snowflake', + href: 'https://www.snowflake.com/', + imgName: 'snowflake.svg', }, { - title: 'BigQuery', - href: 'https://cloud.google.com/bigquery/', - imgName: 'googleBQ.png', + title: 'MySQL', + href: 'https://www.mysql.com/', + imgName: 'mysql.jpg', }, { - title: 'ClickHouse', - href: 'https://clickhouse.tech/', - imgName: 'clickhouse.png', + title: 'Amazon Redshift', + href: 'https://aws.amazon.com/redshift/', + imgName: 'amazon-redshift.jpg', }, { - title: 'Dremio', - href: 'https://dremio.com/', - imgName: 'dremio.png', + title: 'Amazon Athena', + href: 'https://aws.amazon.com/pt/athena/', + imgName: 'amazon-athena.jpg', + }, + { + title: 'Apache Druid', + href: 'http://druid.io/', + imgName: 'druid.png', }, { title: 'Databricks', @@ -54,39 +59,33 @@ export const Databases = [ imgName: 'databricks.png', }, { - title: 'Exasol', - href: 'https://www.exasol.com/en/', - imgName: 'exasol.png', + title: 'Google Sheets', + href: 'https://www.google.com/sheets/about/', + imgName: 'google-sheets.svg', }, { - title: 'FireBirdSql', - href: 'https://firebirdsql.org/', - imgName: 'firebird.png', + title: 'CSV', + imgName: 'csv.svg', }, { - title: 'Green Plum', - href: 'https://greenplum.org/', - imgName: 'greenplum.png', - }, - { - title: 'IBM Db2', - href: 'https://www.ibm.com/analytics/db2', - imgName: 'ibmdb2.png', + title: 'ClickHouse', + href: 'https://clickhouse.tech/', + imgName: 'clickhouse.png', }, { - title: 'MySQL', - href: 'https://www.mysql.com/', - imgName: 'mysql.png', + title: 'Rockset', + href: 'https://rockset.com/', + imgName: 'rockset.png', }, { - title: 'Microsoft SqlServer', - href: 'https://www.microsoft.com/en-us/sql-server', - imgName: 'msql.png', + title: 'Dremio', + href: 'https://dremio.com/', + imgName: 'dremio.png', }, { - title: 'MonetDB', - href: 'https://www.monetdb.org/', - imgName: 'monet.png', + title: 'Trino', + href: 'https://trino.io/', + imgName: 'trino2.jpg', }, { title: 'Oracle', @@ -94,9 +93,9 @@ export const Databases = [ imgName: 'oraclelogo.png', }, { - title: 'PostgresSQL', - href: 'https://www.postgresql.org/', - imgName: 'postsql.png', + title: 'Apache Pinot', + href: 'https://pinot.apache.org/', + imgName: 'apache-pinot.svg', }, { title: 'Presto', @@ -104,58 +103,18 @@ export const Databases = [ imgName: 'presto-og.png', }, { - title: 'Snowflake', - href: 'https://www.snowflake.com/', - imgName: 'snowflake.png', - }, - { - title: 'SQLite', - href: 'https://www.sqlite.org/index.html', - imgName: 'sqlite.png', - }, - { - title: 'Trino', - href: 'https://trino.io/', - imgName: 'trino2.jpg', - }, - { - title: 'Rockset', - href: 'https://rockset.com/', - imgName: 'rockset.png', - }, - { - title: 'Vertica', - href: 'https://www.vertica.com/', - imgName: 'vertica.png', - }, - { - title: 'Hologres', - href: 
'https://www.alibabacloud.com/product/hologres', - imgName: 'hologres.png', - }, - { - title: 'IBM Netezza Performance Server', - href: 'https://www.ibm.com/products/netezza', - imgName: 'netezza.png', - }, - { - title: 'Teradata', - href: "www.teradata.com", - imgName: 'teradata.png' + title: 'IBM Db2', + href: 'https://www.ibm.com/analytics/db2', + imgName: 'ibmdb2.png', }, { - title: 'TimescaleDB', - href: "www.timescale.com", - imgName: 'timescale.png' + title: 'SAP Hana', + href: 'https://www.sap.com/products/technology-platform/hana.html', + imgName: 'sap-hana.jpg', }, { - title: 'YugabyteDB', - href: "www.yugabyte.com", - imgName: 'yugabyte.png' + title: 'Microsoft SqlServer', + href: 'https://www.microsoft.com/en-us/sql-server', + imgName: 'msql.png', }, - { - title: 'StarRocks', - href: "www.starrocks.io", - imgName: 'starrocks.png' - } ]; diff --git a/docs/src/styles/custom.css b/docs/src/styles/custom.css index 2f133aaebdf9c..f13af3dbe2a0c 100644 --- a/docs/src/styles/custom.css +++ b/docs/src/styles/custom.css @@ -24,7 +24,7 @@ */ /* You can override the default Infima variables here. */ -@import url('https://fonts.googleapis.com/css2?family=Inter&display=swap'); +@import url('https://fonts.googleapis.com/css2?family=Roboto:wght@400;700&display=swap'); :root { --ifm-color-primary: #20a7c9; @@ -34,80 +34,17 @@ --ifm-color-primary-light: #79cade; --ifm-color-primary-lighter: #a5dbe9; --ifm-color-primary-lightest: #d2edf4; + --ifm-font-base-color: #484848; + --ifm-font-base-color-inverse: #ffffff; --ifm-code-font-size: 95%; --ifm-menu-link-padding-vertical: 12px; --doc-sidebar-width: 350px !important; --ifm-navbar-height: none; - --ifm-font-family-base: Inter; -} -body { - font-family: Inter !important; -} -.DocSearch-Button .DocSearch-Button-Key { - display: none; -} -.github-logo-container { - background-image: url('/img/github.png'); - background-size: contain; - width: 30px; - height: 30px; -} - -.theme-doc-toc-desktop { - position: fixed !important; -} - -.docusaurus-highlight-code-line { - background-color: rgba(0, 0, 0, 0.1); - display: block; - margin: 0 calc(-1 * var(--ifm-pre-padding)); - padding: 0 var(--ifm-pre-padding); -} - -html[data-theme='dark'] .docusaurus-highlight-code-line { - background-color: rgba(0, 0, 0, 0.3); -} - -.navbar__logo { - height: 40px; -} - -.navbar-sidebar__brand { - padding-left: 0; -} - -.menu, -.navbar { - font-size: 14px; - font-weight: 400; -} - -/* Hacks to disable Swagger UI's "try it out" interactive mode */ -.try-out, -.auth-wrapper, -.information-container { - display: none !important; -} - -.swagger-ui table td, -.swagger-ui table th, -.swagger-ui table tr { - border: none; -} - -.markdown h2:first-child { - margin-top: 0.5em; -} - -@media only screen and (min-width: 800px) { - .navbar__logo { - height: 50px; - } - - .navbar { - padding-left: 0; - } -} -a > span > svg { - display: none; + --ifm-font-family-base: Roboto; + --ifm-footer-background-color: #173036; + --ifm-footer-color: #87939a; + --ifm-off-section-background: #fbfbfb; + --ifm-border-color: #ededed; + --ifm-primary-text: #484848; + --ifm-secondary-text: #5f5f5f; } diff --git a/docs/src/styles/main.less b/docs/src/styles/main.less index ba3c1ad0b8e9e..80dee90ecabd5 100644 --- a/docs/src/styles/main.less +++ b/docs/src/styles/main.less @@ -19,3 +19,242 @@ @import '~antd/lib/style/themes/default.less'; @import '~antd/dist/antd.less'; // Import Ant Design styles by less entry @import 'antd-theme.less'; + +body { + font-family: var(--ifm-font-family-base); + color: 
var(--ifm-font-base-color); +} + +h1, +h2, +h3, +h4, +h5, +h6 { + color: var(--ifm-font-base-color); + font-weight: var(--ifm-heading-font-weight); +} + +.under-navbar { + margin-top: -67px; +} + +.theme-doc-toc-desktop { + position: fixed !important; +} + +.docusaurus-highlight-code-line { + background-color: rgba(0, 0, 0, 0.1); + display: block; + margin: 0 calc(-1 * var(--ifm-pre-padding)); + padding: 0 var(--ifm-pre-padding); +} + +html[data-theme='dark'] .docusaurus-highlight-code-line { + background-color: rgba(0, 0, 0, 0.3); +} + +.menu { + font-size: 14px; + font-weight: 400; +} + +/* Hacks to disable Swagger UI's "try it out" interactive mode */ +.try-out, +.auth-wrapper, +.information-container { + display: none !important; +} + +.swagger-ui table td, +.swagger-ui table th, +.swagger-ui table tr { + border: none; +} + +.markdown h2:first-child { + margin-top: 0.5em; +} + +a > span > svg { + display: none; +} + +/* Default button */ + +.default-button-theme { + display: block; + background: linear-gradient(180deg, #20a7c9 0%, #0c8fae 100%); + color: #ffffff; + text-align: center; + position: relative; + z-index: 2; + &::before { + border-radius: inherit; + background: linear-gradient(180deg, #11b0d8 0%, #116f86 100%); + content: ''; + display: block; + height: 100%; + position: absolute; + top: 0; + left: 0; + opacity: 0; + width: 100%; + z-index: -1; + transition: all 0.3s; + } + &:hover { + color: #ffffff; + &::before { + opacity: 1; + } + } +} + +/* Navbar */ + +.navbar { + font-size: 14px; + font-weight: 400; + background-color: #fff; + + .get-started-button { + border-radius: 10px; + font-size: 18px; + font-weight: bold; + width: 142px; + padding: 7px 0; + margin-right: 20px; + } + + .github-button { + background-image: url('/img/github.png'); + background-size: contain; + width: 30px; + height: 30px; + margin-right: 10px; + } +} + +.navbar--dark { + background-color: transparent; + border-bottom: 1px solid rgba(24, 115, 132, 0.4); + + .github-button { + background-image: url('/img/github-dark.png'); + } +} + +.navbar__logo { + height: 50px; +} + +.dropdown > .navbar__link::after { + display: none; +} + +.navbar-sidebar__brand { + padding-left: 0; +} + +@media only screen and (max-width: 996px) { + .navbar { + padding-right: 8px; + padding-left: 8px; + + .get-started-button, + .github-button { + display: none; + } + } + + .navbar__items { + flex-direction: row-reverse; + justify-content: space-between; + } + + .navbar__logo { + height: 48px; + } +} + +/* Sidebar */ + +.navbar-sidebar { + left: auto; + right: 0; + transform: translate3d(100%, 0, 0); +} + +/* Search Bar */ + +.navbar .DocSearch { + --docsearch-text-color: #187384; + --docsearch-muted-color: #187384; + --docsearch-searchbox-background: #fff; + border: 1px solid #187384; + border-radius: 10px; + + &.DocSearch-Button { + width: 225px; + } + + .DocSearch-Search-Icon { + width: 16px; + height: 16px; + } + + .DocSearch-Button-Key, + .DocSearch-Button-Placeholder { + display: none; + } +} + +.navbar--dark .DocSearch { + --docsearch-searchbox-background: #1d3d46; +} + +@media only screen and (max-width: 996px) { + .navbar .DocSearch.DocSearch-Button { + display: none; + } +} + +/* Footer */ + +.footer { + position: relative; + padding-top: 90px; + font-size: 15px; +} + +.footer__applitools { + background-color: #0d3e49; + color: #e1e1e1; + position: absolute; + top: 0; + left: 0; + width: 100%; + padding: 16px 0; + + img { + height: 34px; + } +} + +.footer__divider { + margin: 10px auto 25px; +} + +.footer small { + 
font-size: 13px; + display: block; + margin: 0 auto; + max-width: 830px; +} + +@media only screen and (max-width: 996px) { + .footer__applitools img { + height: 28px; + } +} diff --git a/docs/static/img/applitools.png b/docs/static/img/applitools.png index 66b7b1eb4f375..89034f378054a 100644 Binary files a/docs/static/img/applitools.png and b/docs/static/img/applitools.png differ diff --git a/docs/static/img/check-icon.svg b/docs/static/img/check-icon.svg new file mode 100644 index 0000000000000..858b2d3f3a6aa --- /dev/null +++ b/docs/static/img/check-icon.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/static/img/community/blur.png b/docs/static/img/community/blur.png new file mode 100644 index 0000000000000..78814af487d21 Binary files /dev/null and b/docs/static/img/community/blur.png differ diff --git a/docs/static/img/community/coffee-symbol.png b/docs/static/img/community/coffee-symbol.png new file mode 100644 index 0000000000000..e45475f08e587 Binary files /dev/null and b/docs/static/img/community/coffee-symbol.png differ diff --git a/docs/static/img/community/email-symbol.png b/docs/static/img/community/email-symbol.png new file mode 100644 index 0000000000000..5e437e8a4efba Binary files /dev/null and b/docs/static/img/community/email-symbol.png differ diff --git a/docs/static/img/community/github-symbol.jpg b/docs/static/img/community/github-symbol.jpg new file mode 100644 index 0000000000000..f7f3e1e08393d Binary files /dev/null and b/docs/static/img/community/github-symbol.jpg differ diff --git a/docs/static/img/community/line.png b/docs/static/img/community/line.png new file mode 100644 index 0000000000000..8ee3db38a3379 Binary files /dev/null and b/docs/static/img/community/line.png differ diff --git a/docs/static/img/community/note-symbol.png b/docs/static/img/community/note-symbol.png new file mode 100644 index 0000000000000..7aa69b0053203 Binary files /dev/null and b/docs/static/img/community/note-symbol.png differ diff --git a/docs/static/img/community/slack-symbol.jpg b/docs/static/img/community/slack-symbol.jpg new file mode 100644 index 0000000000000..25c4685364b79 Binary files /dev/null and b/docs/static/img/community/slack-symbol.jpg differ diff --git a/docs/static/img/community/stackoverflow-symbol.jpg b/docs/static/img/community/stackoverflow-symbol.jpg new file mode 100644 index 0000000000000..5a9bc7dc3dfb2 Binary files /dev/null and b/docs/static/img/community/stackoverflow-symbol.jpg differ diff --git a/docs/static/img/community/writing-symbol.png b/docs/static/img/community/writing-symbol.png new file mode 100644 index 0000000000000..087b9dc2dfdef Binary files /dev/null and b/docs/static/img/community/writing-symbol.png differ diff --git a/docs/static/img/databases/amazon-athena.jpg b/docs/static/img/databases/amazon-athena.jpg new file mode 100644 index 0000000000000..33ee95c083280 Binary files /dev/null and b/docs/static/img/databases/amazon-athena.jpg differ diff --git a/docs/static/img/databases/amazon-redshift.jpg b/docs/static/img/databases/amazon-redshift.jpg new file mode 100644 index 0000000000000..97a58612cb883 Binary files /dev/null and b/docs/static/img/databases/amazon-redshift.jpg differ diff --git a/docs/static/img/databases/apache-pinot.svg b/docs/static/img/databases/apache-pinot.svg new file mode 100644 index 0000000000000..0d69dd4d2fb3c --- /dev/null +++ b/docs/static/img/databases/apache-pinot.svg @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/docs/static/img/databases/aws-redshift.png b/docs/static/img/databases/aws-redshift.png 
deleted file mode 100644 index 73d79b8bf4a88..0000000000000 Binary files a/docs/static/img/databases/aws-redshift.png and /dev/null differ diff --git a/docs/static/img/databases/csv.svg b/docs/static/img/databases/csv.svg new file mode 100644 index 0000000000000..792b90deeeb5f --- /dev/null +++ b/docs/static/img/databases/csv.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/docs/static/img/databases/google-big-query.svg b/docs/static/img/databases/google-big-query.svg new file mode 100644 index 0000000000000..08a0544bbb472 --- /dev/null +++ b/docs/static/img/databases/google-big-query.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/docs/static/img/databases/google-sheets.svg b/docs/static/img/databases/google-sheets.svg new file mode 100644 index 0000000000000..11fcdd65f8197 --- /dev/null +++ b/docs/static/img/databases/google-sheets.svg @@ -0,0 +1,166 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/static/img/databases/googleBQ.png b/docs/static/img/databases/googleBQ.png deleted file mode 100644 index da6960ac9b32f..0000000000000 Binary files a/docs/static/img/databases/googleBQ.png and /dev/null differ diff --git a/docs/static/img/databases/mysql.jpg b/docs/static/img/databases/mysql.jpg new file mode 100644 index 0000000000000..dedef48c61c88 Binary files /dev/null and b/docs/static/img/databases/mysql.jpg differ diff --git a/docs/static/img/databases/mysql.png b/docs/static/img/databases/mysql.png deleted file mode 100644 index b68620c289f13..0000000000000 Binary files a/docs/static/img/databases/mysql.png and /dev/null differ diff --git a/docs/static/img/databases/postgresql.svg b/docs/static/img/databases/postgresql.svg new file mode 100644 index 0000000000000..f96e464eb69cc --- /dev/null +++ b/docs/static/img/databases/postgresql.svg @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/static/img/databases/postsql.png b/docs/static/img/databases/postsql.png deleted file mode 100644 index ca488a2d9c612..0000000000000 Binary files a/docs/static/img/databases/postsql.png and /dev/null differ diff --git a/docs/static/img/databases/sap-hana.jpg b/docs/static/img/databases/sap-hana.jpg new file mode 100644 index 0000000000000..5374367edf3b7 Binary files /dev/null and b/docs/static/img/databases/sap-hana.jpg differ diff --git a/docs/static/img/databases/sap-hana.png b/docs/static/img/databases/sap-hana.png new file mode 100644 index 0000000000000..8afdda39496e7 Binary files /dev/null and b/docs/static/img/databases/sap-hana.png differ diff --git a/docs/static/img/databases/snowflake.png b/docs/static/img/databases/snowflake.png deleted file mode 100644 index adc3443fdda77..0000000000000 Binary files a/docs/static/img/databases/snowflake.png and /dev/null differ diff --git a/docs/static/img/databases/snowflake.svg b/docs/static/img/databases/snowflake.svg new file mode 100644 index 0000000000000..07462d24106ee --- /dev/null +++ b/docs/static/img/databases/snowflake.svg @@ -0,0 +1,7 @@ + + + + + + + diff --git a/docs/static/img/features/modern-architecture.jpg b/docs/static/img/features/modern-architecture.jpg new file mode 100644 index 0000000000000..da6320d5180ce Binary files /dev/null and b/docs/static/img/features/modern-architecture.jpg differ diff --git a/docs/static/img/features/modern-databases.jpg b/docs/static/img/features/modern-databases.jpg new file mode 100644 index 
0000000000000..7ecfdb3cb9a02 Binary files /dev/null and b/docs/static/img/features/modern-databases.jpg differ diff --git a/docs/static/img/features/powerful-yet-easy.jpg b/docs/static/img/features/powerful-yet-easy.jpg new file mode 100644 index 0000000000000..020999439b702 Binary files /dev/null and b/docs/static/img/features/powerful-yet-easy.jpg differ diff --git a/docs/static/img/features/rich-visualizations.jpg b/docs/static/img/features/rich-visualizations.jpg new file mode 100644 index 0000000000000..783e41fe5f92c Binary files /dev/null and b/docs/static/img/features/rich-visualizations.jpg differ diff --git a/docs/static/img/github-dark.png b/docs/static/img/github-dark.png new file mode 100644 index 0000000000000..7492fcb54ae19 Binary files /dev/null and b/docs/static/img/github-dark.png differ diff --git a/docs/static/img/grid-background.jpg b/docs/static/img/grid-background.jpg new file mode 100644 index 0000000000000..865bfe85ca8be Binary files /dev/null and b/docs/static/img/grid-background.jpg differ diff --git a/docs/static/img/hero-screenshot.jpg b/docs/static/img/hero-screenshot.jpg new file mode 100644 index 0000000000000..7b2d993908b8e Binary files /dev/null and b/docs/static/img/hero-screenshot.jpg differ diff --git a/docs/static/img/superset-logo-horiz-dark.svg b/docs/static/img/superset-logo-horiz-dark.svg index 3fb7cdc0c19bf..bcf41bff325d3 100644 --- a/docs/static/img/superset-logo-horiz-dark.svg +++ b/docs/static/img/superset-logo-horiz-dark.svg @@ -15,7 +15,7 @@ under the License. --> - + @@ -24,14 +24,14 @@ - - - - - - - - + + + + + + + + diff --git a/docs/static/img/superset-mark-dark.svg b/docs/static/img/superset-mark-dark.svg new file mode 100644 index 0000000000000..f501dcf98f228 --- /dev/null +++ b/docs/static/img/superset-mark-dark.svg @@ -0,0 +1,20 @@ + + + + + diff --git a/docs/static/resources/openapi.json b/docs/static/resources/openapi.json index c52592aae90be..94dca800c3ab4 100644 --- a/docs/static/resources/openapi.json +++ b/docs/static/resources/openapi.json @@ -1238,7 +1238,7 @@ "type": "array" }, "granularity": { - "description": "Name of temporal column used for time filtering. 
+ "description": "Name of temporal column used for time filtering.", "nullable": true, "type": "string" }, diff --git a/docs/static/video/superset-video-4k.mp4 b/docs/static/video/superset-video-4k.mp4 new file mode 100644 index 0000000000000..068ce30c6f557 Binary files /dev/null and b/docs/static/video/superset-video-4k.mp4 differ diff --git a/helm/superset/Chart.yaml b/helm/superset/Chart.yaml index 81893c32eb810..4f0f6d7305f10 100644 --- a/helm/superset/Chart.yaml +++ b/helm/superset/Chart.yaml @@ -29,7 +29,7 @@ maintainers: - name: craig-rueda email: craig@craigrueda.com url: https://github.com/craig-rueda -version: 0.10.1 +version: 0.10.2 dependencies: - name: postgresql version: 12.1.6 diff --git a/helm/superset/README.md b/helm/superset/README.md index eaa5c857a9e35..dc58b30f78c4f 100644 --- a/helm/superset/README.md +++ b/helm/superset/README.md @@ -23,7 +23,7 @@ NOTE: This file is generated by helm-docs: https://github.com/norwoodj/helm-docs # superset -![Version: 0.10.1](https://img.shields.io/badge/Version-0.10.1-informational?style=flat-square) +![Version: 0.10.2](https://img.shields.io/badge/Version-0.10.2-informational?style=flat-square) Apache Superset is a modern, enterprise-ready business intelligence web application diff --git a/helm/superset/templates/init-job.yaml b/helm/superset/templates/init-job.yaml index 0a8c45330d05c..da83f7f95daf3 100644 --- a/helm/superset/templates/init-job.yaml +++ b/helm/superset/templates/init-job.yaml @@ -21,7 +21,7 @@ apiVersion: batch/v1 kind: Job metadata: - name: {{ template "superset.name" . }}-init-db + name: {{ template "superset.fullname" . }}-init-db namespace: {{ .Release.Namespace }} {{- if .Values.init.jobAnnotations }} annotations: {{- toYaml .Values.init.jobAnnotations | nindent 4 }} @@ -29,7 +29,7 @@ metadata: spec: template: metadata: - name: {{ template "superset.name" . }}-init-db + name: {{ template "superset.fullname" . 
}}-init-db {{- if .Values.init.podAnnotations }} annotations: {{- toYaml .Values.init.podAnnotations | nindent 8 }} {{- end }} diff --git a/requirements/base.txt b/requirements/base.txt index 148be94e12c70..b72d555957801 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -88,7 +88,7 @@ flask==2.2.5 # flask-migrate # flask-sqlalchemy # flask-wtf -flask-appbuilder==4.3.0 +flask-appbuilder==4.3.2 # via apache-superset flask-babel==1.0.0 # via flask-appbuilder diff --git a/requirements/integration.txt b/requirements/integration.txt index 10ec5278faf92..764f238f3978f 100644 --- a/requirements/integration.txt +++ b/requirements/integration.txt @@ -7,7 +7,7 @@ # build==0.10.0 # via pip-tools -cachetools==5.3.0 +cachetools==5.3.1 # via tox cfgv==3.3.1 # via pre-commit @@ -21,7 +21,7 @@ colorama==0.4.6 # via tox distlib==0.3.6 # via virtualenv -filelock==3.12.0 +filelock==3.12.2 # via # tox # virtualenv @@ -38,7 +38,7 @@ pip-compile-multi==2.6.3 # via -r integration.in pip-tools==6.13.0 # via pip-compile-multi -platformdirs==3.5.1 +platformdirs==3.5.3 # via # tox # virtualenv @@ -54,7 +54,7 @@ pyyaml==5.4.1 # via pre-commit toposort==1.10 # via pip-compile-multi -tox==4.6.0 +tox==4.6.1 # via -r integration.in virtualenv==20.23.0 # via diff --git a/setup.py b/setup.py index d8adea3285a20..c7c7c18df2e62 100644 --- a/setup.py +++ b/setup.py @@ -81,7 +81,7 @@ def get_git_sha() -> str: "cryptography>=39.0.1, <40", "deprecation>=2.1.0, <2.2.0", "flask>=2.2.5, <3.0.0", - "flask-appbuilder>=4.3.0, <5.0.0", + "flask-appbuilder>=4.3.2, <5.0.0", "flask-caching>=1.10.1, <1.11", "flask-compress>=1.13, <2.0", "flask-talisman>=1.0.0, <2.0", diff --git a/superset-frontend/package-lock.json b/superset-frontend/package-lock.json index 736890dc72f28..137695f4f888d 100644 --- a/superset-frontend/package-lock.json +++ b/superset-frontend/package-lock.json @@ -23165,8 +23165,7 @@ "version": "8.8.2", "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.8.2.tgz", "integrity": "sha512-x9VuX+R/jcFj1DHo/fCp99esgGDWiHENrKxaCENuCxpoMCmAt/COCGVDwA7kleEpEzJjDnvh3yGoOuLu0Dtllw==", - "optional": true, - "peer": true, + "devOptional": true, "dependencies": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -23182,8 +23181,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "optional": true, - "peer": true + "devOptional": true }, "node_modules/ajv-keywords": { "version": "3.5.2", @@ -23751,7 +23749,6 @@ "version": "1.2.5", "resolved": "https://registry.npmjs.org/aphrodite/-/aphrodite-1.2.5.tgz", "integrity": "sha1-g1jDbIC7A67puXFlqqcBhiJbSYM=", - "peer": true, "dependencies": { "asap": "^2.0.3", "inline-style-prefixer": "^3.0.1", @@ -25351,8 +25348,7 @@ "node_modules/bowser": { "version": "1.9.4", "resolved": "https://registry.npmjs.org/bowser/-/bowser-1.9.4.tgz", - "integrity": "sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ==", - "peer": true + "integrity": "sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ==" }, "node_modules/boxen": { "version": "5.1.2", @@ -28248,7 +28244,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/css-in-js-utils/-/css-in-js-utils-2.0.1.tgz", "integrity": "sha512-PJF0SpJT+WdbVVt0AOYp9C8GnuruRlL/UFW7932nLWmFLQTaWEzTBQEx7/hn4BuV+WON75iAViSUJLiU3PKbpA==", - "peer": true, "dependencies": { 
"hyphenate-style-name": "^1.0.2", "isobject": "^3.0.1" @@ -29915,23 +29910,6 @@ "topojson": "^1.6.19" } }, - "node_modules/datatables.net": { - "version": "1.11.3", - "resolved": "https://registry.npmjs.org/datatables.net/-/datatables.net-1.11.3.tgz", - "integrity": "sha512-VMj5qEaTebpNurySkM6jy6sGpl+s6onPK8xJhYr296R/vUBnz1+id16NVqNf9z5aR076OGcpGHCuiTuy4E05oQ==", - "dependencies": { - "jquery": ">=1.7" - } - }, - "node_modules/datatables.net-bs": { - "version": "1.11.3", - "resolved": "https://registry.npmjs.org/datatables.net-bs/-/datatables.net-bs-1.11.3.tgz", - "integrity": "sha512-Db1YwAhO0QAWQbZTsKriUrOInT66+xaA+fV616KTKpQt5Zt+p6OsEKK+xv8LxLgG8qu5dPwMBlkhqSiS/hV2sg==", - "dependencies": { - "datatables.net": ">=1.10.25", - "jquery": ">=1.7" - } - }, "node_modules/date-fns": { "version": "2.29.3", "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.29.3.tgz", @@ -36621,8 +36599,7 @@ "node_modules/hyphenate-style-name": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz", - "integrity": "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==", - "peer": true + "integrity": "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==" }, "node_modules/iconv-lite": { "version": "0.4.24", @@ -37056,7 +37033,6 @@ "version": "3.0.8", "resolved": "https://registry.npmjs.org/inline-style-prefixer/-/inline-style-prefixer-3.0.8.tgz", "integrity": "sha1-hVG45bTVcyROZqNLBPfTIHaitTQ=", - "peer": true, "dependencies": { "bowser": "^1.7.3", "css-in-js-utils": "^2.0.0" @@ -54244,8 +54220,7 @@ "node_modules/string-hash": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/string-hash/-/string-hash-1.1.3.tgz", - "integrity": "sha1-6Kr8CsGFW0Zmkp7X3RJ1311sgRs=", - "peer": true + "integrity": "sha1-6Kr8CsGFW0Zmkp7X3RJ1311sgRs=" }, "node_modules/string-length": { "version": "4.0.1", @@ -58989,9 +58964,9 @@ } }, "node_modules/xss": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.10.tgz", - "integrity": "sha512-qmoqrRksmzqSKvgqzN0055UFWY7OKx1/9JWeRswwEVX9fCG5jcYRxa/A2DHcmZX6VJvjzHRQ2STeeVcQkrmLSw==", + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.14.tgz", + "integrity": "sha512-og7TEJhXvn1a7kzZGQ7ETjdQVS2UfZyTlsEdDOqvQF7GoxNfY+0YLCzBy1kPdsDDx4QuNAonQPddpsn6Xl/7sw==", "dependencies": { "commander": "^2.20.3", "cssfilter": "0.0.10" @@ -60278,7 +60253,8 @@ "reselect": "^4.0.0", "rison": "^0.1.1", "seedrandom": "^3.0.5", - "whatwg-fetch": "^3.0.0" + "whatwg-fetch": "^3.0.0", + "xss": "^1.0.14" }, "devDependencies": { "@emotion/styled": "^11.3.0", @@ -64620,6 +64596,7 @@ "@vx/scale": "0.0.140", "@vx/shape": "0.0.140", "@vx/tooltip": "0.0.140", + "aphrodite": "^1.2.0", "d3-array": "^1.2.0", "d3-format": "^1.2.0", "d3-selection": "^1.1.0", @@ -76494,7 +76471,8 @@ "resize-observer-polyfill": "1.5.1", "rison": "^0.1.1", "seedrandom": "^3.0.5", - "whatwg-fetch": "^3.0.0" + "whatwg-fetch": "^3.0.0", + "xss": "^1.0.14" }, "dependencies": { "@testing-library/react-hooks": { @@ -80287,13 +80265,15 @@ "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", "devOptional": true, - "requires": {}, + "requires": { + "ajv": "^8.0.0" + }, "dependencies": { "ajv": { - "version": "https://registry.npmjs.org/ajv/-/ajv-8.8.2.tgz", + "version": "8.8.2", + "resolved": 
"https://registry.npmjs.org/ajv/-/ajv-8.8.2.tgz", "integrity": "sha512-x9VuX+R/jcFj1DHo/fCp99esgGDWiHENrKxaCENuCxpoMCmAt/COCGVDwA7kleEpEzJjDnvh3yGoOuLu0Dtllw==", - "optional": true, - "peer": true, + "devOptional": true, "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -80305,8 +80285,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "optional": true, - "peer": true + "devOptional": true } } }, @@ -80737,7 +80716,6 @@ "version": "1.2.5", "resolved": "https://registry.npmjs.org/aphrodite/-/aphrodite-1.2.5.tgz", "integrity": "sha1-g1jDbIC7A67puXFlqqcBhiJbSYM=", - "peer": true, "requires": { "asap": "^2.0.3", "inline-style-prefixer": "^3.0.1", @@ -81973,8 +81951,7 @@ "bowser": { "version": "1.9.4", "resolved": "https://registry.npmjs.org/bowser/-/bowser-1.9.4.tgz", - "integrity": "sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ==", - "peer": true + "integrity": "sha512-9IdMmj2KjigRq6oWhmwv1W36pDuA4STQZ8q6YO9um+x07xgYNCD3Oou+WP/3L1HNz7iqythGet3/p4wvc8AAwQ==" }, "boxen": { "version": "5.1.2", @@ -84274,7 +84251,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/css-in-js-utils/-/css-in-js-utils-2.0.1.tgz", "integrity": "sha512-PJF0SpJT+WdbVVt0AOYp9C8GnuruRlL/UFW7932nLWmFLQTaWEzTBQEx7/hn4BuV+WON75iAViSUJLiU3PKbpA==", - "peer": true, "requires": { "hyphenate-style-name": "^1.0.2", "isobject": "^3.0.1" @@ -85501,23 +85477,6 @@ "topojson": "^1.6.19" } }, - "datatables.net": { - "version": "1.11.3", - "resolved": "https://registry.npmjs.org/datatables.net/-/datatables.net-1.11.3.tgz", - "integrity": "sha512-VMj5qEaTebpNurySkM6jy6sGpl+s6onPK8xJhYr296R/vUBnz1+id16NVqNf9z5aR076OGcpGHCuiTuy4E05oQ==", - "requires": { - "jquery": ">=1.7" - } - }, - "datatables.net-bs": { - "version": "1.11.3", - "resolved": "https://registry.npmjs.org/datatables.net-bs/-/datatables.net-bs-1.11.3.tgz", - "integrity": "sha512-Db1YwAhO0QAWQbZTsKriUrOInT66+xaA+fV616KTKpQt5Zt+p6OsEKK+xv8LxLgG8qu5dPwMBlkhqSiS/hV2sg==", - "requires": { - "datatables.net": ">=1.10.25", - "jquery": ">=1.7" - } - }, "date-fns": { "version": "2.29.3", "resolved": "https://registry.npmjs.org/date-fns/-/date-fns-2.29.3.tgz", @@ -90666,8 +90625,7 @@ "hyphenate-style-name": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/hyphenate-style-name/-/hyphenate-style-name-1.0.4.tgz", - "integrity": "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==", - "peer": true + "integrity": "sha512-ygGZLjmXfPHj+ZWh6LwbC37l43MhfztxetbFCoYTM2VjkIUpeHgSNn7QIyVFj7YQ1Wl9Cbw5sholVJPzWvC2MQ==" }, "iconv-lite": { "version": "0.4.24", @@ -90985,7 +90943,6 @@ "version": "3.0.8", "resolved": "https://registry.npmjs.org/inline-style-prefixer/-/inline-style-prefixer-3.0.8.tgz", "integrity": "sha1-hVG45bTVcyROZqNLBPfTIHaitTQ=", - "peer": true, "requires": { "bowser": "^1.7.3", "css-in-js-utils": "^2.0.0" @@ -101381,7 +101338,8 @@ "integrity": "sha512-JZUw7hBsAHXK7PTyErJyI7SopSBFRcFHDjWW5SWjcugY0i6iH7f+eJkY8cJmGMlZ1C9xz1J3Vjz0plFpavVeRg==", "requires": { "@babel/runtime": "^7.2.0", - "invariant": "^2.2.4" + "invariant": "^2.2.4", + "prop-types": "^15.5.7" } }, "react-split": { @@ -104185,8 +104143,7 @@ "string-hash": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/string-hash/-/string-hash-1.1.3.tgz", - "integrity": 
"sha1-6Kr8CsGFW0Zmkp7X3RJ1311sgRs=", - "peer": true + "integrity": "sha1-6Kr8CsGFW0Zmkp7X3RJ1311sgRs=" }, "string-length": { "version": "4.0.1", @@ -107735,9 +107692,9 @@ "dev": true }, "xss": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.10.tgz", - "integrity": "sha512-qmoqrRksmzqSKvgqzN0055UFWY7OKx1/9JWeRswwEVX9fCG5jcYRxa/A2DHcmZX6VJvjzHRQ2STeeVcQkrmLSw==", + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.14.tgz", + "integrity": "sha512-og7TEJhXvn1a7kzZGQ7ETjdQVS2UfZyTlsEdDOqvQF7GoxNfY+0YLCzBy1kPdsDDx4QuNAonQPddpsn6Xl/7sw==", "requires": { "commander": "^2.20.3", "cssfilter": "0.0.10" @@ -107976,6 +107933,8 @@ "is-scoped": "^2.1.0", "lodash": "^4.17.10", "log-symbols": "^4.0.0", + "mem-fs": "^1.2.0 || ^2.0.0", + "mem-fs-editor": "^8.1.2 || ^9.0.0", "minimatch": "^3.0.4", "npmlog": "^5.0.1", "p-queue": "^6.6.2", diff --git a/superset-frontend/packages/superset-ui-core/package.json b/superset-frontend/packages/superset-ui-core/package.json index 59894f716948f..62deb7709b652 100644 --- a/superset-frontend/packages/superset-ui-core/package.json +++ b/superset-frontend/packages/superset-ui-core/package.json @@ -60,7 +60,8 @@ "reselect": "^4.0.0", "rison": "^0.1.1", "seedrandom": "^3.0.5", - "whatwg-fetch": "^3.0.0" + "whatwg-fetch": "^3.0.0", + "xss": "^1.0.14" }, "devDependencies": { "@emotion/styled": "^11.3.0", diff --git a/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx b/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx new file mode 100644 index 0000000000000..8fd06cb6f8e7a --- /dev/null +++ b/superset-frontend/packages/superset-ui-core/src/utils/html.test.tsx @@ -0,0 +1,113 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +import React from 'react'; +import { + sanitizeHtml, + isProbablyHTML, + sanitizeHtmlIfNeeded, + safeHtmlSpan, + removeHTMLTags, +} from './html'; + +describe('sanitizeHtml', () => { + test('should sanitize the HTML string', () => { + const htmlString = ''; + const sanitizedString = sanitizeHtml(htmlString); + expect(sanitizedString).not.toContain('script'); + }); +}); + +describe('isProbablyHTML', () => { + test('should return true if the text contains HTML tags', () => { + const htmlText = '
      Some HTML content
      '; + const isHTML = isProbablyHTML(htmlText); + expect(isHTML).toBe(true); + }); + + test('should return false if the text does not contain HTML tags', () => { + const plainText = 'Just a plain text'; + const isHTML = isProbablyHTML(plainText); + expect(isHTML).toBe(false); + }); +}); + +describe('sanitizeHtmlIfNeeded', () => { + test('should sanitize the HTML string if it contains HTML tags', () => { + const htmlString = '
<p>Some HTML content</p>
      '; + const sanitizedString = sanitizeHtmlIfNeeded(htmlString); + expect(sanitizedString).toEqual(htmlString); + }); + + test('should return the string as is if it does not contain HTML tags', () => { + const plainText = 'Just a plain text'; + const sanitizedString = sanitizeHtmlIfNeeded(plainText); + expect(sanitizedString).toEqual(plainText); + }); +}); + +describe('safeHtmlSpan', () => { + test('should return a safe HTML span when the input is HTML', () => { + const htmlString = '
<p>Some HTML content</p>
'; + const safeSpan = safeHtmlSpan(htmlString); + expect(safeSpan).toEqual( + <span + className="safe-html-span" + dangerouslySetInnerHTML={{ __html: htmlString }} + />, + ); + }); + + test('should return the input string as is when it is not HTML', () => { + const plainText = 'Just a plain text'; + const result = safeHtmlSpan(plainText); + expect(result).toEqual(plainText); + }); +}); + +describe('removeHTMLTags', () => { + test('should remove HTML tags from the string', () => { + const input = '
<p>Hello, World!</p>
      '; + const output = removeHTMLTags(input); + expect(output).toBe('Hello, World!'); + }); + + test('should return the same string when no HTML tags are present', () => { + const input = 'This is a plain text.'; + const output = removeHTMLTags(input); + expect(output).toBe('This is a plain text.'); + }); + + test('should remove nested HTML tags and return combined text content', () => { + const input = '
<div><h1>Title</h1><p>Content</p></div>
'; + const output = removeHTMLTags(input); + expect(output).toBe('TitleContent'); + }); + + test('should handle self-closing tags and return an empty string', () => { + const input = '<img src="image.jpg" alt="Image" />'; + const output = removeHTMLTags(input); + expect(output).toBe(''); + }); + + test('should handle malformed HTML tags and remove only well-formed tags', () => { + const input = '<div>
Unclosed tag'; + const output = removeHTMLTags(input); + expect(output).toBe('Unclosed tag'); + }); +});
diff --git a/superset-frontend/packages/superset-ui-core/src/utils/html.tsx b/superset-frontend/packages/superset-ui-core/src/utils/html.tsx new file mode 100644 index 0000000000000..3215eb9b9de5b --- /dev/null +++ b/superset-frontend/packages/superset-ui-core/src/utils/html.tsx @@ -0,0 +1,53 @@ +import React from 'react'; +import { FilterXSS, getDefaultWhiteList } from 'xss'; + +const xssFilter = new FilterXSS({ + whiteList: { + ...getDefaultWhiteList(), + span: ['style', 'class', 'title'], + div: ['style', 'class'], + a: ['style', 'class', 'href', 'title', 'target'], + img: ['style', 'class', 'src', 'alt', 'title', 'width', 'height'], + video: [ + 'autoplay', + 'controls', + 'loop', + 'preload', + 'src', + 'height', + 'width', + 'muted', + ], + }, + stripIgnoreTag: true, + css: false, +}); + +export function sanitizeHtml(htmlString: string) { + return xssFilter.process(htmlString); +} + +export function isProbablyHTML(text: string) { + return /<[^>]+>/.test(text); +} + +export function sanitizeHtmlIfNeeded(htmlString: string) { + return isProbablyHTML(htmlString) ? sanitizeHtml(htmlString) : htmlString; +} + +export function safeHtmlSpan(possiblyHtmlString: string) { + const isHtml = isProbablyHTML(possiblyHtmlString); + if (isHtml) { + return ( + <span + className="safe-html-span" + dangerouslySetInnerHTML={{ __html: sanitizeHtml(possiblyHtmlString) }} + /> + ); + } + return possiblyHtmlString; +} + +export function removeHTMLTags(str: string): string { + return str.replace(/<[^>]*>/g, ''); +}
diff --git a/superset-frontend/packages/superset-ui-core/src/utils/index.ts b/superset-frontend/packages/superset-ui-core/src/utils/index.ts index 4efc3dedb65af..32fa88251ee5f 100644 --- a/superset-frontend/packages/superset-ui-core/src/utils/index.ts +++ b/superset-frontend/packages/superset-ui-core/src/utils/index.ts @@ -31,3 +31,4 @@ export { getSelectedText } from './getSelectedText'; export * from './featureFlags'; export * from './random'; export * from './typedMemo'; +export * from './html';
diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Tooltip.tsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Tooltip.tsx index 9b20113448b7f..d61c4844acfe1 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Tooltip.tsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/components/Tooltip.tsx @@ -17,9 +17,8 @@ * under the License. */ -import { styled } from '@superset-ui/core'; -import React, { useMemo } from 'react'; -import { filterXSS } from 'xss'; +import { styled, safeHtmlSpan } from '@superset-ui/core'; +import React from 'react'; export type TooltipProps = { tooltip: @@ -55,28 +54,12 @@ export default function Tooltip(props: TooltipProps) { } const { x, y, content } = tooltip; - - if (typeof content === 'string') { - // eslint-disable-next-line react-hooks/rules-of-hooks - const contentHtml = useMemo( - () => ({ - __html: filterXSS(content, { stripIgnoreTag: true }), - }), - [content], - ); - return ( - <StyledTooltip top={y} left={x}> - <div - // eslint-disable-next-line react/no-danger - dangerouslySetInnerHTML={contentHtml} - /> - </StyledTooltip> - ); - } + const safeContent = + typeof content === 'string' ? safeHtmlSpan(content) : content; return ( <StyledTooltip top={y} left={x}> - {content} + {safeContent} </StyledTooltip> ); }
diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.jsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.jsx index 0aefc742934f5..263b576ec9a17 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.jsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/Geojson.jsx @@ -123,14 +123,15 @@ export function getLayer(formData, payload, onAddFilter, setTooltip) { return new GeoJsonLayer({ id: `geojson-layer-${fd.slice_id}`, - filled: fd.filled, data: features, - stroked: fd.stroked, extruded: fd.extruded, - pointRadiusScale: fd.point_radius_scale, + filled: fd.filled, + stroked: fd.stroked, getFillColor, - getLineWidth: fd.line_width || 1, getLineColor, + getLineWidth: fd.line_width || 1, + pointRadiusScale: fd.point_radius_scale, + lineWidthUnits: fd.line_width_unit, ...commonLayerProps(fd, setTooltip, setTooltipContent), }); }
diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/controlPanel.ts b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/controlPanel.ts index 352e8867b2ad5..894c09853ac08 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/controlPanel.ts +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Geojson/controlPanel.ts @@ -69,8 +69,23 @@ const config: ControlPanelConfig = { controlSetRows: [ [fillColorPicker, strokeColorPicker], [filled, stroked], - [extruded, null], - [lineWidth, null], + [extruded], + [lineWidth], + [ + { + name: 'line_width_unit', + config: { + type: 'SelectControl', + label: t('Line width unit'), + default: 'meters', + choices: [ + ['meters', t('meters')], + ['pixels', t('pixels')], + ], + renderTrigger: true, + }, + }, + ], [ { name: 'point_radius_scale', @@ -83,7 +98,6 @@ const config: ControlPanelConfig = { choices: formatSelectOptions([0, 100, 200, 300, 500]), }, }, - null, ], ], },
diff --git a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/Scatter.jsx b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/Scatter.jsx index 3ad0dcea986b8..523752306770c 100644 --- a/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/Scatter.jsx +++ b/superset-frontend/plugins/legacy-preset-chart-deckgl/src/layers/Scatter/Scatter.jsx @@ -83,8 +83,8 @@ export function getLayer( fp64: true, getFillColor: d => d.color, getRadius: d => d.radius, - radiusMinPixels: fd.min_radius || null, - radiusMaxPixels: fd.max_radius || null, + radiusMinPixels: Number(fd.min_radius) || null, + radiusMaxPixels: Number(fd.max_radius) || null, stroked: false, ...commonLayerProps( fd,
diff --git a/superset-frontend/plugins/plugin-chart-table/src/utils/formatValue.ts b/superset-frontend/plugins/plugin-chart-table/src/utils/formatValue.ts index 327e48ab3d89c..607afa8ac3989 100644 --- a/superset-frontend/plugins/plugin-chart-table/src/utils/formatValue.ts +++ b/superset-frontend/plugins/plugin-chart-table/src/utils/formatValue.ts @@ -16,41 +16,16 @@ * specific language governing permissions and limitations * under the License.
*/ -import { FilterXSS, getDefaultWhiteList } from 'xss'; import { DataRecordValue, GenericDataType, getNumberFormatter, + isProbablyHTML, + sanitizeHtml, } from '@superset-ui/core'; import { DataColumnMeta } from '../types'; import DateWithFormatter from './DateWithFormatter'; -const xss = new FilterXSS({ - whiteList: { - ...getDefaultWhiteList(), - span: ['style', 'class', 'title'], - div: ['style', 'class'], - a: ['style', 'class', 'href', 'title', 'target'], - img: ['style', 'class', 'src', 'alt', 'title', 'width', 'height'], - video: [ - 'autoplay', - 'controls', - 'loop', - 'preload', - 'src', - 'height', - 'width', - 'muted', - ], - }, - stripIgnoreTag: true, - css: false, -}); - -function isProbablyHTML(text: string) { - return /<[^>]+>/.test(text); -} - /** * Format text for cell value. */ @@ -76,7 +51,7 @@ function formatValue( return [false, formatter(value as number)]; } if (typeof value === 'string') { - return isProbablyHTML(value) ? [true, xss.process(value)] : [false, value]; + return isProbablyHTML(value) ? [true, sanitizeHtml(value)] : [false, value]; } return [false, value.toString()]; } diff --git a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx index 519e729c41847..b06488e3b7871 100644 --- a/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx +++ b/superset-frontend/src/SqlLab/components/SouthPane/SouthPane.test.jsx @@ -61,7 +61,7 @@ const store = mockStore({ queries: { LCly_kkIN: { cached: false, - changedOn: Date.now(), + changed_on: new Date().toISOString(), db: 'main', dbId: 1, id: 'LCly_kkIN', @@ -71,7 +71,7 @@ const store = mockStore({ }, lXJa7F9_r: { cached: false, - changedOn: 1559238500401, + changed_on: new Date(1559238500401).toISOString(), db: 'main', dbId: 1, id: 'lXJa7F9_r', @@ -80,7 +80,7 @@ const store = mockStore({ }, '2g2_iRFMl': { cached: false, - changedOn: 1559238506925, + changed_on: new Date(1559238506925).toISOString(), db: 'main', dbId: 1, id: '2g2_iRFMl', @@ -89,7 +89,7 @@ const store = mockStore({ }, erWdqEWPm: { cached: false, - changedOn: 1559238516395, + changed_on: new Date(1559238516395).toISOString(), db: 'main', dbId: 1, id: 'erWdqEWPm', diff --git a/superset-frontend/src/SqlLab/fixtures.ts b/superset-frontend/src/SqlLab/fixtures.ts index e295b02366b1a..ebfd01888515e 100644 --- a/superset-frontend/src/SqlLab/fixtures.ts +++ b/superset-frontend/src/SqlLab/fixtures.ts @@ -217,7 +217,6 @@ export const queries = [ progress: 100, startDttm: 1476910566092.96, state: QueryState.SUCCESS, - changedOn: 1476910566000, tempTable: null, userId: 1, executedSql: null, @@ -276,7 +275,6 @@ export const queries = [ progress: 100, startDttm: 1476910570802.2, state: QueryState.SUCCESS, - changedOn: 1476910572000, tempTable: null, userId: 1, executedSql: @@ -310,7 +308,6 @@ export const queryWithNoQueryLimit = { progress: 100, startDttm: 1476910566092.96, state: QueryState.SUCCESS, - changedOn: 1476910566000, tempTable: null, userId: 1, executedSql: null, @@ -710,7 +707,6 @@ export const testQuery: ISaveableDatasource = { export const mockdatasets = [...new Array(3)].map((_, i) => ({ changed_by_name: 'user', kind: i === 0 ? 
'virtual' : 'physical', // ensure there is 1 virtual - changed_by_url: 'changed_by_url', changed_by: 'user', changed_on: new Date().toISOString(), database_name: `db ${i}`, diff --git a/superset-frontend/src/SqlLab/reducers/sqlLab.js b/superset-frontend/src/SqlLab/reducers/sqlLab.js index 6dcd07a77b68b..915bb3f6b730a 100644 --- a/superset-frontend/src/SqlLab/reducers/sqlLab.js +++ b/superset-frontend/src/SqlLab/reducers/sqlLab.js @@ -614,8 +614,9 @@ export default function sqlLabReducer(state = {}, action) { (state.queries[id].state !== QueryState.STOPPED && state.queries[id].state !== QueryState.FAILED) ) { - if (changedQuery.changedOn > queriesLastUpdate) { - queriesLastUpdate = changedQuery.changedOn; + const changedOn = Date.parse(changedQuery.changed_on); + if (changedOn > queriesLastUpdate) { + queriesLastUpdate = changedOn; } const prevState = state.queries[id]?.state; const currentState = changedQuery.state; diff --git a/superset-frontend/src/assets/images/icons/cancel-x.svg b/superset-frontend/src/assets/images/icons/cancel-x.svg index e0e452f74437d..c3fc592d0513c 100644 --- a/superset-frontend/src/assets/images/icons/cancel-x.svg +++ b/superset-frontend/src/assets/images/icons/cancel-x.svg @@ -17,9 +17,6 @@ under the License. --> - - - Created with Sketch. diff --git a/superset-frontend/src/assets/images/icons/search.svg b/superset-frontend/src/assets/images/icons/search.svg index bef0709fd65b4..e3796880a452d 100644 --- a/superset-frontend/src/assets/images/icons/search.svg +++ b/superset-frontend/src/assets/images/icons/search.svg @@ -17,9 +17,7 @@ under the License. --> - Icon / Search@1.5x - Created with Sketch. diff --git a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailMenuItems.tsx b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailMenuItems.tsx index 98fe90eafae43..73f3a028e93e2 100644 --- a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailMenuItems.tsx +++ b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailMenuItems.tsx @@ -26,6 +26,7 @@ import { extractQueryFields, getChartMetadataRegistry, QueryFormData, + removeHTMLTags, styled, t, } from '@superset-ui/core'; @@ -50,7 +51,21 @@ const DisabledMenuItem = ({ children, ...props }: { children: ReactNode }) => ( ); -const Filter = styled.span` +const Filter = ({ + children, + stripHTML = false, +}: { + children: ReactNode; + stripHTML: boolean; +}) => { + const content = + stripHTML && typeof children === 'string' + ? removeHTMLTags(children) + : children; + return {content}; +}; + +const StyledFilter = styled(Filter)` ${({ theme }) => ` font-weight: ${theme.typography.weights.bold}; color: ${theme.colors.primary.base}; @@ -191,7 +206,7 @@ const DrillDetailMenuItems = ({ onClick={openModal.bind(null, [filter])} > {`${DRILL_TO_DETAIL_TEXT} `} - {filter.formattedVal} + {filter.formattedVal} ))} {filters.length > 1 && ( @@ -202,7 +217,7 @@ const DrillDetailMenuItems = ({ >
{`${DRILL_TO_DETAIL_TEXT} `} - <Filter>{t('all')}</Filter> + <StyledFilter stripHTML={false}>{t('all')}</StyledFilter> </Menu.Item>
      )} diff --git a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx index daf9f3f1cf4b5..d337e9b013afe 100644 --- a/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx +++ b/superset-frontend/src/components/Chart/DrillDetail/DrillDetailPane.tsx @@ -300,6 +300,7 @@ export default function DrillDetailPane({ } resizable virtualize + allowHTML /> ); diff --git a/superset-frontend/src/components/Datasource/CollectionTable.tsx b/superset-frontend/src/components/Datasource/CollectionTable.tsx index baa53264c719f..f1a1422287f01 100644 --- a/superset-frontend/src/components/Datasource/CollectionTable.tsx +++ b/superset-frontend/src/components/Datasource/CollectionTable.tsx @@ -18,7 +18,10 @@ */ import React, { ReactNode } from 'react'; import shortid from 'shortid'; + +import { InfoTooltipWithTrigger } from '@superset-ui/chart-controls'; import { t, styled } from '@superset-ui/core'; + import Button from 'src/components/Button'; import Icons from 'src/components/Icons'; import Fieldset from './Fieldset'; @@ -29,6 +32,7 @@ interface CRUDCollectionProps { allowDeletes?: boolean; collection: Array; columnLabels?: object; + columnLabelTooltips?: object; emptyMessage?: ReactNode; expandFieldset?: ReactNode; extraButtons?: ReactNode; @@ -222,6 +226,11 @@ export default class CRUDCollection extends React.PureComponent< return label; } + getTooltip(col: string) { + const { columnLabelTooltips } = this.props; + return columnLabelTooltips?.[col]; + } + changeCollection(collection: any, newItem?: object) { this.setState({ collection }); if (this.props.onChange) { @@ -311,6 +320,25 @@ export default class CRUDCollection extends React.PureComponent< return ; } + renderTH(col: string, sortColumns: Array) { + const tooltip = this.getTooltip(col); + return ( + + {this.getLabel(col)} + {tooltip && ( + <> + {' '} + + + )} + {sortColumns?.includes(col) && this.renderSortIcon(col)} + + ); + } + renderHeaderRow() { const cols = this.effectiveTableColumns(); const { allowDeletes, expandFieldset, extraButtons, sortColumns } = @@ -319,12 +347,7 @@ export default class CRUDCollection extends React.PureComponent< {expandFieldset && } - {cols.map(col => ( - - {this.getLabel(col)} - {sortColumns?.includes(col) && this.renderSortIcon(col)} - - ))} + {cols.map(col => this.renderTH(col, sortColumns))} {extraButtons} {allowDeletes && ( diff --git a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx index 34a702077a0bc..89ad1418097cd 100644 --- a/superset-frontend/src/components/Datasource/DatasourceEditor.jsx +++ b/superset-frontend/src/components/Datasource/DatasourceEditor.jsx @@ -182,10 +182,10 @@ function ColumnCollectionTable({ allowAddItem, allowEditDataType, itemGenerator, + columnLabelTooltips, }) { return ( @@ -1194,10 +1196,17 @@ class DatasourceEditor extends React.PureComponent { tableColumns={['metric_name', 'verbose_name', 'expression']} sortColumns={['metric_name', 'verbose_name', 'expression']} columnLabels={{ - metric_name: t('Metric'), + metric_name: t('Metric Key'), verbose_name: t('Label'), expression: t('SQL expression'), }} + columnLabelTooltips={{ + metric_name: t( + 'This field is used as a unique identifier to attach ' + + 'the metric to charts. It is also used as the alias in the ' + + 'SQL query.', + ), + }} expandFieldset={
      @@ -1417,6 +1426,13 @@ class DatasourceEditor extends React.PureComponent { onColumnsChange={calculatedColumns => this.setColumns({ calculatedColumns }) } + columnLabelTooltips={{ + column_name: t( + 'This field is used as a unique identifier to attach ' + + 'the calculated dimension to charts. It is also used ' + + 'as the alias in the SQL query.', + ), + }} onDatasourceChange={this.onDatasourceChange} datasource={datasource} editableColumnName diff --git a/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx b/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx index 7b211a086abe3..f598290ba8608 100644 --- a/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx +++ b/superset-frontend/src/components/Datasource/DatasourceEditor.test.jsx @@ -29,6 +29,12 @@ const props = { addSuccessToast: () => {}, addDangerToast: () => {}, onChange: () => {}, + columnLabels: { + state: 'State', + }, + columnLabelTooltips: { + state: 'This is a tooltip for `state`', + }, }; const DATASOURCE_ENDPOINT = 'glob:*/datasource/external_metadata_by_name/*'; diff --git a/superset-frontend/src/components/FilterableTable/index.tsx b/superset-frontend/src/components/FilterableTable/index.tsx index 1cf880419db25..2ca38617fccbf 100644 --- a/superset-frontend/src/components/FilterableTable/index.tsx +++ b/superset-frontend/src/components/FilterableTable/index.tsx @@ -22,6 +22,7 @@ import { JSONTree } from 'react-json-tree'; import { getMultipleTextDimensions, t, + safeHtmlSpan, styled, useTheme, } from '@superset-ui/core'; @@ -120,6 +121,7 @@ export interface FilterableTableProps { // need antd 5.0 to support striped color pattern striped?: boolean; expandedColumns?: string[]; + allowHTML?: boolean; } const FilterableTable = ({ @@ -128,6 +130,7 @@ const FilterableTable = ({ height, filterText = '', expandedColumns = [], + allowHTML = true, }: FilterableTableProps) => { const formatTableData = (data: Record[]): Datum[] => data.map(row => { @@ -346,13 +349,17 @@ const FilterableTable = ({ const renderTableCell = (cellData: CellDataType, columnKey: string) => { const cellNode = getCellContent({ cellData, columnKey }); - const content = - cellData === null ? 
{cellNode} : cellNode; + if (cellData === null) { + return {cellNode}; + } const jsonObject = safeJsonObjectParse(cellData); if (jsonObject) { return renderJsonModal(cellNode, jsonObject, cellData); } - return content; + if (allowHTML && typeof cellData === 'string') { + return safeHtmlSpan(cellNode); + } + return cellNode; }; // exclude the height of the horizontal scroll bar from the height of the table diff --git a/superset-frontend/src/components/Table/VirtualTable.tsx b/superset-frontend/src/components/Table/VirtualTable.tsx index 721fd906b469d..d8658dde60997 100644 --- a/superset-frontend/src/components/Table/VirtualTable.tsx +++ b/superset-frontend/src/components/Table/VirtualTable.tsx @@ -25,12 +25,13 @@ import classNames from 'classnames'; import { useResizeDetector } from 'react-resize-detector'; import React, { useEffect, useRef, useState, useCallback } from 'react'; import { VariableSizeGrid as Grid } from 'react-window'; -import { useTheme, styled } from '@superset-ui/core'; +import { useTheme, styled, safeHtmlSpan } from '@superset-ui/core'; import { TableSize, ETableAction } from './index'; interface VirtualTableProps extends AntTableProps { height?: number; + allowHTML?: boolean; } const StyledCell = styled('div')<{ height?: number }>( @@ -71,7 +72,15 @@ const MIDDLE = 47; const VirtualTable = ( props: VirtualTableProps, ) => { - const { columns, pagination, onChange, height, scroll, size } = props; + const { + columns, + pagination, + onChange, + height, + scroll, + size, + allowHTML = false, + } = props; const [tableWidth, setTableWidth] = useState(0); const onResize = useCallback((width: number) => { setTableWidth(width); @@ -213,6 +222,10 @@ const VirtualTable = ( content = render(content, data, rowIndex); } + if (allowHTML && typeof content === 'string') { + content = safeHtmlSpan(content); + } + return ( { * Returns props that should be applied to each row component. */ onRow?: AntTableProps['onRow']; + /** + * Will render html safely if set to true, anchor tags and such. 
Currently + * only supported for virtualize == true + */ + allowHTML?: boolean; } const defaultRowSelection: React.Key[] = []; @@ -249,6 +254,7 @@ export function Table( onChange = noop, recordCount, onRow, + allowHTML = false, } = props; const wrapperRef = useRef(null); @@ -405,6 +411,7 @@ export function Table( scrollToFirstRowOnChange: false, }), }} + allowHTML={allowHTML} /> )} diff --git a/superset-frontend/src/components/TableCollection/index.tsx b/superset-frontend/src/components/TableCollection/index.tsx index 88296edf638ee..bcda5139eb141 100644 --- a/superset-frontend/src/components/TableCollection/index.tsx +++ b/superset-frontend/src/components/TableCollection/index.tsx @@ -295,7 +295,6 @@ export default React.memo( const isWrapText = columnsForWrapText?.includes( cell.column.Header as string, ); - return ( { const options = await findAllSelectOptions(); - expect(options).toHaveLength(6); + expect(options).toHaveLength(5); expect(options[0]).toHaveTextContent('Admin'); }); diff --git a/superset-frontend/src/dashboard/components/SliceHeader/index.tsx b/superset-frontend/src/dashboard/components/SliceHeader/index.tsx index 497832766a0f1..c9cb74a8aff5e 100644 --- a/superset-frontend/src/dashboard/components/SliceHeader/index.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeader/index.tsx @@ -148,6 +148,7 @@ const SliceHeader: FC = ({ supersetCanShare = false, supersetCanCSV = false, exportFullCSV, + exportFullXLSX, slice, componentId, dashboardId, @@ -265,6 +266,7 @@ const SliceHeader: FC = ({ exportCSV={exportCSV} exportFullCSV={exportFullCSV} exportXLSX={exportXLSX} + exportFullXLSX={exportFullXLSX} supersetCanExplore={supersetCanExplore} supersetCanShare={supersetCanShare} supersetCanCSV={supersetCanCSV} diff --git a/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx b/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx index d2bc3e4bcc82e..add9bb97396d6 100644 --- a/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeaderControls/SliceHeaderControls.test.tsx @@ -45,6 +45,7 @@ const createProps = (viz_type = 'sunburst') => exportCSV: jest.fn(), exportFullCSV: jest.fn(), exportXLSX: jest.fn(), + exportFullXLSX: jest.fn(), forceRefresh: jest.fn(), handleToggleFullSize: jest.fn(), toggleExpandSlice: jest.fn(), @@ -223,6 +224,43 @@ test('Should not show export full CSV if report is not table', async () => { expect(screen.queryByText('Export to full .CSV')).not.toBeInTheDocument(); }); +test('Export full Excel is under featureflag', async () => { + // @ts-ignore + global.featureFlags = { + [FeatureFlag.ALLOW_FULL_CSV_EXPORT]: false, + }; + const props = createProps('table'); + renderWrapper(props); + userEvent.hover(screen.getByText('Download')); + expect(await screen.findByText('Export to Excel')).toBeInTheDocument(); + expect(screen.queryByText('Export to full Excel')).not.toBeInTheDocument(); +}); + +test('Should "export full Excel"', async () => { + // @ts-ignore + global.featureFlags = { + [FeatureFlag.ALLOW_FULL_CSV_EXPORT]: true, + }; + const props = createProps('table'); + renderWrapper(props); + expect(props.exportFullXLSX).toBeCalledTimes(0); + userEvent.hover(screen.getByText('Download')); + userEvent.click(await screen.findByText('Export to full Excel')); + expect(props.exportFullXLSX).toBeCalledTimes(1); + expect(props.exportFullXLSX).toBeCalledWith(371); +}); + +test('Should not show 
export full Excel if report is not table', async () => { + // @ts-ignore + global.featureFlags = { + [FeatureFlag.ALLOW_FULL_CSV_EXPORT]: true, + }; + renderWrapper(); + userEvent.hover(screen.getByText('Download')); + expect(await screen.findByText('Export to Excel')).toBeInTheDocument(); + expect(screen.queryByText('Export to full Excel')).not.toBeInTheDocument(); +}); + test('Should "Show chart description"', () => { const props = createProps(); renderWrapper(props); diff --git a/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx b/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx index 3a0137cbf0f01..e3958755179d7 100644 --- a/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx +++ b/superset-frontend/src/dashboard/components/SliceHeaderControls/index.tsx @@ -65,6 +65,7 @@ const MENU_KEYS = { EXPORT_CSV: 'export_csv', EXPORT_FULL_CSV: 'export_full_csv', EXPORT_XLSX: 'export_xlsx', + EXPORT_FULL_XLSX: 'export_full_xlsx', FORCE_REFRESH: 'force_refresh', FULLSCREEN: 'fullscreen', TOGGLE_CHART_DESCRIPTION: 'toggle_chart_description', @@ -146,6 +147,7 @@ export interface SliceHeaderControlsProps { exportCSV?: (sliceId: number) => void; exportFullCSV?: (sliceId: number) => void; exportXLSX?: (sliceId: number) => void; + exportFullXLSX?: (sliceId: number) => void; handleToggleFullSize: () => void; addDangerToast: (message: string) => void; @@ -295,6 +297,10 @@ const SliceHeaderControls = (props: SliceHeaderControlsPropsWithRouter) => { // eslint-disable-next-line no-unused-expressions props.exportFullCSV?.(props.slice.slice_id); break; + case MENU_KEYS.EXPORT_FULL_XLSX: + // eslint-disable-next-line no-unused-expressions + props.exportFullXLSX?.(props.slice.slice_id); + break; case MENU_KEYS.EXPORT_XLSX: // eslint-disable-next-line no-unused-expressions props.exportXLSX?.(props.slice.slice_id); @@ -485,25 +491,33 @@ const SliceHeaderControls = (props: SliceHeaderControlsPropsWithRouter) => { > {t('Export to .CSV')} + } + > + {t('Export to Excel')} + {props.slice.viz_type !== 'filter_box' && isFeatureEnabled(FeatureFlag.ALLOW_FULL_CSV_EXPORT) && props.supersetCanCSV && isTable && ( - } - > - {t('Export to full .CSV')} - + <> + } + > + {t('Export to full .CSV')} + + } + > + {t('Export to full Excel')} + + )} - } - > - {t('Export to Excel')} - } diff --git a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx index 38b6c837e1e9c..1c4d0dd9566c6 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Chart.jsx @@ -140,6 +140,7 @@ class Chart extends React.Component { this.exportCSV = this.exportCSV.bind(this); this.exportFullCSV = this.exportFullCSV.bind(this); this.exportXLSX = this.exportXLSX.bind(this); + this.exportFullXLSX = this.exportFullXLSX.bind(this); this.forceRefresh = this.forceRefresh.bind(this); this.resize = this.resize.bind(this); this.setDescriptionRef = this.setDescriptionRef.bind(this); @@ -337,6 +338,10 @@ class Chart extends React.Component { this.exportTable('xlsx', false); } + exportFullXLSX() { + this.exportTable('xlsx', true); + } + exportTable(format, isFullCSV) { const logAction = format === 'csv' @@ -451,6 +456,7 @@ class Chart extends React.Component { exportCSV={this.exportCSV} exportXLSX={this.exportXLSX} exportFullCSV={this.exportFullCSV} + exportFullXLSX={this.exportFullXLSX} updateSliceName={updateSliceName} 
sliceName={sliceName} supersetCanExplore={supersetCanExplore} diff --git a/superset-frontend/src/dashboard/components/gridComponents/Chart.test.jsx b/superset-frontend/src/dashboard/components/gridComponents/Chart.test.jsx index 28a972069bbca..89bcca7f78301 100644 --- a/superset-frontend/src/dashboard/components/gridComponents/Chart.test.jsx +++ b/superset-frontend/src/dashboard/components/gridComponents/Chart.test.jsx @@ -63,6 +63,7 @@ describe('Chart', () => { exportCSV() {}, exportFullCSV() {}, exportXLSX() {}, + exportFullXLSX() {}, componentId: 'test', dashboardId: 111, editMode: false, @@ -161,4 +162,14 @@ describe('Chart', () => { ); exploreUtils.exportChart.restore(); }); + it('should call exportChart with row_limit props.maxRows when exportFullXLSX is clicked', () => { + const stubbedExportXLSX = sinon + .stub(exploreUtils, 'exportChart') + .returns(() => {}); + const wrapper = setup(); + wrapper.instance().exportFullXLSX(props.slice.sliceId); + expect(stubbedExportXLSX.calledOnce).toBe(true); + expect(stubbedExportXLSX.lastCall.args[0].formData.row_limit).toEqual(666); + exploreUtils.exportChart.restore(); + }); }); diff --git a/superset-frontend/src/dashboard/components/nativeFilters/utils.ts b/superset-frontend/src/dashboard/components/nativeFilters/utils.ts index ef347de4c2df9..1af395a5d91c5 100644 --- a/superset-frontend/src/dashboard/components/nativeFilters/utils.ts +++ b/superset-frontend/src/dashboard/components/nativeFilters/utils.ts @@ -33,6 +33,14 @@ import extractUrlParams from 'src/dashboard/util/extractUrlParams'; import { isFeatureEnabled } from 'src/featureFlags'; import { CHART_TYPE, TAB_TYPE } from '../../util/componentTypes'; import { DASHBOARD_GRID_ID, DASHBOARD_ROOT_ID } from '../../util/constants'; +import getBootstrapData from '../../../utils/getBootstrapData'; + +const getDefaultRowLimit = (): number => { + const bootstrapData = getBootstrapData(); + const nativeFilterDefaultRowLimit = + bootstrapData?.common?.conf?.NATIVE_FILTER_DEFAULT_ROW_LIMIT; + return nativeFilterDefaultRowLimit || 1000; +}; export const getFormData = ({ datasetId, @@ -75,7 +83,7 @@ export const getFormData = ({ extra_form_data: dependencies, granularity_sqla, metrics: ['count'], - row_limit: 1000, + row_limit: getDefaultRowLimit(), showSearch: true, defaultValue: defaultDataMask?.filterState?.value, time_range, diff --git a/superset-frontend/src/explore/components/DataTableControl/index.tsx b/superset-frontend/src/explore/components/DataTableControl/index.tsx index 7cca07a408008..2deddf664eafb 100644 --- a/superset-frontend/src/explore/components/DataTableControl/index.tsx +++ b/superset-frontend/src/explore/components/DataTableControl/index.tsx @@ -21,6 +21,7 @@ import { css, GenericDataType, getTimeFormatter, + safeHtmlSpan, styled, t, TimeFormats, @@ -263,6 +264,7 @@ export const useTableColumns = ( datasourceId?: string, isVisible?: boolean, moreConfigs?: { [key: string]: Partial }, + allowHTML?: boolean, ) => { const [originalFormattedTimeColumns, setOriginalFormattedTimeColumns] = useState(getTimeColumns(datasourceId)); @@ -346,6 +348,9 @@ export const useTableColumns = ( ) { return timeFormatter(value); } + if (typeof value === 'string' && allowHTML) { + return safeHtmlSpan(value); + } return String(value); }, ...moreConfigs?.[key], diff --git a/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx b/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx index 5c66075750dc5..b542aad99643a 100644 --- 
a/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx +++ b/superset-frontend/src/explore/components/DataTablesPane/components/SamplesPane.tsx @@ -92,6 +92,8 @@ export const SamplesPane = ({ data, datasourceId, isVisible, + {}, // moreConfigs + true, // allowHTML ); const filteredData = useFilteredTableData(filterText, data); diff --git a/superset-frontend/src/explore/components/DataTablesPane/components/SingleQueryResultPane.tsx b/superset-frontend/src/explore/components/DataTablesPane/components/SingleQueryResultPane.tsx index 27d312cc3ccda..c2614dfda6ca6 100644 --- a/superset-frontend/src/explore/components/DataTablesPane/components/SingleQueryResultPane.tsx +++ b/superset-frontend/src/explore/components/DataTablesPane/components/SingleQueryResultPane.tsx @@ -44,6 +44,8 @@ export const SingleQueryResultPane = ({ data, datasourceId, isVisible, + {}, // moreConfigs + true, // allowHTML ); const filteredData = useFilteredTableData(filterText, data); diff --git a/superset-frontend/src/explore/constants.ts b/superset-frontend/src/explore/constants.ts index e539c16de754f..09362f423d7dd 100644 --- a/superset-frontend/src/explore/constants.ts +++ b/superset-frontend/src/explore/constants.ts @@ -39,7 +39,6 @@ export enum Operators { NOT_IN = 'NOT_IN', LIKE = 'LIKE', ILIKE = 'ILIKE', - REGEX = 'REGEX', IS_NOT_NULL = 'IS_NOT_NULL', IS_NULL = 'IS_NULL', LATEST_PARTITION = 'LATEST_PARTITION', @@ -75,7 +74,6 @@ export const OPERATOR_ENUM_TO_OPERATOR_TYPE: { display: t('Like (case insensitive)'), operation: 'ILIKE', }, - [Operators.REGEX]: { display: t('Regex'), operation: 'REGEX' }, [Operators.IS_NOT_NULL]: { display: t('Is not null'), operation: 'IS NOT NULL', diff --git a/superset-frontend/src/features/home/ActivityTable.tsx b/superset-frontend/src/features/home/ActivityTable.tsx index 22a6f597c6ea1..cd38c021f84c7 100644 --- a/superset-frontend/src/features/home/ActivityTable.tsx +++ b/superset-frontend/src/features/home/ActivityTable.tsx @@ -36,7 +36,7 @@ import EmptyState from './EmptyState'; import { WelcomeTable } from './types'; /** - * Return result from /api/v1/log/recent_activity/{user_id}/ + * Return result from /api/v1/log/recent_activity/ */ interface RecentActivity { action: string; diff --git a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx index 50f87b655d90b..0ebcb03a5d6bd 100644 --- a/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx +++ b/superset-frontend/src/filters/components/Select/SelectFilterPlugin.tsx @@ -21,7 +21,6 @@ import React, { useCallback, useEffect, useMemo, useState } from 'react'; import { AppSection, DataMask, - DataRecordValue, ensureIsArray, ExtraFormData, GenericDataType, @@ -36,7 +35,7 @@ import debounce from 'lodash/debounce'; import { useImmerReducer } from 'use-immer'; import { Select } from 'src/components'; import { SLOW_DEBOUNCE } from 'src/constants'; -import { propertyComparator } from 'src/components/Select/utils'; +import { hasOption, propertyComparator } from 'src/components/Select/utils'; import { FilterBarOrientation } from 'src/dashboard/types'; import { uniqWith, isEqual } from 'lodash'; import { PluginFilterSelectProps, SelectValue } from './types'; @@ -109,6 +108,7 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { ); const [col] = groupby; const [initialColtypeMap] = useState(coltypeMap); + const [search, setSearch] = useState(''); const [dataMask, dispatchDataMask] =
useImmerReducer(reducer, { extraFormData: {}, filterState, @@ -168,29 +168,25 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { const isDisabled = appSection === AppSection.FILTER_CONFIG_MODAL && defaultToFirstItem; - const debouncedOwnStateFunc = useCallback( - debounce((val: string) => { - dispatchDataMask({ - type: 'ownState', - ownState: { - coltypeMap: initialColtypeMap, - search: val, - }, - }); - }, SLOW_DEBOUNCE), - [], - ); - - const searchWrapper = useCallback( - (val: string) => { - if (searchAllOptions) { - debouncedOwnStateFunc(val); - } - }, - [debouncedOwnStateFunc, searchAllOptions], + const onSearch = useMemo( + () => + debounce((search: string) => { + setSearch(search); + if (searchAllOptions) { + dispatchDataMask({ + type: 'ownState', + ownState: { + coltypeMap: initialColtypeMap, + search, + }, + }); + } + }, SLOW_DEBOUNCE), + [dispatchDataMask, initialColtypeMap, searchAllOptions], ); const clearSuggestionSearch = useCallback(() => { + setSearch(''); if (searchAllOptions) { dispatchDataMask({ type: 'ownState', @@ -236,20 +232,29 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { return undefined; }, [filterState.validateMessage, filterState.validateStatus]); - const options = useMemo(() => { + const uniqueOptions = useMemo(() => { const allOptions = [...data]; - const uniqueOptions = uniqWith(allOptions, isEqual); - const selectOptions: { label: string; value: DataRecordValue }[] = []; - uniqueOptions.forEach(row => { + return uniqWith(allOptions, isEqual).map(row => { const [value] = groupby.map(col => row[col]); - selectOptions.push({ + return { label: labelFormatter(value, datatype), value, - }); + isNewOption: false, + }; }); - return selectOptions; }, [data, datatype, groupby, labelFormatter]); + const options = useMemo(() => { + if (search && !multiSelect && !hasOption(search, uniqueOptions, true)) { + uniqueOptions.unshift({ + label: search, + value: search, + isNewOption: true, + }); + } + return uniqueOptions; + }, [multiSelect, search, uniqueOptions]); + const sortComparator = useCallback( (a: AntdLabeledValue, b: AntdLabeledValue) => { const labelComparator = propertyComparator('label'); @@ -317,7 +322,7 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { showSearch={showSearch} mode={multiSelect ? 
'multiple' : 'single'} placeholder={placeholderText} - onSearch={searchWrapper} + onSearch={onSearch} onSelect={clearSuggestionSearch} onBlur={handleBlur} onFocus={setFocusedFilter} @@ -329,7 +334,6 @@ export default function PluginFilterSelect(props: PluginFilterSelectProps) { loading={isRefreshing} oneLine={filterBarOrientation === FilterBarOrientation.HORIZONTAL} invertSelection={inverseSelection} - // @ts-ignore options={options} sortComparator={sortComparator} onDropdownVisibleChange={setFilterActive} diff --git a/superset-frontend/src/logger/LogUtils.ts b/superset-frontend/src/logger/LogUtils.ts index 258b5dbb5eea9..289846fa1cab9 100644 --- a/superset-frontend/src/logger/LogUtils.ts +++ b/superset-frontend/src/logger/LogUtils.ts @@ -35,7 +35,7 @@ export const LOG_ACTIONS_EXPLORE_DASHBOARD_CHART = 'explore_dashboard_chart'; export const LOG_ACTIONS_EXPORT_CSV_DASHBOARD_CHART = 'export_csv_dashboard_chart'; export const LOG_ACTIONS_EXPORT_XLSX_DASHBOARD_CHART = - 'export_csv_dashboard_chart'; + 'export_xlsx_dashboard_chart'; export const LOG_ACTIONS_CHANGE_DASHBOARD_FILTER = 'change_dashboard_filter'; export const LOG_ACTIONS_DATASET_CREATION_EMPTY_CANCELLATION = 'dataset_creation_empty_cancellation'; diff --git a/superset-frontend/src/pages/ChartList/index.tsx b/superset-frontend/src/pages/ChartList/index.tsx index 5a869e3c4fc66..c853c40d8f972 100644 --- a/superset-frontend/src/pages/ChartList/index.tsx +++ b/superset-frontend/src/pages/ChartList/index.tsx @@ -68,7 +68,6 @@ import setupPlugins from 'src/setup/setupPlugins'; import InfoTooltip from 'src/components/InfoTooltip'; import CertifiedBadge from 'src/components/CertifiedBadge'; import { GenericLink } from 'src/components/GenericLink/GenericLink'; -import getBootstrapData from 'src/utils/getBootstrapData'; import Owner from 'src/types/Owner'; import { loadTags } from 'src/components/Tags/utils'; import ChartCard from 'src/features/charts/ChartCard'; @@ -156,8 +155,6 @@ const StyledActions = styled.div` color: ${({ theme }) => theme.colors.grayscale.base}; `; -const bootstrapData = getBootstrapData(); - function ChartList(props: ChartListProps) { const { addDangerToast, @@ -234,8 +231,6 @@ function ChartList(props: ChartListProps) { const canExport = hasPerm('can_export') && isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT); const initialSort = [{ id: 'changed_on_delta_humanized', desc: true }]; - const enableBroadUserAccess = - bootstrapData.common.conf.ENABLE_BROAD_ACTIVITY_ACCESS; const handleBulkChartExport = (chartsToExport: Chart[]) => { const ids = chartsToExport.map(({ id }) => id); handleResourceExport('chart', ids, () => { @@ -417,17 +412,9 @@ function ChartList(props: ChartListProps) { { Cell: ({ row: { - original: { - last_saved_by: lastSavedBy, - changed_by_url: changedByUrl, - }, + original: { last_saved_by: lastSavedBy }, }, - }: any) => - enableBroadUserAccess ? 
( - <a href={changedByUrl}>{changedByName(lastSavedBy)}</a> - ) : ( - <>{changedByName(lastSavedBy)}</> - ), + }: any) => <>{changedByName(lastSavedBy)}</>, Header: t('Modified by'), accessor: 'last_saved_by.first_name', size: 'xl', diff --git a/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx b/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx index be04c323318ec..bd91faf614111 100644 --- a/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx +++ b/superset-frontend/src/pages/DashboardList/DashboardList.test.jsx @@ -58,7 +58,6 @@ const mockDashboards = [...new Array(3)].map((_, i) => ({ url: 'url', dashboard_title: `title ${i}`, changed_by_name: 'user', - changed_by_url: 'changed_by_url', changed_by_fk: 1, published: true, changed_on_utc: new Date().toISOString(), diff --git a/superset-frontend/src/pages/DashboardList/index.tsx b/superset-frontend/src/pages/DashboardList/index.tsx index 22e7b6c12fc19..3db775e66ec82 100644 --- a/superset-frontend/src/pages/DashboardList/index.tsx +++ b/superset-frontend/src/pages/DashboardList/index.tsx @@ -54,7 +54,6 @@ import Dashboard from 'src/dashboard/containers/Dashboard'; import { Dashboard as CRUDDashboard } from 'src/views/CRUD/types'; import CertifiedBadge from 'src/components/CertifiedBadge'; import { loadTags } from 'src/components/Tags/utils'; -import getBootstrapData from 'src/utils/getBootstrapData'; import DashboardCard from 'src/features/dashboards/DashboardCard'; import { DashboardStatus } from 'src/features/dashboards/types'; @@ -84,7 +83,6 @@ interface DashboardListProps { interface Dashboard { changed_by_name: string; - changed_by_url: string; changed_on_delta_humanized: string; changed_by: string; dashboard_title: string; @@ -101,8 +99,6 @@ const Actions = styled.div` color: ${({ theme }) => theme.colors.grayscale.base}; `; -const bootstrapData = getBootstrapData(); - function DashboardList(props: DashboardListProps) { const { addDangerToast, @@ -143,8 +139,6 @@ function DashboardList(props: DashboardListProps) { const [importingDashboard, showImportModal] = useState(false); const [passwordFields, setPasswordFields] = useState([]); const [preparingExport, setPreparingExport] = useState(false); - const enableBroadUserAccess = - bootstrapData?.common?.conf?.ENABLE_BROAD_ACTIVITY_ACCESS; const [sshTunnelPasswordFields, setSSHTunnelPasswordFields] = useState< string[] >([]); @@ -195,7 +189,6 @@ function DashboardList(props: DashboardListProps) { if (dashboard.id === json?.result?.id) { const { changed_by_name, - changed_by_url, changed_by, dashboard_title = '', slug = '', @@ -210,7 +203,6 @@ function DashboardList(props: DashboardListProps) { return { ...dashboard, changed_by_name, - changed_by_url, changed_by, dashboard_title, slug, @@ -312,17 +304,9 @@ function DashboardList(props: DashboardListProps) { { Cell: ({ row: { - original: { - changed_by_name: changedByName, - changed_by_url: changedByUrl, - }, + original: { changed_by_name: changedByName }, }, - }: any) => - enableBroadUserAccess ?
( - <a href={changedByUrl}>{changedByName}</a> - ) : ( - <>{changedByName}</> - ), + }: any) => <>{changedByName}</>, Header: t('Modified by'), accessor: 'changed_by.first_name', size: 'xl', diff --git a/superset-frontend/src/pages/DatabaseList/index.tsx b/superset-frontend/src/pages/DatabaseList/index.tsx index 656dac89ea97c..e2ba4ea9b0370 100644 --- a/superset-frontend/src/pages/DatabaseList/index.tsx +++ b/superset-frontend/src/pages/DatabaseList/index.tsx @@ -74,7 +74,7 @@ const IconCheck = styled(Icons.Check)` `; const IconCancelX = styled(Icons.CancelX)` - color: ${({ theme }) => theme.colors.grayscale.dark1}; + color: ${({ theme }) => theme.colors.grayscale.light1}; `; const Actions = styled.div` diff --git a/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx b/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx index 861624716e806..358a5fcfcca34 100644 --- a/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx +++ b/superset-frontend/src/pages/DatasetList/DatasetList.test.tsx @@ -50,7 +50,6 @@ const datasetsEndpoint = 'glob:*/api/v1/dataset/?*'; const mockdatasets = [...new Array(3)].map((_, i) => ({ changed_by_name: 'user', kind: i === 0 ? 'virtual' : 'physical', // ensure there is 1 virtual - changed_by_url: 'changed_by_url', changed_by: 'user', changed_on: new Date().toISOString(), database_name: `db ${i}`, diff --git a/superset-frontend/src/pages/DatasetList/index.tsx b/superset-frontend/src/pages/DatasetList/index.tsx index fa006ad42502f..43913a828029c 100644 --- a/superset-frontend/src/pages/DatasetList/index.tsx +++ b/superset-frontend/src/pages/DatasetList/index.tsx @@ -109,7 +109,6 @@ const Actions = styled.div` type Dataset = { changed_by_name: string; - changed_by_url: string; changed_by: string; changed_on_delta_humanized: string; database: { diff --git a/superset-frontend/src/pages/Home/index.tsx b/superset-frontend/src/pages/Home/index.tsx index 21217ae3b18b4..00124eac7c5a6 100644 --- a/superset-frontend/src/pages/Home/index.tsx +++ b/superset-frontend/src/pages/Home/index.tsx @@ -160,7 +160,7 @@ function Welcome({ user, addDangerToast }: WelcomeProps) { const userid = user.userId; const id = userid!.toString(); // confident that user is not a guest user const params = rison.encode({ page_size: 6 }); - const recent = `/api/v1/log/recent_activity/${user.userId}/?q=${params}`; + const recent = `/api/v1/log/recent_activity/?q=${params}`; const [activeChild, setActiveChild] = useState('Loading'); const userKey = dangerouslyGetItemDoNotUse(id, null); let defaultChecked = false; diff --git a/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx b/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx index 44b9c86bf5017..a4621ed10eada 100644 --- a/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx +++ b/superset-frontend/src/pages/RowLevelSecurityList/RowLevelSecurityList.test.tsx @@ -50,7 +50,7 @@ const mockRules = [ }, { id: 5, - name: 'granter', + name: 'Gamma', }, ], tables: [ @@ -79,7 +79,7 @@ const mockRules = [ }, { id: 5, - name: 'granter', + name: 'Gamma', }, ], tables: [ diff --git a/superset-frontend/src/profile/components/Favorites.tsx b/superset-frontend/src/profile/components/Favorites.tsx index 1a52c8b047837..834f933071676 100644 --- a/superset-frontend/src/profile/components/Favorites.tsx +++ b/superset-frontend/src/profile/components/Favorites.tsx @@ -33,7 +33,7 @@ export default class Favorites extends React.PureComponent { const mutator = (payload: { result: Chart[] }) =>
payload.result.map(slice => ({ slice: <a href={slice.slice_url}>{slice.slice_name}</a>, - creator: <a href={slice.created_by_url}>{slice.created_by_name}</a>, + creator: slice.created_by_name, favorited: moment.utc(slice.changed_on_dttm).fromNow(), _favorited: slice.changed_on_dttm, })); diff --git a/superset-frontend/src/profile/components/RecentActivity.tsx b/superset-frontend/src/profile/components/RecentActivity.tsx index 975d8cb3ddf3d..2810fb3544963 100644 --- a/superset-frontend/src/profile/components/RecentActivity.tsx +++ b/superset-frontend/src/profile/components/RecentActivity.tsx @@ -48,7 +48,7 @@ export default function RecentActivity({ user }: RecentActivityProps) { className="table-condensed" mutator={mutator} sortable - dataEndpoint={`/api/v1/log/recent_activity/${user?.userId}/?q=${params}`} + dataEndpoint={`/api/v1/log/recent_activity/?q=${params}`} noDataText={t('No Data')} /> diff --git a/superset-frontend/src/profile/components/fixtures.tsx b/superset-frontend/src/profile/components/fixtures.tsx index e721b6dfa7510..d4a3612df4f41 100644 --- a/superset-frontend/src/profile/components/fixtures.tsx +++ b/superset-frontend/src/profile/components/fixtures.tsx @@ -29,7 +29,6 @@ export const user: UserWithPermissionsAndRoles = { ], sql_lab: [ ['menu_access', 'SQL Lab'], - ['can_sql_json', 'Superset'], ['can_search_queries', 'Superset'], ['can_csv', 'Superset'], ], diff --git a/superset-frontend/src/profile/types.ts b/superset-frontend/src/profile/types.ts index 1a4dc784a04af..370434dfe817d 100644 --- a/superset-frontend/src/profile/types.ts +++ b/superset-frontend/src/profile/types.ts @@ -31,7 +31,6 @@ export type Chart = { slice_name: string; slice_url: string; created_by_name?: string; - created_by_url?: string; changed_on_dttm: number; }; diff --git a/superset-frontend/src/types/Dataset.ts b/superset-frontend/src/types/Dataset.ts index 7d69932f6f0dd..1bb14207d6d30 100644 --- a/superset-frontend/src/types/Dataset.ts +++ b/superset-frontend/src/types/Dataset.ts @@ -20,7 +20,6 @@ import Owner from './Owner'; export default interface Dataset { changed_by_name: string; - changed_by_url: string; changed_by: string; changed_on_delta_humanized: string; database: { diff --git a/superset-frontend/src/views/CRUD/types.ts b/superset-frontend/src/views/CRUD/types.ts index 87748d1539711..20800b71ce474 100644 --- a/superset-frontend/src/views/CRUD/types.ts +++ b/superset-frontend/src/views/CRUD/types.ts @@ -55,7 +55,6 @@ export interface Dashboard { certified_by?: string; certification_details?: string; changed_by_name: string; - changed_by_url: string; changed_on_delta_humanized?: string; changed_on_utc?: string; changed_by: string; diff --git a/superset-frontend/src/views/store.ts b/superset-frontend/src/views/store.ts index 78d5a99714f02..193386a5ffba3 100644 --- a/superset-frontend/src/views/store.ts +++ b/superset-frontend/src/views/store.ts @@ -17,6 +17,7 @@ * under the License. */ import { configureStore, ConfigureStoreOptions, Store } from '@reduxjs/toolkit'; +import thunk from 'redux-thunk'; import { api } from 'src/hooks/apiResources/queryApi'; import messageToastReducer from 'src/components/MessageToasts/reducers'; import charts from 'src/components/Chart/chartReducer'; @@ -75,6 +76,22 @@ const userReducer = ( return user; }; +const getMiddleware: ConfigureStoreOptions['middleware'] = + getDefaultMiddleware => + process.env.REDUX_DEFAULT_MIDDLEWARE ?
getDefaultMiddleware({ + immutableCheck: { + warnAfter: 200, + }, + serializableCheck: { + // Ignores AbortController instances + ignoredActionPaths: [/queryController/g], + ignoredPaths: [/queryController/g], + warnAfter: 200, + }, + }).concat(logger, api.middleware) + : [thunk, logger, api.middleware]; + // TODO: This reducer is a combination of the Dashboard and Explore reducers. // The correct way of handling this is to unify the actions and reducers from both // modules in shared files. This involves a big refactor to unify the parameter types @@ -137,18 +154,7 @@ export function setupStore({ [api.reducerPath]: api.reducer, ...rootReducers, }, - middleware: getDefaultMiddleware => - getDefaultMiddleware({ - immutableCheck: { - warnAfter: 200, - }, - serializableCheck: { - // Ignores AbortController instances - ignoredActionPaths: [/queryController/g], - ignoredPaths: [/queryController/g], - warnAfter: 200, - }, - }).concat(logger, api.middleware), + middleware: getMiddleware, devTools: process.env.WEBPACK_MODE === 'development' && !disableDebugger, ...overrides, }); diff --git a/superset-frontend/webpack.config.js b/superset-frontend/webpack.config.js index cf61a352ed63a..f728ce23dfb2d 100644 --- a/superset-frontend/webpack.config.js +++ b/superset-frontend/webpack.config.js @@ -116,6 +116,8 @@ const plugins = [ // expose mode variable to other modules new webpack.DefinePlugin({ 'process.env.WEBPACK_MODE': JSON.stringify(mode), + 'process.env.REDUX_DEFAULT_MIDDLEWARE': + process.env.REDUX_DEFAULT_MIDDLEWARE, }), new CopyPlugin({ diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json index d8cb2968d42a4..0523f5a89346d 100644 --- a/superset-websocket/package-lock.json +++ b/superset-websocket/package-lock.json @@ -22,12 +22,12 @@ "@types/ioredis": "^4.27.8", "@types/jest": "^27.0.2", "@types/jsonwebtoken": "^9.0.2", - "@types/node": "^20.2.5", - "@types/uuid": "^9.0.1", - "@types/ws": "^8.5.4", - "@typescript-eslint/eslint-plugin": "^5.59.9", - "@typescript-eslint/parser": "^5.59.8", - "eslint": "^8.41.0", + "@types/node": "^20.3.1", + "@types/uuid": "^9.0.2", + "@types/ws": "^8.5.5", + "@typescript-eslint/eslint-plugin": "^5.59.11", + "@typescript-eslint/parser": "^5.59.11", + "eslint": "^8.42.0", "eslint-config-prettier": "^8.8.0", "jest": "^27.3.1", "prettier": "^2.8.8", @@ -746,18 +746,18 @@ } }, "node_modules/@eslint/js": { - "version": "8.41.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.41.0.tgz", - "integrity": "sha512-LxcyMGxwmTh2lY9FwHPGWOHmYFCZvbrFCBZL4FzSSsxsRPuhrYUg/49/0KDfW8tnIEaEHtfmn6+NPN+1DqaNmA==", + "version": "8.42.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.42.0.tgz", + "integrity": "sha512-6SWlXpWU5AvId8Ac7zjzmIOqMOba/JWY8XZ4A7q7Gn1Vlfg/SFFIlrtHXt9nPn4op9ZPAkl91Jao+QQv3r/ukw==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.8", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", - "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", + "integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==", "dev": true, "dependencies": { "@humanwhocodes/object-schema": "^1.2.1", @@ -1318,9 +1318,9 @@ } }, "node_modules/@types/node": { - 
"version": "20.2.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.2.5.tgz", - "integrity": "sha512-JJulVEQXmiY9Px5axXHeYGLSjhkZEnD+MDPDGbCbIAbMslkKwmygtZFy1X6s/075Yo94sf8GuSlFfPzysQrWZQ==", + "version": "20.3.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.3.1.tgz", + "integrity": "sha512-EhcH/wvidPy1WeML3TtYFGR83UzjxeWRen9V402T8aUGYsCHOmfoisV3ZSg03gAFIbLq8TnWOJ0f4cALtnSEUg==", "dev": true }, "node_modules/@types/prettier": { @@ -1342,15 +1342,15 @@ "dev": true }, "node_modules/@types/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-rFT3ak0/2trgvp4yYZo5iKFEPsET7vKydKF+VRCxlQ9bpheehyAJH89dAkaLEq/j/RZXJIqcgsmPJKUP1Z28HA==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.2.tgz", + "integrity": "sha512-kNnC1GFBLuhImSnV7w4njQkUiJi0ZXUycu1rUaouPqiKlXkh77JKgdRnTAp1x5eBwcIwbtI+3otwzuIDEuDoxQ==", "dev": true }, "node_modules/@types/ws": { - "version": "8.5.4", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.4.tgz", - "integrity": "sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==", + "version": "8.5.5", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.5.tgz", + "integrity": "sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==", "dev": true, "dependencies": { "@types/node": "*" @@ -1372,15 +1372,15 @@ "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.9.tgz", - "integrity": "sha512-4uQIBq1ffXd2YvF7MAvehWKW3zVv/w+mSfRAu+8cKbfj3nwzyqJLNcZJpQ/WZ1HLbJDiowwmQ6NO+63nCA+fqA==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.11.tgz", + "integrity": "sha512-XxuOfTkCUiOSyBWIvHlUraLw/JT/6Io1365RO6ZuI88STKMavJZPNMU0lFcUTeQXEhHiv64CbxYxBNoDVSmghg==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.4.0", - "@typescript-eslint/scope-manager": "5.59.9", - "@typescript-eslint/type-utils": "5.59.9", - "@typescript-eslint/utils": "5.59.9", + "@typescript-eslint/scope-manager": "5.59.11", + "@typescript-eslint/type-utils": "5.59.11", + "@typescript-eslint/utils": "5.59.11", "debug": "^4.3.4", "grapheme-splitter": "^1.0.4", "ignore": "^5.2.0", @@ -1405,62 +1405,15 @@ } } }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/scope-manager": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.9.tgz", - "integrity": "sha512-8RA+E+w78z1+2dzvK/tGZ2cpGigBZ58VMEHDZtpE1v+LLjzrYGc8mMaTONSxKyEkz3IuXFM0IqYiGHlCsmlZxQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/types": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.9.tgz", - "integrity": "sha512-uW8H5NRgTVneSVTfiCVffBb8AbwWSKg7qcA4Ot3JI3MPCJGsB4Db4BhvAODIIYE5mNj7Q+VJkK7JxmRhk2Lyjw==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - 
"url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/eslint-plugin/node_modules/@typescript-eslint/visitor-keys": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz", - "integrity": "sha512-bT7s0td97KMaLwpEBckbzj/YohnvXtqbe2XgqNvTl6RJVakY5mvENOTPvw5u66nljfZxthESpDozs86U+oLY8Q==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.59.9", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/parser": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.8.tgz", - "integrity": "sha512-AnR19RjJcpjoeGojmwZtCwBX/RidqDZtzcbG3xHrmz0aHHoOcbWnpDllenRDmDvsV0RQ6+tbb09/kyc+UT9Orw==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.11.tgz", + "integrity": "sha512-s9ZF3M+Nym6CAZEkJJeO2TFHHDsKAM3ecNkLuH4i4s8/RCPnF5JRip2GyviYkeEAcwGMJxkqG9h2dAsnA1nZpA==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "5.59.8", - "@typescript-eslint/types": "5.59.8", - "@typescript-eslint/typescript-estree": "5.59.8", + "@typescript-eslint/scope-manager": "5.59.11", + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/typescript-estree": "5.59.11", "debug": "^4.3.4" }, "engines": { @@ -1480,13 +1433,13 @@ } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.8.tgz", - "integrity": "sha512-/w08ndCYI8gxGf+9zKf1vtx/16y8MHrZs5/tnjHhMLNSixuNcJavSX4wAiPf4aS5x41Es9YPCn44MIe4cxIlig==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.11.tgz", + "integrity": "sha512-dHFOsxoLFtrIcSj5h0QoBT/89hxQONwmn3FOQ0GOQcLOOXm+MIrS8zEAhs4tWl5MraxCY3ZJpaXQQdFMc2Tu+Q==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.59.8", - "@typescript-eslint/visitor-keys": "5.59.8" + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/visitor-keys": "5.59.11" }, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1497,13 +1450,13 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.9.tgz", - "integrity": "sha512-ksEsT0/mEHg9e3qZu98AlSrONAQtrSTljL3ow9CGej8eRo7pe+yaC/mvTjptp23Xo/xIf2mLZKC6KPv4Sji26Q==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.11.tgz", + "integrity": "sha512-LZqVY8hMiVRF2a7/swmkStMYSoXMFlzL6sXV6U/2gL5cwnLWQgLEG8tjWPpaE4rMIdZ6VKWwcffPlo1jPfk43g==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "5.59.9", - "@typescript-eslint/utils": "5.59.9", + "@typescript-eslint/typescript-estree": "5.59.11", + "@typescript-eslint/utils": "5.59.11", "debug": "^4.3.4", "tsutils": "^3.21.0" }, @@ -1523,67 +1476,10 @@ } } }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/types": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.9.tgz", - "integrity": "sha512-uW8H5NRgTVneSVTfiCVffBb8AbwWSKg7qcA4Ot3JI3MPCJGsB4Db4BhvAODIIYE5mNj7Q+VJkK7JxmRhk2Lyjw==", - "dev": true, - "engines": { - 
"node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.9.tgz", - "integrity": "sha512-pmM0/VQ7kUhd1QyIxgS+aRvMgw+ZljB3eDb+jYyp6d2bC0mQWLzUDF+DLwCTkQ3tlNyVsvZRXjFyV0LkU/aXjA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/type-utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz", - "integrity": "sha512-bT7s0td97KMaLwpEBckbzj/YohnvXtqbe2XgqNvTl6RJVakY5mvENOTPvw5u66nljfZxthESpDozs86U+oLY8Q==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.59.9", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/types": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.8.tgz", - "integrity": "sha512-+uWuOhBTj/L6awoWIg0BlWy0u9TyFpCHrAuQ5bNfxDaZ1Ppb3mx6tUigc74LHcbHpOHuOTOJrBoAnhdHdaea1w==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.11.tgz", + "integrity": "sha512-epoN6R6tkvBYSc+cllrz+c2sOFWkbisJZWkOE+y3xHtvYaOE6Wk6B8e114McRJwFRjGvYdJwLXQH5c9osME/AA==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1594,13 +1490,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.8.tgz", - "integrity": "sha512-Jy/lPSDJGNow14vYu6IrW790p7HIf/SOV1Bb6lZ7NUkLc2iB2Z9elESmsaUtLw8kVqogSbtLH9tut5GCX1RLDg==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.11.tgz", + "integrity": "sha512-YupOpot5hJO0maupJXixi6l5ETdrITxeo5eBOeuV7RSKgYdU3G5cxO49/9WRnJq9EMrB7AuTSLH/bqOsXi7wPA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.59.8", - "@typescript-eslint/visitor-keys": "5.59.8", + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/visitor-keys": "5.59.11", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", @@ -1621,17 +1517,17 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.9.tgz", - "integrity": "sha512-1PuMYsju/38I5Ggblaeb98TOoUvjhRvLpLa1DoTOFaLWqaXl/1iQ1eGurTXgBY58NUdtfTXKP5xBq7q9NDaLKg==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.11.tgz", + "integrity": "sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg==", 
"dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@types/json-schema": "^7.0.9", "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.59.9", - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/typescript-estree": "5.59.9", + "@typescript-eslint/scope-manager": "5.59.11", + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/typescript-estree": "5.59.11", "eslint-scope": "^5.1.1", "semver": "^7.3.7" }, @@ -1646,87 +1542,13 @@ "eslint": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/scope-manager": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.9.tgz", - "integrity": "sha512-8RA+E+w78z1+2dzvK/tGZ2cpGigBZ58VMEHDZtpE1v+LLjzrYGc8mMaTONSxKyEkz3IuXFM0IqYiGHlCsmlZxQ==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/types": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.9.tgz", - "integrity": "sha512-uW8H5NRgTVneSVTfiCVffBb8AbwWSKg7qcA4Ot3JI3MPCJGsB4Db4BhvAODIIYE5mNj7Q+VJkK7JxmRhk2Lyjw==", - "dev": true, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/typescript-estree": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.9.tgz", - "integrity": "sha512-pmM0/VQ7kUhd1QyIxgS+aRvMgw+ZljB3eDb+jYyp6d2bC0mQWLzUDF+DLwCTkQ3tlNyVsvZRXjFyV0LkU/aXjA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/utils/node_modules/@typescript-eslint/visitor-keys": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz", - "integrity": "sha512-bT7s0td97KMaLwpEBckbzj/YohnvXtqbe2XgqNvTl6RJVakY5mvENOTPvw5u66nljfZxthESpDozs86U+oLY8Q==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "5.59.9", - "eslint-visitor-keys": "^3.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.8.tgz", - "integrity": "sha512-pJhi2ms0x0xgloT7xYabil3SGGlojNNKjK/q6dB3Ey0uJLMjK2UDGJvHieiyJVW/7C3KI+Z4Q3pEHkm4ejA+xQ==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.11.tgz", + "integrity": 
"sha512-KGYniTGG3AMTuKF9QBD7EIrvufkB6O6uX3knP73xbKLMpH+QRPcgnCxjWXSHjMRuOxFLovljqQgQpR0c7GvjoA==", "dev": true, "dependencies": { - "@typescript-eslint/types": "5.59.8", + "@typescript-eslint/types": "5.59.11", "eslint-visitor-keys": "^3.3.0" }, "engines": { @@ -2588,16 +2410,16 @@ } }, "node_modules/eslint": { - "version": "8.41.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.41.0.tgz", - "integrity": "sha512-WQDQpzGBOP5IrXPo4Hc0814r4/v2rrIsB0rhT7jtunIalgg6gYXWhRMOejVO8yH21T/FGaxjmFjBMNqcIlmH1Q==", + "version": "8.42.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.42.0.tgz", + "integrity": "sha512-ulg9Ms6E1WPf67PHaEY4/6E2tEn5/f7FXGzr3t9cBMugOmf1INYvuUwwh1aXQN4MfJ6a5K2iNwP3w4AColvI9A==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.4.0", "@eslint/eslintrc": "^2.0.3", - "@eslint/js": "8.41.0", - "@humanwhocodes/config-array": "^0.11.8", + "@eslint/js": "8.42.0", + "@humanwhocodes/config-array": "^0.11.10", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "ajv": "^6.10.0", @@ -6622,15 +6444,15 @@ } }, "@eslint/js": { - "version": "8.41.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.41.0.tgz", - "integrity": "sha512-LxcyMGxwmTh2lY9FwHPGWOHmYFCZvbrFCBZL4FzSSsxsRPuhrYUg/49/0KDfW8tnIEaEHtfmn6+NPN+1DqaNmA==", + "version": "8.42.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.42.0.tgz", + "integrity": "sha512-6SWlXpWU5AvId8Ac7zjzmIOqMOba/JWY8XZ4A7q7Gn1Vlfg/SFFIlrtHXt9nPn4op9ZPAkl91Jao+QQv3r/ukw==", "dev": true }, "@humanwhocodes/config-array": { - "version": "0.11.8", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", - "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "version": "0.11.10", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.10.tgz", + "integrity": "sha512-KVVjQmNUepDVGXNuoRRdmmEjruj0KfiGSbS8LVc12LMsWDQzRXJ0qdhN8L8uUigKpfEHRhlaQFY0ib1tnUbNeQ==", "dev": true, "requires": { "@humanwhocodes/object-schema": "^1.2.1", @@ -7102,9 +6924,9 @@ } }, "@types/node": { - "version": "20.2.5", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.2.5.tgz", - "integrity": "sha512-JJulVEQXmiY9Px5axXHeYGLSjhkZEnD+MDPDGbCbIAbMslkKwmygtZFy1X6s/075Yo94sf8GuSlFfPzysQrWZQ==", + "version": "20.3.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.3.1.tgz", + "integrity": "sha512-EhcH/wvidPy1WeML3TtYFGR83UzjxeWRen9V402T8aUGYsCHOmfoisV3ZSg03gAFIbLq8TnWOJ0f4cALtnSEUg==", "dev": true }, "@types/prettier": { @@ -7126,15 +6948,15 @@ "dev": true }, "@types/uuid": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.1.tgz", - "integrity": "sha512-rFT3ak0/2trgvp4yYZo5iKFEPsET7vKydKF+VRCxlQ9bpheehyAJH89dAkaLEq/j/RZXJIqcgsmPJKUP1Z28HA==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.2.tgz", + "integrity": "sha512-kNnC1GFBLuhImSnV7w4njQkUiJi0ZXUycu1rUaouPqiKlXkh77JKgdRnTAp1x5eBwcIwbtI+3otwzuIDEuDoxQ==", "dev": true }, "@types/ws": { - "version": "8.5.4", - "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.4.tgz", - "integrity": "sha512-zdQDHKUgcX/zBc4GrwsE/7dVdAD8JR4EuiAXiiUhhfyIJXXb2+PrGshFyeXWQPMmmZ2XxgaqclgpIC7eTXc1mg==", + "version": "8.5.5", + "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.5.tgz", + "integrity": 
"sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==", "dev": true, "requires": { "@types/node": "*" @@ -7156,132 +6978,71 @@ "dev": true }, "@typescript-eslint/eslint-plugin": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.9.tgz", - "integrity": "sha512-4uQIBq1ffXd2YvF7MAvehWKW3zVv/w+mSfRAu+8cKbfj3nwzyqJLNcZJpQ/WZ1HLbJDiowwmQ6NO+63nCA+fqA==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.59.11.tgz", + "integrity": "sha512-XxuOfTkCUiOSyBWIvHlUraLw/JT/6Io1365RO6ZuI88STKMavJZPNMU0lFcUTeQXEhHiv64CbxYxBNoDVSmghg==", "dev": true, "requires": { "@eslint-community/regexpp": "^4.4.0", - "@typescript-eslint/scope-manager": "5.59.9", - "@typescript-eslint/type-utils": "5.59.9", - "@typescript-eslint/utils": "5.59.9", + "@typescript-eslint/scope-manager": "5.59.11", + "@typescript-eslint/type-utils": "5.59.11", + "@typescript-eslint/utils": "5.59.11", "debug": "^4.3.4", "grapheme-splitter": "^1.0.4", "ignore": "^5.2.0", "natural-compare-lite": "^1.4.0", "semver": "^7.3.7", "tsutils": "^3.21.0" - }, - "dependencies": { - "@typescript-eslint/scope-manager": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.9.tgz", - "integrity": "sha512-8RA+E+w78z1+2dzvK/tGZ2cpGigBZ58VMEHDZtpE1v+LLjzrYGc8mMaTONSxKyEkz3IuXFM0IqYiGHlCsmlZxQ==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9" - } - }, - "@typescript-eslint/types": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.9.tgz", - "integrity": "sha512-uW8H5NRgTVneSVTfiCVffBb8AbwWSKg7qcA4Ot3JI3MPCJGsB4Db4BhvAODIIYE5mNj7Q+VJkK7JxmRhk2Lyjw==", - "dev": true - }, - "@typescript-eslint/visitor-keys": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz", - "integrity": "sha512-bT7s0td97KMaLwpEBckbzj/YohnvXtqbe2XgqNvTl6RJVakY5mvENOTPvw5u66nljfZxthESpDozs86U+oLY8Q==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.59.9", - "eslint-visitor-keys": "^3.3.0" - } - } } }, "@typescript-eslint/parser": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.8.tgz", - "integrity": "sha512-AnR19RjJcpjoeGojmwZtCwBX/RidqDZtzcbG3xHrmz0aHHoOcbWnpDllenRDmDvsV0RQ6+tbb09/kyc+UT9Orw==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.59.11.tgz", + "integrity": "sha512-s9ZF3M+Nym6CAZEkJJeO2TFHHDsKAM3ecNkLuH4i4s8/RCPnF5JRip2GyviYkeEAcwGMJxkqG9h2dAsnA1nZpA==", "dev": true, "requires": { - "@typescript-eslint/scope-manager": "5.59.8", - "@typescript-eslint/types": "5.59.8", - "@typescript-eslint/typescript-estree": "5.59.8", + "@typescript-eslint/scope-manager": "5.59.11", + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/typescript-estree": "5.59.11", "debug": "^4.3.4" } }, "@typescript-eslint/scope-manager": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.8.tgz", - "integrity": "sha512-/w08ndCYI8gxGf+9zKf1vtx/16y8MHrZs5/tnjHhMLNSixuNcJavSX4wAiPf4aS5x41Es9YPCn44MIe4cxIlig==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.11.tgz", + "integrity": 
"sha512-dHFOsxoLFtrIcSj5h0QoBT/89hxQONwmn3FOQ0GOQcLOOXm+MIrS8zEAhs4tWl5MraxCY3ZJpaXQQdFMc2Tu+Q==", "dev": true, "requires": { - "@typescript-eslint/types": "5.59.8", - "@typescript-eslint/visitor-keys": "5.59.8" + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/visitor-keys": "5.59.11" } }, "@typescript-eslint/type-utils": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.9.tgz", - "integrity": "sha512-ksEsT0/mEHg9e3qZu98AlSrONAQtrSTljL3ow9CGej8eRo7pe+yaC/mvTjptp23Xo/xIf2mLZKC6KPv4Sji26Q==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.59.11.tgz", + "integrity": "sha512-LZqVY8hMiVRF2a7/swmkStMYSoXMFlzL6sXV6U/2gL5cwnLWQgLEG8tjWPpaE4rMIdZ6VKWwcffPlo1jPfk43g==", "dev": true, "requires": { - "@typescript-eslint/typescript-estree": "5.59.9", - "@typescript-eslint/utils": "5.59.9", + "@typescript-eslint/typescript-estree": "5.59.11", + "@typescript-eslint/utils": "5.59.11", "debug": "^4.3.4", "tsutils": "^3.21.0" - }, - "dependencies": { - "@typescript-eslint/types": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.9.tgz", - "integrity": "sha512-uW8H5NRgTVneSVTfiCVffBb8AbwWSKg7qcA4Ot3JI3MPCJGsB4Db4BhvAODIIYE5mNj7Q+VJkK7JxmRhk2Lyjw==", - "dev": true - }, - "@typescript-eslint/typescript-estree": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.9.tgz", - "integrity": "sha512-pmM0/VQ7kUhd1QyIxgS+aRvMgw+ZljB3eDb+jYyp6d2bC0mQWLzUDF+DLwCTkQ3tlNyVsvZRXjFyV0LkU/aXjA==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/visitor-keys": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz", - "integrity": "sha512-bT7s0td97KMaLwpEBckbzj/YohnvXtqbe2XgqNvTl6RJVakY5mvENOTPvw5u66nljfZxthESpDozs86U+oLY8Q==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.59.9", - "eslint-visitor-keys": "^3.3.0" - } - } } }, "@typescript-eslint/types": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.8.tgz", - "integrity": "sha512-+uWuOhBTj/L6awoWIg0BlWy0u9TyFpCHrAuQ5bNfxDaZ1Ppb3mx6tUigc74LHcbHpOHuOTOJrBoAnhdHdaea1w==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.11.tgz", + "integrity": "sha512-epoN6R6tkvBYSc+cllrz+c2sOFWkbisJZWkOE+y3xHtvYaOE6Wk6B8e114McRJwFRjGvYdJwLXQH5c9osME/AA==", "dev": true }, "@typescript-eslint/typescript-estree": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.8.tgz", - "integrity": "sha512-Jy/lPSDJGNow14vYu6IrW790p7HIf/SOV1Bb6lZ7NUkLc2iB2Z9elESmsaUtLw8kVqogSbtLH9tut5GCX1RLDg==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.11.tgz", + "integrity": "sha512-YupOpot5hJO0maupJXixi6l5ETdrITxeo5eBOeuV7RSKgYdU3G5cxO49/9WRnJq9EMrB7AuTSLH/bqOsXi7wPA==", "dev": true, "requires": { - "@typescript-eslint/types": "5.59.8", - "@typescript-eslint/visitor-keys": "5.59.8", + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/visitor-keys": "5.59.11", "debug": "^4.3.4", 
"globby": "^11.1.0", "is-glob": "^4.0.3", @@ -7290,71 +7051,28 @@ } }, "@typescript-eslint/utils": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.9.tgz", - "integrity": "sha512-1PuMYsju/38I5Ggblaeb98TOoUvjhRvLpLa1DoTOFaLWqaXl/1iQ1eGurTXgBY58NUdtfTXKP5xBq7q9NDaLKg==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-5.59.11.tgz", + "integrity": "sha512-didu2rHSOMUdJThLk4aZ1Or8IcO3HzCw/ZvEjTTIfjIrcdd5cvSIwwDy2AOlE7htSNp7QIZ10fLMyRCveesMLg==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", "@types/json-schema": "^7.0.9", "@types/semver": "^7.3.12", - "@typescript-eslint/scope-manager": "5.59.9", - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/typescript-estree": "5.59.9", + "@typescript-eslint/scope-manager": "5.59.11", + "@typescript-eslint/types": "5.59.11", + "@typescript-eslint/typescript-estree": "5.59.11", "eslint-scope": "^5.1.1", "semver": "^7.3.7" - }, - "dependencies": { - "@typescript-eslint/scope-manager": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.59.9.tgz", - "integrity": "sha512-8RA+E+w78z1+2dzvK/tGZ2cpGigBZ58VMEHDZtpE1v+LLjzrYGc8mMaTONSxKyEkz3IuXFM0IqYiGHlCsmlZxQ==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9" - } - }, - "@typescript-eslint/types": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.59.9.tgz", - "integrity": "sha512-uW8H5NRgTVneSVTfiCVffBb8AbwWSKg7qcA4Ot3JI3MPCJGsB4Db4BhvAODIIYE5mNj7Q+VJkK7JxmRhk2Lyjw==", - "dev": true - }, - "@typescript-eslint/typescript-estree": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.59.9.tgz", - "integrity": "sha512-pmM0/VQ7kUhd1QyIxgS+aRvMgw+ZljB3eDb+jYyp6d2bC0mQWLzUDF+DLwCTkQ3tlNyVsvZRXjFyV0LkU/aXjA==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.59.9", - "@typescript-eslint/visitor-keys": "5.59.9", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "semver": "^7.3.7", - "tsutils": "^3.21.0" - } - }, - "@typescript-eslint/visitor-keys": { - "version": "5.59.9", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.9.tgz", - "integrity": "sha512-bT7s0td97KMaLwpEBckbzj/YohnvXtqbe2XgqNvTl6RJVakY5mvENOTPvw5u66nljfZxthESpDozs86U+oLY8Q==", - "dev": true, - "requires": { - "@typescript-eslint/types": "5.59.9", - "eslint-visitor-keys": "^3.3.0" - } - } } }, "@typescript-eslint/visitor-keys": { - "version": "5.59.8", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.8.tgz", - "integrity": "sha512-pJhi2ms0x0xgloT7xYabil3SGGlojNNKjK/q6dB3Ey0uJLMjK2UDGJvHieiyJVW/7C3KI+Z4Q3pEHkm4ejA+xQ==", + "version": "5.59.11", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.59.11.tgz", + "integrity": "sha512-KGYniTGG3AMTuKF9QBD7EIrvufkB6O6uX3knP73xbKLMpH+QRPcgnCxjWXSHjMRuOxFLovljqQgQpR0c7GvjoA==", "dev": true, "requires": { - "@typescript-eslint/types": "5.59.8", + "@typescript-eslint/types": "5.59.11", "eslint-visitor-keys": "^3.3.0" } }, @@ -8033,16 +7751,16 @@ } }, "eslint": { - "version": "8.41.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.41.0.tgz", - "integrity": 
"sha512-WQDQpzGBOP5IrXPo4Hc0814r4/v2rrIsB0rhT7jtunIalgg6gYXWhRMOejVO8yH21T/FGaxjmFjBMNqcIlmH1Q==", + "version": "8.42.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.42.0.tgz", + "integrity": "sha512-ulg9Ms6E1WPf67PHaEY4/6E2tEn5/f7FXGzr3t9cBMugOmf1INYvuUwwh1aXQN4MfJ6a5K2iNwP3w4AColvI9A==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.4.0", "@eslint/eslintrc": "^2.0.3", - "@eslint/js": "8.41.0", - "@humanwhocodes/config-array": "^0.11.8", + "@eslint/js": "8.42.0", + "@humanwhocodes/config-array": "^0.11.10", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "ajv": "^6.10.0", diff --git a/superset-websocket/package.json b/superset-websocket/package.json index 7179d9081a4f9..68c233fbb3d43 100644 --- a/superset-websocket/package.json +++ b/superset-websocket/package.json @@ -28,12 +28,12 @@ "@types/ioredis": "^4.27.8", "@types/jest": "^27.0.2", "@types/jsonwebtoken": "^9.0.2", - "@types/node": "^20.2.5", - "@types/uuid": "^9.0.1", - "@types/ws": "^8.5.4", - "@typescript-eslint/eslint-plugin": "^5.59.9", - "@typescript-eslint/parser": "^5.59.8", - "eslint": "^8.41.0", + "@types/node": "^20.3.1", + "@types/uuid": "^9.0.2", + "@types/ws": "^8.5.5", + "@typescript-eslint/eslint-plugin": "^5.59.11", + "@typescript-eslint/parser": "^5.59.11", + "eslint": "^8.42.0", "eslint-config-prettier": "^8.8.0", "jest": "^27.3.1", "prettier": "^2.8.8", diff --git a/superset/annotation_layers/annotations/commands/bulk_delete.py b/superset/annotation_layers/annotations/commands/bulk_delete.py index dd47047788a59..2e0c53808f4fb 100644 --- a/superset/annotation_layers/annotations/commands/bulk_delete.py +++ b/superset/annotation_layers/annotations/commands/bulk_delete.py @@ -21,9 +21,9 @@ AnnotationBulkDeleteFailedError, AnnotationNotFoundError, ) -from superset.annotation_layers.annotations.dao import AnnotationDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.annotation import AnnotationDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.models.annotations import Annotation logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/annotations/commands/create.py b/superset/annotation_layers/annotations/commands/create.py index 986b5642917e6..ed4e76cee669a 100644 --- a/superset/annotation_layers/annotations/commands/create.py +++ b/superset/annotation_layers/annotations/commands/create.py @@ -27,11 +27,10 @@ AnnotationInvalidError, AnnotationUniquenessValidationError, ) -from superset.annotation_layers.annotations.dao import AnnotationDAO from superset.annotation_layers.commands.exceptions import AnnotationLayerNotFoundError -from superset.annotation_layers.dao import AnnotationLayerDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.annotation import AnnotationDAO, AnnotationLayerDAO +from superset.daos.exceptions import DAOCreateFailedError logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/annotations/commands/delete.py b/superset/annotation_layers/annotations/commands/delete.py index 915f7f80cef64..b86ae997a4b4e 100644 --- a/superset/annotation_layers/annotations/commands/delete.py +++ b/superset/annotation_layers/annotations/commands/delete.py @@ -23,9 +23,9 @@ AnnotationDeleteFailedError, AnnotationNotFoundError, ) -from superset.annotation_layers.annotations.dao import AnnotationDAO from 
superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.annotation import AnnotationDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.models.annotations import Annotation logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/annotations/commands/update.py b/superset/annotation_layers/annotations/commands/update.py index 99ab20916501b..03797a555ba0d 100644 --- a/superset/annotation_layers/annotations/commands/update.py +++ b/superset/annotation_layers/annotations/commands/update.py @@ -28,11 +28,10 @@ AnnotationUniquenessValidationError, AnnotationUpdateFailedError, ) -from superset.annotation_layers.annotations.dao import AnnotationDAO from superset.annotation_layers.commands.exceptions import AnnotationLayerNotFoundError -from superset.annotation_layers.dao import AnnotationLayerDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOUpdateFailedError +from superset.daos.annotation import AnnotationDAO, AnnotationLayerDAO +from superset.daos.exceptions import DAOUpdateFailedError from superset.models.annotations import Annotation logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/annotations/dao.py b/superset/annotation_layers/annotations/dao.py deleted file mode 100644 index da69e576e5087..0000000000000 --- a/superset/annotation_layers/annotations/dao.py +++ /dev/null @@ -1,64 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import logging -from typing import Optional - -from sqlalchemy.exc import SQLAlchemyError - -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAODeleteFailedError -from superset.extensions import db -from superset.models.annotations import Annotation - -logger = logging.getLogger(__name__) - - -class AnnotationDAO(BaseDAO): - model_cls = Annotation - - @staticmethod - def bulk_delete(models: Optional[list[Annotation]], commit: bool = True) -> None: - item_ids = [model.id for model in models] if models else [] - try: - db.session.query(Annotation).filter(Annotation.id.in_(item_ids)).delete( - synchronize_session="fetch" - ) - if commit: - db.session.commit() - except SQLAlchemyError as ex: - db.session.rollback() - raise DAODeleteFailedError() from ex - - @staticmethod - def validate_update_uniqueness( - layer_id: int, short_descr: str, annotation_id: Optional[int] = None - ) -> bool: - """ - Validate if this annotation short description is unique. 
`id` is optional - and serves for validating on updates - - :param short_descr: The annotation short description - :param layer_id: The annotation layer current id - :param annotation_id: This annotation is (only for validating on updates) - :return: bool - """ - query = db.session.query(Annotation).filter( - Annotation.short_descr == short_descr, Annotation.layer_id == layer_id - ) - if annotation_id: - query = query.filter(Annotation.id != annotation_id) - return not db.session.query(query.exists()).scalar() diff --git a/superset/annotation_layers/commands/bulk_delete.py b/superset/annotation_layers/commands/bulk_delete.py index 4910dc4275f11..e4696065a6f31 100644 --- a/superset/annotation_layers/commands/bulk_delete.py +++ b/superset/annotation_layers/commands/bulk_delete.py @@ -22,9 +22,9 @@ AnnotationLayerBulkDeleteIntegrityError, AnnotationLayerNotFoundError, ) -from superset.annotation_layers.dao import AnnotationLayerDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.annotation import AnnotationLayerDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.models.annotations import AnnotationLayer logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/commands/create.py b/superset/annotation_layers/commands/create.py index 86b0cb3b85893..08ef4ad10d7b5 100644 --- a/superset/annotation_layers/commands/create.py +++ b/superset/annotation_layers/commands/create.py @@ -25,9 +25,9 @@ AnnotationLayerInvalidError, AnnotationLayerNameUniquenessValidationError, ) -from superset.annotation_layers.dao import AnnotationLayerDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.annotation import AnnotationLayerDAO +from superset.daos.exceptions import DAOCreateFailedError logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/commands/delete.py b/superset/annotation_layers/commands/delete.py index 3dbd7a574f2f8..0692d4dd834f7 100644 --- a/superset/annotation_layers/commands/delete.py +++ b/superset/annotation_layers/commands/delete.py @@ -24,9 +24,9 @@ AnnotationLayerDeleteIntegrityError, AnnotationLayerNotFoundError, ) -from superset.annotation_layers.dao import AnnotationLayerDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.annotation import AnnotationLayerDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.models.annotations import AnnotationLayer logger = logging.getLogger(__name__) diff --git a/superset/annotation_layers/commands/update.py b/superset/annotation_layers/commands/update.py index 67d869c0054d3..ca3a288413f06 100644 --- a/superset/annotation_layers/commands/update.py +++ b/superset/annotation_layers/commands/update.py @@ -26,9 +26,9 @@ AnnotationLayerNotFoundError, AnnotationLayerUpdateFailedError, ) -from superset.annotation_layers.dao import AnnotationLayerDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOUpdateFailedError +from superset.daos.annotation import AnnotationLayerDAO +from superset.daos.exceptions import DAOUpdateFailedError from superset.models.annotations import AnnotationLayer logger = logging.getLogger(__name__) diff --git a/superset/charts/api.py b/superset/charts/api.py index 48e0856c85496..c87b7bdda8dd2 100644 --- a/superset/charts/api.py +++ b/superset/charts/api.py @@ -48,7 +48,6 @@ from 
superset.charts.commands.importers.dispatcher import ImportChartsCommand from superset.charts.commands.update import UpdateChartCommand from superset.charts.commands.warm_up_cache import ChartWarmUpCacheCommand -from superset.charts.dao import ChartDAO from superset.charts.filters import ( ChartAllTextFilter, ChartCertifiedFilter, @@ -78,6 +77,7 @@ ) from superset.commands.importers.v1.utils import get_contents_from_bundle from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.chart import ChartDAO from superset.extensions import event_logger from superset.models.slice import Slice from superset.tasks.thumbnails import cache_chart_thumbnail @@ -161,7 +161,6 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "changed_by.first_name", "changed_by.last_name", "changed_by_name", - "changed_by_url", "changed_on_delta_humanized", "changed_on_dttm", "changed_on_utc", @@ -169,7 +168,6 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "created_by.id", "created_by.last_name", "created_by_name", - "created_by_url", "created_on_delta_humanized", "datasource_id", "datasource_name_text", diff --git a/superset/charts/commands/bulk_delete.py b/superset/charts/commands/bulk_delete.py index ac801b7421e7b..964c40d8129af 100644 --- a/superset/charts/commands/bulk_delete.py +++ b/superset/charts/commands/bulk_delete.py @@ -26,12 +26,12 @@ ChartForbiddenError, ChartNotFoundError, ) -from superset.charts.dao import ChartDAO from superset.commands.base import BaseCommand from superset.commands.exceptions import DeleteFailedError +from superset.daos.chart import ChartDAO +from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.models.slice import Slice -from superset.reports.dao import ReportScheduleDAO logger = logging.getLogger(__name__) diff --git a/superset/charts/commands/create.py b/superset/charts/commands/create.py index 78706b3a665c1..3eb0001bb9315 100644 --- a/superset/charts/commands/create.py +++ b/superset/charts/commands/create.py @@ -27,11 +27,11 @@ ChartInvalidError, DashboardsNotFoundValidationError, ) -from superset.charts.dao import ChartDAO from superset.commands.base import BaseCommand, CreateMixin from superset.commands.utils import get_datasource_by_id -from superset.dao.exceptions import DAOCreateFailedError -from superset.dashboards.dao import DashboardDAO +from superset.daos.chart import ChartDAO +from superset.daos.dashboard import DashboardDAO +from superset.daos.exceptions import DAOCreateFailedError logger = logging.getLogger(__name__) diff --git a/superset/charts/commands/delete.py b/superset/charts/commands/delete.py index 11f6e5925773d..184e9f8e1a16b 100644 --- a/superset/charts/commands/delete.py +++ b/superset/charts/commands/delete.py @@ -27,13 +27,13 @@ ChartForbiddenError, ChartNotFoundError, ) -from superset.charts.dao import ChartDAO from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.chart import ChartDAO +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.models.dashboard import Dashboard from superset.models.slice import Slice -from superset.reports.dao import ReportScheduleDAO logger = logging.getLogger(__name__) diff --git a/superset/charts/commands/export.py b/superset/charts/commands/export.py index 22310ade99ce4..d1183a999c813 100644 
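A pattern worth naming at this point in the patch: the bulk of these hunks apply one mechanical refactor, in which the per-domain `dao.py` modules (annotations, charts, CSS templates, dashboards, and so on) are consolidated into a single `superset/daos/` package, and `superset.dao.exceptions` becomes `superset.daos.exceptions`. A minimal sketch of the import rewrite, with module paths taken from the hunks themselves (the commented lines show the pre-refactor form):

```python
# Import style before this patch (per-domain DAO modules):
# from superset.charts.dao import ChartDAO
# from superset.dao.exceptions import DAODeleteFailedError

# Import style after this patch (consolidated superset/daos/ package):
from superset.daos.chart import ChartDAO
from superset.daos.exceptions import DAODeleteFailedError
```

Every command, API, and importer module touched below is updated the same way; no behavior changes, only module paths.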
--- a/superset/charts/commands/export.py +++ b/superset/charts/commands/export.py @@ -23,7 +23,7 @@ import yaml from superset.charts.commands.exceptions import ChartNotFoundError -from superset.charts.dao import ChartDAO +from superset.daos.chart import ChartDAO from superset.datasets.commands.export import ExportDatasetsCommand from superset.commands.export.models import ExportModelsCommand from superset.models.slice import Slice diff --git a/superset/charts/commands/importers/v1/__init__.py b/superset/charts/commands/importers/v1/__init__.py index 132df21b0815b..2a9c691159b4c 100644 --- a/superset/charts/commands/importers/v1/__init__.py +++ b/superset/charts/commands/importers/v1/__init__.py @@ -22,10 +22,10 @@ from superset.charts.commands.exceptions import ChartImportError from superset.charts.commands.importers.v1.utils import import_chart -from superset.charts.dao import ChartDAO from superset.charts.schemas import ImportV1ChartSchema from superset.commands.importers.v1 import ImportModelsCommand from superset.connectors.sqla.models import SqlaTable +from superset.daos.chart import ChartDAO from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema from superset.datasets.commands.importers.v1.utils import import_dataset diff --git a/superset/charts/commands/update.py b/superset/charts/commands/update.py index a4265d083539e..9a5b4e1f291d8 100644 --- a/superset/charts/commands/update.py +++ b/superset/charts/commands/update.py @@ -31,11 +31,11 @@ DashboardsNotFoundValidationError, DatasourceTypeUpdateRequiredValidationError, ) -from superset.charts.dao import ChartDAO from superset.commands.base import BaseCommand, UpdateMixin from superset.commands.utils import get_datasource_by_id -from superset.dao.exceptions import DAOUpdateFailedError -from superset.dashboards.dao import DashboardDAO +from superset.daos.chart import ChartDAO +from superset.daos.dashboard import DashboardDAO +from superset.daos.exceptions import DAOUpdateFailedError from superset.exceptions import SupersetSecurityException from superset.models.slice import Slice diff --git a/superset/charts/data/api.py b/superset/charts/data/api.py index 552044ebfa909..effad0e010c6d 100644 --- a/superset/charts/data/api.py +++ b/superset/charts/data/api.py @@ -41,7 +41,7 @@ from superset.charts.schemas import ChartDataQueryContextSchema from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType from superset.connectors.base.models import BaseDatasource -from superset.dao.exceptions import DatasourceNotFound +from superset.daos.exceptions import DatasourceNotFound from superset.exceptions import QueryObjectValidationError from superset.extensions import event_logger from superset.models.sql_lab import Query diff --git a/superset/charts/schemas.py b/superset/charts/schemas.py index f3958ab8feebf..1145d5be73694 100644 --- a/superset/charts/schemas.py +++ b/superset/charts/schemas.py @@ -164,7 +164,7 @@ class ChartEntityResponseSchema(Schema): id = fields.Integer(metadata={"description": id_description}) slice_name = fields.String(metadata={"description": slice_name_description}) cache_timeout = fields.Integer(metadata={"description": cache_timeout_description}) - changed_on = fields.String(metadata={"description": changed_on_description}) + changed_on = fields.DateTime(metadata={"description": changed_on_description}) description = fields.String(metadata={"description": description_description}) description_markeddown = fields.String( 
metadata={"description": description_markeddown_description} diff --git a/superset/commands/export/models.py b/superset/commands/export/models.py index 3f21f29281c44..27f4572af3fd4 100644 --- a/superset/commands/export/models.py +++ b/superset/commands/export/models.py @@ -23,7 +23,7 @@ from superset.commands.base import BaseCommand from superset.commands.exceptions import CommandException -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO from superset.utils.dict_import_export import EXPORT_VERSION METADATA_FILE_NAME = "metadata.yaml" diff --git a/superset/commands/importers/v1/__init__.py b/superset/commands/importers/v1/__init__.py index 09830bf3cf727..38d6568af4d07 100644 --- a/superset/commands/importers/v1/__init__.py +++ b/superset/commands/importers/v1/__init__.py @@ -30,7 +30,7 @@ METADATA_FILE_NAME, validate_metadata_type, ) -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO from superset.models.core import Database diff --git a/superset/commands/importers/v1/examples.py b/superset/commands/importers/v1/examples.py index 4c20e93ff7434..501e993e961e0 100644 --- a/superset/commands/importers/v1/examples.py +++ b/superset/commands/importers/v1/examples.py @@ -27,7 +27,7 @@ from superset.charts.schemas import ImportV1ChartSchema from superset.commands.exceptions import CommandException from superset.commands.importers.v1 import ImportModelsCommand -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO from superset.dashboards.commands.importers.v1 import ImportDashboardsCommand from superset.dashboards.commands.importers.v1.utils import ( find_chart_uuids, diff --git a/superset/commands/utils.py b/superset/commands/utils.py index 7bb13984f8c69..02b6b5f383516 100644 --- a/superset/commands/utils.py +++ b/superset/commands/utils.py @@ -27,8 +27,8 @@ OwnersNotFoundValidationError, RolesNotFoundValidationError, ) -from superset.dao.exceptions import DatasourceNotFound -from superset.datasource.dao import DatasourceDAO +from superset.daos.datasource import DatasourceDAO +from superset.daos.exceptions import DatasourceNotFound from superset.extensions import db from superset.utils.core import DatasourceType, get_user_id diff --git a/superset/common/query_context_factory.py b/superset/common/query_context_factory.py index 62018def8db24..a6fe549894db0 100644 --- a/superset/common/query_context_factory.py +++ b/superset/common/query_context_factory.py @@ -19,12 +19,12 @@ from typing import Any, TYPE_CHECKING from superset import app, db -from superset.charts.dao import ChartDAO from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType from superset.common.query_context import QueryContext from superset.common.query_object import QueryObject from superset.common.query_object_factory import QueryObjectFactory -from superset.datasource.dao import DatasourceDAO +from superset.daos.chart import ChartDAO +from superset.daos.datasource import DatasourceDAO from superset.models.slice import Slice from superset.utils.core import DatasourceDict, DatasourceType diff --git a/superset/common/query_context_processor.py b/superset/common/query_context_processor.py index 6553c19807ee2..58e9022736730 100644 --- a/superset/common/query_context_processor.py +++ b/superset/common/query_context_processor.py @@ -28,8 +28,6 @@ from typing_extensions import TypedDict from superset import app -from superset.annotation_layers.dao import AnnotationLayerDAO -from superset.charts.dao import ChartDAO from 
superset.common.chart_data import ChartDataResultFormat from superset.common.db_query_status import QueryStatus from superset.common.query_actions import get_query_results @@ -38,6 +36,8 @@ from superset.common.utils.time_range_utils import get_since_until_from_query_object from superset.connectors.base.models import BaseDatasource from superset.constants import CacheRegion, TimeGrain +from superset.daos.annotation import AnnotationLayerDAO +from superset.daos.chart import ChartDAO from superset.exceptions import ( InvalidPostProcessingError, QueryObjectValidationError, diff --git a/superset/common/query_object_factory.py b/superset/common/query_object_factory.py index 5676dc9eda5ff..ae85912cdfe78 100644 --- a/superset/common/query_object_factory.py +++ b/superset/common/query_object_factory.py @@ -27,7 +27,7 @@ from sqlalchemy.orm import sessionmaker from superset.connectors.base.models import BaseDatasource - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO class QueryObjectFactory: # pylint: disable=too-few-public-methods diff --git a/superset/config.py b/superset/config.py index 80e7132e68446..d62003991a8fb 100644 --- a/superset/config.py +++ b/superset/config.py @@ -146,6 +146,8 @@ def _try_json_readsha(filepath: str, length: int) -> str | None: ROW_LIMIT = 50000 # default row limit when requesting samples from datasource in explore view SAMPLES_ROW_LIMIT = 1000 +# default row limit for native filters +NATIVE_FILTER_DEFAULT_ROW_LIMIT = 1000 # max rows retrieved by filter select auto complete FILTER_SELECT_ROW_LIMIT = 10000 # default time filter in explore @@ -472,6 +474,10 @@ class D3Format(TypedDict, total=False): # otherwise enabling this flag won't have any effect on the DB. "SSH_TUNNELING": False, "AVOID_COLORS_COLLISION": True, + # Set to False to only allow viewing own recent activity + # or to disallow users from viewing other users profile page + # Do not show user info or profile in the menu + "MENU_HIDE_USER_INFO": False, } # ------------------------------ @@ -491,7 +497,11 @@ class D3Format(TypedDict, total=False): # ---------------------------------------------------------------------- SSH_TUNNEL_MANAGER_CLASS = "superset.extensions.ssh.SSHManager" SSH_TUNNEL_LOCAL_BIND_ADDRESS = "127.0.0.1" +#: Timeout (seconds) for tunnel connection (open_channel timeout) SSH_TUNNEL_TIMEOUT_SEC = 10.0 +#: Timeout (seconds) for transport socket (``socket.settimeout``) +SSH_TUNNEL_PACKET_TIMEOUT_SEC = 1.0 + # Feature flags may also be set via 'SUPERSET_FEATURE_' prefixed environment vars. DEFAULT_FEATURE_FLAGS.update( @@ -1071,10 +1081,6 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name # example: FLASK_APP_MUTATOR = lambda x: x.before_request = f FLASK_APP_MUTATOR = None -# Set this to false if you don't want users to be able to request/grant -# datasource access requests from/to other users. 
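The `superset/config.py` hunk above (its deletions continue just below) is worth pausing on: `NATIVE_FILTER_DEFAULT_ROW_LIMIT` and `SSH_TUNNEL_PACKET_TIMEOUT_SEC` are new settings, `MENU_HIDE_USER_INFO` moves from a top-level setting into `DEFAULT_FEATURE_FLAGS`, and `ENABLE_BROAD_ACTIVITY_ACCESS` is dropped outright (recent-activity queries now scope to the current user via `get_user_id()`, per the `LogDAO` hunk further down). A sketch of how a deployment might override the new knobs in `superset_config.py` — the names come from the diff, the values are illustrative, not recommendations:

```python
# superset_config.py -- illustrative overrides for the settings introduced above.

# Default row limit applied to native dashboard filters.
NATIVE_FILTER_DEFAULT_ROW_LIMIT = 1000

# SSH tunnel timeouts: open_channel timeout and transport socket timeout.
SSH_TUNNEL_TIMEOUT_SEC = 10.0
SSH_TUNNEL_PACKET_TIMEOUT_SEC = 1.0

# MENU_HIDE_USER_INFO is now a feature flag rather than a top-level setting;
# FEATURE_FLAGS entries are layered over DEFAULT_FEATURE_FLAGS.
FEATURE_FLAGS = {
    "MENU_HIDE_USER_INFO": True,
}
```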
-ENABLE_ACCESS_REQUEST = False
-
 # smtp server configuration
 EMAIL_NOTIFICATIONS = False  # all the emails are sent using dryrun
 SMTP_HOST = "localhost"
@@ -1481,13 +1487,6 @@ def EMAIL_HEADER_MUTATOR(  # pylint: disable=invalid-name,unused-argument
 #
 DATASET_HEALTH_CHECK: Callable[[SqlaTable], str] | None = None
 
-# Do not show user info or profile in the menu
-MENU_HIDE_USER_INFO = False
-
-# Set to False to only allow viewing own recent activity
-# or to disallow users from viewing other users profile page
-ENABLE_BROAD_ACTIVITY_ACCESS = True
-
 # the advanced data type key should correspond to that set in the column metadata
 ADVANCED_DATA_TYPES: dict[str, AdvancedDataType] = {
     "internet_address": internet_address,
diff --git a/superset/connectors/base/models.py b/superset/connectors/base/models.py
index d43d07863902b..f370a9f64ce1d 100644
--- a/superset/connectors/base/models.py
+++ b/superset/connectors/base/models.py
@@ -152,6 +152,7 @@ def is_virtual(self) -> bool:
     def slices(self) -> RelationshipProperty:
         return relationship(
             "Slice",
+            overlaps="table",
             primaryjoin=lambda: and_(
                 foreign(Slice.datasource_id) == self.id,
                 foreign(Slice.datasource_type) == self.type,
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index 41a9c89757891..c44b79062b732 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -32,7 +32,7 @@
 import pandas as pd
 import sqlalchemy as sa
 import sqlparse
-from flask import current_app, escape, Markup
+from flask import escape, Markup
 from flask_appbuilder import Model
 from flask_babel import lazy_gettext as _
 from jinja2.exceptions import TemplateError
@@ -58,6 +58,7 @@
     backref,
     Mapped,
     Query,
+    reconstructor,
     relationship,
     RelationshipProperty,
     Session,
@@ -218,6 +219,30 @@ class TableColumn(Model, BaseColumn, CertificationMixin):
     update_from_object_fields = [s for s in export_fields if s not in ("table_id",)]
     export_parent = "table"
 
+    def __init__(self, **kwargs: Any) -> None:
+        """
+        Construct a TableColumn object.
+
+        Historically a TableColumn object (from an ORM perspective) was tightly bound to
+        a SqlaTable object, however with the introduction of the Query datasource this
+        is no longer true, i.e., the SqlaTable relationship is optional.
+
+        Now the TableColumn is either directly associated with the Database object (
+        which is unknown to the ORM) or indirectly via the SqlaTable object (courtesy of
+        the ORM) depending on the context.
+        """
+
+        self._database: Database | None = kwargs.pop("database", None)
+        super().__init__(**kwargs)
+
+    @reconstructor
+    def init_on_load(self) -> None:
+        """
+        Construct a TableColumn object when invoked via the SQLAlchemy ORM.
+ """ + + self._database = None + @property def is_boolean(self) -> bool: """ @@ -251,51 +276,33 @@ def is_temporal(self) -> bool: return self.is_dttm return self.type_generic == GenericDataType.TEMPORAL + @property + def database(self) -> Database: + return self.table.database if self.table else self._database + @property def db_engine_spec(self) -> type[BaseEngineSpec]: - return self.table.db_engine_spec + return self.database.db_engine_spec @property def db_extra(self) -> dict[str, Any]: - return self.table.database.get_extra() + return self.database.get_extra() @property def type_generic(self) -> utils.GenericDataType | None: if self.is_dttm: return GenericDataType.TEMPORAL - bool_types = ("BOOL",) - num_types = ( - "DOUBLE", - "FLOAT", - "INT", - "BIGINT", - "NUMBER", - "LONG", - "REAL", - "NUMERIC", - "DECIMAL", - "MONEY", - ) - date_types = ("DATE", "TIME") - str_types = ("VARCHAR", "STRING", "CHAR") - - if self.table is None: - # Query.TableColumns don't have a reference to a table.db_engine_spec - # reference so this logic will manage rendering types - if self.type and any(map(lambda t: t in self.type.upper(), str_types)): - return GenericDataType.STRING - if self.type and any(map(lambda t: t in self.type.upper(), bool_types)): - return GenericDataType.BOOLEAN - if self.type and any(map(lambda t: t in self.type.upper(), num_types)): - return GenericDataType.NUMERIC - if self.type and any(map(lambda t: t in self.type.upper(), date_types)): - return GenericDataType.TEMPORAL - - column_spec = self.db_engine_spec.get_column_spec( - self.type, db_extra=self.db_extra + return ( + column_spec.generic_type # pylint: disable=used-before-assignment + if ( + column_spec := self.db_engine_spec.get_column_spec( + self.type, + db_extra=self.db_extra, + ) + ) + else None ) - return column_spec.generic_type if column_spec else None def get_sqla_col( self, @@ -312,7 +319,7 @@ def get_sqla_col( col = literal_column(expression, type_=type_) else: col = column(self.column_name, type_=type_) - col = self.table.make_sqla_column_compatible(col, label) + col = self.database.make_sqla_column_compatible(col, label) return col @property @@ -343,7 +350,7 @@ def get_timestamp_expression( type_ = column_spec.sqla_type if column_spec else DateTime if not self.expression and not time_grain and not is_epoch: sqla_col = column(self.column_name, type_=type_) - return self.table.make_sqla_column_compatible(sqla_col, label) + return self.database.make_sqla_column_compatible(sqla_col, label) if expression := self.expression: if template_processor: expression = template_processor.process_template(expression) @@ -351,7 +358,7 @@ def get_timestamp_expression( else: col = column(self.column_name, type_=type_) time_expr = self.db_engine_spec.get_timestamp_expr(col, pdf, time_grain) - return self.table.make_sqla_column_compatible(time_expr, label) + return self.database.make_sqla_column_compatible(time_expr, label) @property def data(self) -> dict[str, Any]: @@ -423,7 +430,7 @@ def get_sqla_col( expression = template_processor.process_template(expression) sqla_col: ColumnClause = literal_column(expression) - return self.table.make_sqla_column_compatible(sqla_col, label) + return self.table.database.make_sqla_column_compatible(sqla_col, label) @property def perm(self) -> str | None: @@ -592,15 +599,6 @@ def changed_by_name(self) -> str: return "" return str(self.changed_by) - @property - def changed_by_url(self) -> str: - if ( - not self.changed_by - or not current_app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - ): - return "" 
- return f"/superset/profile/{self.changed_by.username}" - @property def connection(self) -> str: return str(self.database) @@ -1009,23 +1007,6 @@ def adhoc_column_to_sqla( # pylint: disable=too-many-locals ) return self.make_sqla_column_compatible(sqla_column, label) - def make_sqla_column_compatible( - self, sqla_col: ColumnElement, label: str | None = None - ) -> ColumnElement: - """Takes a sqlalchemy column object and adds label info if supported by engine. - :param sqla_col: sqlalchemy column instance - :param label: alias/label that column is expected to have - :return: either a sql alchemy column or label instance if supported by engine - """ - label_expected = label or sqla_col.name - db_engine_spec = self.db_engine_spec - # add quotes to tables - if db_engine_spec.allows_alias_in_select: - label = db_engine_spec.make_label_compatible(label_expected) - sqla_col = sqla_col.label(label) - sqla_col.key = label_expected - return sqla_col - def make_orderby_compatible( self, select_exprs: list[ColumnElement], orderby_exprs: list[ColumnElement] ) -> None: @@ -1458,8 +1439,8 @@ def before_update( """ # pylint: disable=import-outside-toplevel + from superset.daos.dataset import DatasetDAO from superset.datasets.commands.exceptions import get_dataset_exist_error_msg - from superset.datasets.dao import DatasetDAO # Check whether the relevant attributes have changed. state = db.inspect(target) # pylint: disable=no-member @@ -1630,6 +1611,9 @@ class RowLevelSecurityFilter(Model, AuditMixinNullable): backref="row_level_security_filters", ) tables = relationship( - SqlaTable, secondary=RLSFilterTables, backref="row_level_security_filters" + SqlaTable, + overlaps="table", + secondary=RLSFilterTables, + backref="row_level_security_filters", ) clause = Column(Text, nullable=False) diff --git a/superset/css_templates/commands/bulk_delete.py b/superset/css_templates/commands/bulk_delete.py index 57612d90485d7..c676e9eed8402 100644 --- a/superset/css_templates/commands/bulk_delete.py +++ b/superset/css_templates/commands/bulk_delete.py @@ -22,8 +22,8 @@ CssTemplateBulkDeleteFailedError, CssTemplateNotFoundError, ) -from superset.css_templates.dao import CssTemplateDAO -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.css import CssTemplateDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.models.core import CssTemplate logger = logging.getLogger(__name__) diff --git a/superset/dao/__init__.py b/superset/daos/__init__.py similarity index 100% rename from superset/dao/__init__.py rename to superset/daos/__init__.py diff --git a/superset/annotation_layers/dao.py b/superset/daos/annotation.py similarity index 65% rename from superset/annotation_layers/dao.py rename to superset/daos/annotation.py index 67efc19f88009..171a708fa422b 100644 --- a/superset/annotation_layers/dao.py +++ b/superset/daos/annotation.py @@ -19,14 +19,51 @@ from sqlalchemy.exc import SQLAlchemyError -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.base import BaseDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.extensions import db from superset.models.annotations import Annotation, AnnotationLayer logger = logging.getLogger(__name__) +class AnnotationDAO(BaseDAO): + model_cls = Annotation + + @staticmethod + def bulk_delete(models: Optional[list[Annotation]], commit: bool = True) -> None: + item_ids = [model.id for model in models] if models else [] + try: + 
db.session.query(Annotation).filter(Annotation.id.in_(item_ids)).delete( + synchronize_session="fetch" + ) + if commit: + db.session.commit() + except SQLAlchemyError as ex: + db.session.rollback() + raise DAODeleteFailedError() from ex + + @staticmethod + def validate_update_uniqueness( + layer_id: int, short_descr: str, annotation_id: Optional[int] = None + ) -> bool: + """ + Validate if this annotation short description is unique. `id` is optional + and serves for validating on updates + + :param short_descr: The annotation short description + :param layer_id: The annotation layer current id + :param annotation_id: This annotation is (only for validating on updates) + :return: bool + """ + query = db.session.query(Annotation).filter( + Annotation.short_descr == short_descr, Annotation.layer_id == layer_id + ) + if annotation_id: + query = query.filter(Annotation.id != annotation_id) + return not db.session.query(query.exists()).scalar() + + class AnnotationLayerDAO(BaseDAO): model_cls = AnnotationLayer diff --git a/superset/dao/base.py b/superset/daos/base.py similarity index 99% rename from superset/dao/base.py rename to superset/daos/base.py index 539dbab2d5141..6465e5b177b08 100644 --- a/superset/dao/base.py +++ b/superset/daos/base.py @@ -23,7 +23,7 @@ from sqlalchemy.exc import SQLAlchemyError, StatementError from sqlalchemy.orm import Session -from superset.dao.exceptions import ( +from superset.daos.exceptions import ( DAOConfigError, DAOCreateFailedError, DAODeleteFailedError, diff --git a/superset/charts/dao.py b/superset/daos/chart.py similarity index 98% rename from superset/charts/dao.py rename to superset/daos/chart.py index 9c6b2c26ef55d..838d93abdf8b9 100644 --- a/superset/charts/dao.py +++ b/superset/daos/chart.py @@ -22,7 +22,7 @@ from sqlalchemy.exc import SQLAlchemyError from superset.charts.filters import ChartFilter -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO from superset.extensions import db from superset.models.core import FavStar, FavStarClassName from superset.models.slice import Slice diff --git a/superset/css_templates/dao.py b/superset/daos/css.py similarity index 94% rename from superset/css_templates/dao.py rename to superset/daos/css.py index bc1a796269384..224277a40ad1c 100644 --- a/superset/css_templates/dao.py +++ b/superset/daos/css.py @@ -19,8 +19,8 @@ from sqlalchemy.exc import SQLAlchemyError -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.base import BaseDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.extensions import db from superset.models.core import CssTemplate diff --git a/superset/dashboards/dao.py b/superset/daos/dashboard.py similarity index 84% rename from superset/dashboards/dao.py rename to superset/daos/dashboard.py index 9de94f9493bc8..1e31591e1f56b 100644 --- a/superset/dashboards/dao.py +++ b/superset/daos/dashboard.py @@ -20,16 +20,28 @@ from typing import Any, Optional, Union from flask import g +from flask_appbuilder.models.sqla import Model from flask_appbuilder.models.sqla.interface import SQLAInterface from sqlalchemy.exc import SQLAlchemyError from superset import security_manager -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO +from superset.daos.exceptions import DAOConfigError, DAOCreateFailedError from superset.dashboards.commands.exceptions import DashboardNotFoundError +from superset.dashboards.filter_sets.consts import ( + DASHBOARD_ID_FIELD, + 
DESCRIPTION_FIELD, + JSON_METADATA_FIELD, + NAME_FIELD, + OWNER_ID_FIELD, + OWNER_TYPE_FIELD, +) from superset.dashboards.filters import DashboardAccessFilter, is_uuid from superset.extensions import db from superset.models.core import FavStar, FavStarClassName from superset.models.dashboard import Dashboard, id_or_slug_filter +from superset.models.embedded_dashboard import EmbeddedDashboard +from superset.models.filter_set import FilterSet from superset.models.slice import Slice from superset.utils.core import get_user_id from superset.utils.dashboard_filter_scopes_converter import copy_filter_scopes @@ -365,3 +377,59 @@ def remove_favorite(dashboard: Dashboard) -> None: if fav: db.session.delete(fav) db.session.commit() + + +class EmbeddedDashboardDAO(BaseDAO): + model_cls = EmbeddedDashboard + # There isn't really a regular scenario where we would rather get Embedded by id + id_column_name = "uuid" + + @staticmethod + def upsert(dashboard: Dashboard, allowed_domains: list[str]) -> EmbeddedDashboard: + """ + Sets up a dashboard to be embeddable. + Upsert is used to preserve the embedded_dashboard uuid across updates. + """ + embedded: EmbeddedDashboard = ( + dashboard.embedded[0] if dashboard.embedded else EmbeddedDashboard() + ) + embedded.allow_domain_list = ",".join(allowed_domains) + dashboard.embedded = [embedded] + db.session.commit() + return embedded + + @classmethod + def create(cls, properties: dict[str, Any], commit: bool = True) -> Any: + """ + Use EmbeddedDashboardDAO.upsert() instead. + At least, until we are ok with more than one embedded instance per dashboard. + """ + raise NotImplementedError("Use EmbeddedDashboardDAO.upsert() instead.") + + +class FilterSetDAO(BaseDAO): + model_cls = FilterSet + + @classmethod + def create(cls, properties: dict[str, Any], commit: bool = True) -> Model: + if cls.model_cls is None: + raise DAOConfigError() + model = FilterSet() + setattr(model, NAME_FIELD, properties[NAME_FIELD]) + setattr(model, JSON_METADATA_FIELD, properties[JSON_METADATA_FIELD]) + setattr(model, DESCRIPTION_FIELD, properties.get(DESCRIPTION_FIELD, None)) + setattr( + model, + OWNER_ID_FIELD, + properties.get(OWNER_ID_FIELD, properties[DASHBOARD_ID_FIELD]), + ) + setattr(model, OWNER_TYPE_FIELD, properties[OWNER_TYPE_FIELD]) + setattr(model, DASHBOARD_ID_FIELD, properties[DASHBOARD_ID_FIELD]) + try: + db.session.add(model) + if commit: + db.session.commit() + except SQLAlchemyError as ex: # pragma: no cover + db.session.rollback() + raise DAOCreateFailedError() from ex + return model diff --git a/superset/databases/dao.py b/superset/daos/database.py similarity index 84% rename from superset/databases/dao.py rename to superset/daos/database.py index 9ce3b5e73ec2b..569568472ae16 100644 --- a/superset/databases/dao.py +++ b/superset/daos/database.py @@ -17,7 +17,7 @@ import logging from typing import Any, Optional -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO from superset.databases.filters import DatabaseFilter from superset.databases.ssh_tunnel.models import SSHTunnel from superset.extensions import db @@ -26,6 +26,7 @@ from superset.models.slice import Slice from superset.models.sql_lab import TabState from superset.utils.core import DatasourceType +from superset.utils.ssh_tunnel import unmask_password_info logger = logging.getLogger(__name__) @@ -135,3 +136,28 @@ def get_ssh_tunnel(cls, database_id: int) -> Optional[SSHTunnel]: ) return ssh_tunnel + + +class SSHTunnelDAO(BaseDAO): + model_cls = SSHTunnel + + @classmethod + def update( 
+ cls, + model: SSHTunnel, + properties: dict[str, Any], + commit: bool = True, + ) -> SSHTunnel: + """ + Unmask ``password``, ``private_key`` and ``private_key_password`` before updating. + + When a database is edited the user sees a masked version of + the aforementioned fields. + + The masked values should be unmasked before the ssh tunnel is updated. + """ + # ID cannot be updated so we remove it if present in the payload + properties.pop("id", None) + properties = unmask_password_info(properties, model) + + return super().update(model, properties, commit) diff --git a/superset/datasets/dao.py b/superset/daos/dataset.py similarity index 99% rename from superset/datasets/dao.py rename to superset/daos/dataset.py index f4d46be109799..3937e6c312024 100644 --- a/superset/datasets/dao.py +++ b/superset/daos/dataset.py @@ -20,7 +20,7 @@ from sqlalchemy.exc import SQLAlchemyError from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO from superset.extensions import db from superset.models.core import Database from superset.models.dashboard import Dashboard diff --git a/superset/datasource/dao.py b/superset/daos/datasource.py similarity index 94% rename from superset/datasource/dao.py rename to superset/daos/datasource.py index 4682f070e2bed..684106161c34b 100644 --- a/superset/datasource/dao.py +++ b/superset/daos/datasource.py @@ -21,8 +21,8 @@ from sqlalchemy.orm import Session from superset.connectors.sqla.models import SqlaTable -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError +from superset.daos.base import BaseDAO +from superset.daos.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError from superset.datasets.models import Dataset from superset.models.sql_lab import Query, SavedQuery from superset.tables.models import Table diff --git a/superset/dao/exceptions.py b/superset/daos/exceptions.py similarity index 100% rename from superset/dao/exceptions.py rename to superset/daos/exceptions.py diff --git a/superset/views/log/dao.py b/superset/daos/log.py similarity index 95% rename from superset/views/log/dao.py rename to superset/daos/log.py index 87bc0817daf98..81767a48cba90 100644 --- a/superset/views/log/dao.py +++ b/superset/daos/log.py @@ -22,10 +22,11 @@ from sqlalchemy.sql import functions as func from superset import db -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO from superset.models.core import Log from superset.models.dashboard import Dashboard from superset.models.slice import Slice +from superset.utils.core import get_user_id from superset.utils.dates import datetime_to_epoch @@ -34,8 +35,12 @@ class LogDAO(BaseDAO): @staticmethod def get_recent_activity( - user_id: int, actions: list[str], distinct: bool, page: int, page_size: int + actions: list[str], + distinct: bool, + page: int, + page_size: int, ) -> list[dict[str, Any]]: + user_id = get_user_id() has_subject_title = or_( and_( Dashboard.dashboard_title is not None, diff --git a/superset/queries/dao.py b/superset/daos/query.py similarity index 80% rename from superset/queries/dao.py rename to superset/daos/query.py index e9fe15cac5c7c..8aca1a4e25649 100644 --- a/superset/queries/dao.py +++ b/superset/daos/query.py @@ -16,15 +16,19 @@ # under the License. 
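The `SSHTunnelDAO.update` added above strips the immutable `id` and unmasks secret fields before delegating to the base `update`, because clients echo back masked placeholders for `password`, `private_key`, and `private_key_password` when editing a database. A standalone sketch of that unmasking step — the mask token and helper here are assumptions for illustration; the real helper is `superset.utils.ssh_tunnel.unmask_password_info` from the hunk above:

```python
# Illustrative "unmask before update" merge: masked placeholders in the payload
# are replaced with the currently stored secrets. MASKED is an assumed token.
MASKED = "XXXXXXXXXX"


def unmask(properties: dict, current: dict) -> dict:
    """Replace masked placeholders with the stored secret values."""
    return {
        key: current.get(key) if value == MASKED else value
        for key, value in properties.items()
    }


stored = {"password": "hunter2", "private_key": "old-key-material"}
payload = {"password": MASKED, "private_key": "new-key-material"}
assert unmask(payload, stored) == {
    "password": "hunter2",          # untouched secret restored
    "private_key": "new-key-material",  # genuinely edited value kept
}
```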
import logging from datetime import datetime -from typing import Any, Union +from typing import Any, Optional, Union + +from sqlalchemy.exc import SQLAlchemyError from superset import sql_lab from superset.common.db_query_status import QueryStatus -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.exceptions import QueryNotFoundException, SupersetCancelQueryException from superset.extensions import db from superset.models.sql_lab import Query, SavedQuery from superset.queries.filters import QueryFilter +from superset.queries.saved_queries.filters import SavedQueryFilter from superset.utils.core import get_user_id from superset.utils.dates import now_as_float @@ -95,3 +99,21 @@ def stop_query(client_id: str) -> None: query.status = QueryStatus.STOPPED query.end_time = now_as_float() db.session.commit() + + +class SavedQueryDAO(BaseDAO): + model_cls = SavedQuery + base_filter = SavedQueryFilter + + @staticmethod + def bulk_delete(models: Optional[list[SavedQuery]], commit: bool = True) -> None: + item_ids = [model.id for model in models] if models else [] + try: + db.session.query(SavedQuery).filter(SavedQuery.id.in_(item_ids)).delete( + synchronize_session="fetch" + ) + if commit: + db.session.commit() + except SQLAlchemyError as ex: + db.session.rollback() + raise DAODeleteFailedError() from ex diff --git a/superset/reports/dao.py b/superset/daos/report.py similarity index 98% rename from superset/reports/dao.py rename to superset/daos/report.py index 64777e959ae11..4f8d914adc9b4 100644 --- a/superset/reports/dao.py +++ b/superset/daos/report.py @@ -23,8 +23,8 @@ from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import Session -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError +from superset.daos.base import BaseDAO +from superset.daos.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.extensions import db from superset.reports.filters import ReportScheduleFilter from superset.reports.models import ( diff --git a/superset/row_level_security/dao.py b/superset/daos/security.py similarity index 95% rename from superset/row_level_security/dao.py rename to superset/daos/security.py index 1226e4d5498a0..a435f224a651e 100644 --- a/superset/row_level_security/dao.py +++ b/superset/daos/security.py @@ -16,7 +16,7 @@ # under the License. 
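`SavedQueryDAO.bulk_delete` above follows the same shape as `AnnotationDAO.bulk_delete` earlier in the patch: a single `IN`-based bulk `DELETE`, an optional commit, and a rollback that re-raises as a DAO error. Restated standalone with the non-obvious argument commented — imports mirror the moved modules, but treat this as a sketch rather than the canonical implementation:

```python
from typing import Optional

from sqlalchemy.exc import SQLAlchemyError

from superset.daos.exceptions import DAODeleteFailedError
from superset.extensions import db
from superset.models.sql_lab import SavedQuery


def bulk_delete(models: Optional[list[SavedQuery]], commit: bool = True) -> None:
    item_ids = [model.id for model in models] if models else []
    try:
        # synchronize_session="fetch" makes SQLAlchemy fetch the affected
        # primary keys so in-session objects stay consistent with the rows
        # removed by the bulk DELETE.
        db.session.query(SavedQuery).filter(SavedQuery.id.in_(item_ids)).delete(
            synchronize_session="fetch"
        )
        if commit:
            db.session.commit()
    except SQLAlchemyError as ex:
        db.session.rollback()
        raise DAODeleteFailedError() from ex
```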
from superset.connectors.sqla.models import RowLevelSecurityFilter -from superset.dao.base import BaseDAO +from superset.daos.base import BaseDAO class RLSDAO(BaseDAO): diff --git a/superset/tags/dao.py b/superset/daos/tag.py similarity index 98% rename from superset/tags/dao.py rename to superset/daos/tag.py index 9ea61f5c90d0b..ec991edb13680 100644 --- a/superset/tags/dao.py +++ b/superset/daos/tag.py @@ -20,8 +20,8 @@ from sqlalchemy.exc import SQLAlchemyError -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError +from superset.daos.base import BaseDAO +from superset.daos.exceptions import DAOCreateFailedError, DAODeleteFailedError from superset.extensions import db from superset.models.dashboard import Dashboard from superset.models.slice import Slice diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py index be248f7877f5f..3b8b70e1678d2 100644 --- a/superset/dashboards/api.py +++ b/superset/dashboards/api.py @@ -38,6 +38,7 @@ from superset.commands.importers.exceptions import NoValidFilesFoundError from superset.commands.importers.v1.utils import get_contents_from_bundle from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.dashboard import DashboardDAO, EmbeddedDashboardDAO from superset.dashboards.commands.bulk_delete import BulkDeleteDashboardCommand from superset.dashboards.commands.create import CreateDashboardCommand from superset.dashboards.commands.delete import DeleteDashboardCommand @@ -54,7 +55,6 @@ from superset.dashboards.commands.export import ExportDashboardsCommand from superset.dashboards.commands.importers.dispatcher import ImportDashboardsCommand from superset.dashboards.commands.update import UpdateDashboardCommand -from superset.dashboards.dao import DashboardDAO from superset.dashboards.filters import ( DashboardAccessFilter, DashboardCertifiedFilter, @@ -80,7 +80,6 @@ openapi_spec_methods_override, thumbnail_query_schema, ) -from superset.embedded.dao import EmbeddedDAO from superset.extensions import event_logger from superset.models.dashboard import Dashboard from superset.models.embedded_dashboard import EmbeddedDashboard @@ -174,7 +173,6 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]: "changed_by.last_name", "changed_by.id", "changed_by_name", - "changed_by_url", "changed_on_utc", "changed_on_delta_humanized", "created_on_delta_humanized", @@ -1321,7 +1319,7 @@ def set_embedded(self, dashboard: Dashboard) -> Response: """ try: body = self.embedded_config_schema.load(request.json) - embedded = EmbeddedDAO.upsert(dashboard, body["allowed_domains"]) + embedded = EmbeddedDashboardDAO.upsert(dashboard, body["allowed_domains"]) result = self.embedded_response_schema.dump(embedded) return self.response(200, result=result) except ValidationError as error: diff --git a/superset/dashboards/commands/bulk_delete.py b/superset/dashboards/commands/bulk_delete.py index 385f1fbc6d285..4802c9f101764 100644 --- a/superset/dashboards/commands/bulk_delete.py +++ b/superset/dashboards/commands/bulk_delete.py @@ -22,16 +22,16 @@ from superset import security_manager from superset.commands.base import BaseCommand from superset.commands.exceptions import DeleteFailedError +from superset.daos.dashboard import DashboardDAO +from superset.daos.report import ReportScheduleDAO from superset.dashboards.commands.exceptions import ( DashboardBulkDeleteFailedError, DashboardBulkDeleteFailedReportsExistError, DashboardForbiddenError, 
DashboardNotFoundError, ) -from superset.dashboards.dao import DashboardDAO from superset.exceptions import SupersetSecurityException from superset.models.dashboard import Dashboard -from superset.reports.dao import ReportScheduleDAO logger = logging.getLogger(__name__) diff --git a/superset/dashboards/commands/create.py b/superset/dashboards/commands/create.py index 58acc379baf5b..98ecd6eb78e33 100644 --- a/superset/dashboards/commands/create.py +++ b/superset/dashboards/commands/create.py @@ -22,13 +22,13 @@ from superset.commands.base import BaseCommand, CreateMixin from superset.commands.utils import populate_roles -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.dashboard import DashboardDAO +from superset.daos.exceptions import DAOCreateFailedError from superset.dashboards.commands.exceptions import ( DashboardCreateFailedError, DashboardInvalidError, DashboardSlugExistsValidationError, ) -from superset.dashboards.dao import DashboardDAO logger = logging.getLogger(__name__) diff --git a/superset/dashboards/commands/delete.py b/superset/dashboards/commands/delete.py index 8ce7cb0cbf84f..f774b92a51196 100644 --- a/superset/dashboards/commands/delete.py +++ b/superset/dashboards/commands/delete.py @@ -22,17 +22,17 @@ from superset import security_manager from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.dashboard import DashboardDAO +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.report import ReportScheduleDAO from superset.dashboards.commands.exceptions import ( DashboardDeleteFailedError, DashboardDeleteFailedReportsExistError, DashboardForbiddenError, DashboardNotFoundError, ) -from superset.dashboards.dao import DashboardDAO from superset.exceptions import SupersetSecurityException from superset.models.dashboard import Dashboard -from superset.reports.dao import ReportScheduleDAO logger = logging.getLogger(__name__) diff --git a/superset/dashboards/commands/export.py b/superset/dashboards/commands/export.py index 2e70e29bb0caf..4e25e5c1fc1d7 100644 --- a/superset/dashboards/commands/export.py +++ b/superset/dashboards/commands/export.py @@ -28,10 +28,10 @@ from superset.charts.commands.export import ExportChartsCommand from superset.dashboards.commands.exceptions import DashboardNotFoundError from superset.dashboards.commands.importers.v1.utils import find_chart_uuids -from superset.dashboards.dao import DashboardDAO +from superset.daos.dashboard import DashboardDAO from superset.commands.export.models import ExportModelsCommand from superset.datasets.commands.export import ExportDatasetsCommand -from superset.datasets.dao import DatasetDAO +from superset.daos.dataset import DatasetDAO from superset.models.dashboard import Dashboard from superset.models.slice import Slice from superset.utils.dict_import_export import EXPORT_VERSION diff --git a/superset/dashboards/commands/importers/v1/__init__.py b/superset/dashboards/commands/importers/v1/__init__.py index 597adba6d9cab..e86bddec9fe3e 100644 --- a/superset/dashboards/commands/importers/v1/__init__.py +++ b/superset/dashboards/commands/importers/v1/__init__.py @@ -24,6 +24,7 @@ from superset.charts.commands.importers.v1.utils import import_chart from superset.charts.schemas import ImportV1ChartSchema from superset.commands.importers.v1 import ImportModelsCommand +from superset.daos.dashboard import DashboardDAO from superset.dashboards.commands.exceptions import DashboardImportError from 
superset.dashboards.commands.importers.v1.utils import ( find_chart_uuids, @@ -31,7 +32,6 @@ import_dashboard, update_id_refs, ) -from superset.dashboards.dao import DashboardDAO from superset.dashboards.schemas import ImportV1DashboardSchema from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema diff --git a/superset/dashboards/commands/update.py b/superset/dashboards/commands/update.py index fefa65e3f6f0b..cd9c07e0fdfdf 100644 --- a/superset/dashboards/commands/update.py +++ b/superset/dashboards/commands/update.py @@ -24,7 +24,8 @@ from superset import security_manager from superset.commands.base import BaseCommand, UpdateMixin from superset.commands.utils import populate_roles -from superset.dao.exceptions import DAOUpdateFailedError +from superset.daos.dashboard import DashboardDAO +from superset.daos.exceptions import DAOUpdateFailedError from superset.dashboards.commands.exceptions import ( DashboardForbiddenError, DashboardInvalidError, @@ -32,7 +33,6 @@ DashboardSlugExistsValidationError, DashboardUpdateFailedError, ) -from superset.dashboards.dao import DashboardDAO from superset.exceptions import SupersetSecurityException from superset.extensions import db from superset.models.dashboard import Dashboard diff --git a/superset/dashboards/filter_sets/api.py b/superset/dashboards/filter_sets/api.py index d236b16d9c68f..11291b91cc5a6 100644 --- a/superset/dashboards/filter_sets/api.py +++ b/superset/dashboards/filter_sets/api.py @@ -30,8 +30,8 @@ from marshmallow import ValidationError from superset.commands.exceptions import ObjectNotFoundError +from superset.daos.dashboard import DashboardDAO from superset.dashboards.commands.exceptions import DashboardNotFoundError -from superset.dashboards.dao import DashboardDAO from superset.dashboards.filter_sets.commands.create import CreateFilterSetCommand from superset.dashboards.filter_sets.commands.delete import DeleteFilterSetCommand from superset.dashboards.filter_sets.commands.exceptions import ( diff --git a/superset/dashboards/filter_sets/commands/base.py b/superset/dashboards/filter_sets/commands/base.py index a7897eca8e7c7..8c53e8a818a52 100644 --- a/superset/dashboards/filter_sets/commands/base.py +++ b/superset/dashboards/filter_sets/commands/base.py @@ -21,8 +21,8 @@ from superset import security_manager from superset.common.not_authorized_object import NotAuthorizedException +from superset.daos.dashboard import DashboardDAO from superset.dashboards.commands.exceptions import DashboardNotFoundError -from superset.dashboards.dao import DashboardDAO from superset.dashboards.filter_sets.commands.exceptions import ( FilterSetForbiddenError, FilterSetNotFoundError, diff --git a/superset/dashboards/filter_sets/commands/create.py b/superset/dashboards/filter_sets/commands/create.py index 63c4534786249..127cd9e2c3703 100644 --- a/superset/dashboards/filter_sets/commands/create.py +++ b/superset/dashboards/filter_sets/commands/create.py @@ -20,6 +20,7 @@ from flask_appbuilder.models.sqla import Model from superset import security_manager +from superset.daos.dashboard import FilterSetDAO from superset.dashboards.filter_sets.commands.base import BaseFilterSetCommand from superset.dashboards.filter_sets.commands.exceptions import ( DashboardIdInconsistencyError, @@ -32,7 +33,6 @@ OWNER_ID_FIELD, OWNER_TYPE_FIELD, ) -from superset.dashboards.filter_sets.dao import FilterSetDAO from superset.utils.core import get_user_id logger = logging.getLogger(__name__) diff 
--git a/superset/dashboards/filter_sets/commands/delete.py b/superset/dashboards/filter_sets/commands/delete.py index c416252794e8a..93f43833994ad 100644 --- a/superset/dashboards/filter_sets/commands/delete.py +++ b/superset/dashboards/filter_sets/commands/delete.py @@ -18,14 +18,14 @@ from flask_appbuilder.models.sqla import Model -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.dashboard import FilterSetDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.dashboards.filter_sets.commands.base import BaseFilterSetCommand from superset.dashboards.filter_sets.commands.exceptions import ( FilterSetDeleteFailedError, FilterSetForbiddenError, FilterSetNotFoundError, ) -from superset.dashboards.filter_sets.dao import FilterSetDAO logger = logging.getLogger(__name__) diff --git a/superset/dashboards/filter_sets/commands/update.py b/superset/dashboards/filter_sets/commands/update.py index 722672d6684d8..eecaa34aeb8e9 100644 --- a/superset/dashboards/filter_sets/commands/update.py +++ b/superset/dashboards/filter_sets/commands/update.py @@ -19,13 +19,13 @@ from flask_appbuilder.models.sqla import Model -from superset.dao.exceptions import DAOUpdateFailedError +from superset.daos.dashboard import FilterSetDAO +from superset.daos.exceptions import DAOUpdateFailedError from superset.dashboards.filter_sets.commands.base import BaseFilterSetCommand from superset.dashboards.filter_sets.commands.exceptions import ( FilterSetUpdateFailedError, ) from superset.dashboards.filter_sets.consts import OWNER_ID_FIELD, OWNER_TYPE_FIELD -from superset.dashboards.filter_sets.dao import FilterSetDAO logger = logging.getLogger(__name__) diff --git a/superset/dashboards/filter_sets/dao.py b/superset/dashboards/filter_sets/dao.py deleted file mode 100644 index 5f2b0ba418edd..0000000000000 --- a/superset/dashboards/filter_sets/dao.py +++ /dev/null @@ -1,64 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-import logging -from typing import Any - -from flask_appbuilder.models.sqla import Model -from sqlalchemy.exc import SQLAlchemyError - -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAOConfigError, DAOCreateFailedError -from superset.dashboards.filter_sets.consts import ( - DASHBOARD_ID_FIELD, - DESCRIPTION_FIELD, - JSON_METADATA_FIELD, - NAME_FIELD, - OWNER_ID_FIELD, - OWNER_TYPE_FIELD, -) -from superset.extensions import db -from superset.models.filter_set import FilterSet - -logger = logging.getLogger(__name__) - - -class FilterSetDAO(BaseDAO): - model_cls = FilterSet - - @classmethod - def create(cls, properties: dict[str, Any], commit: bool = True) -> Model: - if cls.model_cls is None: - raise DAOConfigError() - model = FilterSet() - setattr(model, NAME_FIELD, properties[NAME_FIELD]) - setattr(model, JSON_METADATA_FIELD, properties[JSON_METADATA_FIELD]) - setattr(model, DESCRIPTION_FIELD, properties.get(DESCRIPTION_FIELD, None)) - setattr( - model, - OWNER_ID_FIELD, - properties.get(OWNER_ID_FIELD, properties[DASHBOARD_ID_FIELD]), - ) - setattr(model, OWNER_TYPE_FIELD, properties[OWNER_TYPE_FIELD]) - setattr(model, DASHBOARD_ID_FIELD, properties[DASHBOARD_ID_FIELD]) - try: - db.session.add(model) - if commit: - db.session.commit() - except SQLAlchemyError as ex: # pragma: no cover - db.session.rollback() - raise DAOCreateFailedError() from ex - return model diff --git a/superset/dashboards/filter_state/commands/utils.py b/superset/dashboards/filter_state/commands/utils.py index 35f940f4343e1..7e52518249fcb 100644 --- a/superset/dashboards/filter_state/commands/utils.py +++ b/superset/dashboards/filter_state/commands/utils.py @@ -15,11 +15,11 @@ # specific language governing permissions and limitations # under the License. 
+from superset.daos.dashboard import DashboardDAO from superset.dashboards.commands.exceptions import ( DashboardAccessDeniedError, DashboardNotFoundError, ) -from superset.dashboards.dao import DashboardDAO from superset.temporary_cache.commands.exceptions import ( TemporaryCacheAccessDeniedError, TemporaryCacheResourceNotFoundError, diff --git a/superset/dashboards/permalink/commands/create.py b/superset/dashboards/permalink/commands/create.py index f0cf3e71cb0c1..320003ff3da3b 100644 --- a/superset/dashboards/permalink/commands/create.py +++ b/superset/dashboards/permalink/commands/create.py @@ -18,7 +18,7 @@ from sqlalchemy.exc import SQLAlchemyError -from superset.dashboards.dao import DashboardDAO +from superset.daos.dashboard import DashboardDAO from superset.dashboards.permalink.commands.base import BaseDashboardPermalinkCommand from superset.dashboards.permalink.exceptions import DashboardPermalinkCreateFailedError from superset.dashboards.permalink.types import DashboardPermalinkState diff --git a/superset/dashboards/permalink/commands/get.py b/superset/dashboards/permalink/commands/get.py index da54ae0b66e81..6b32a459a594b 100644 --- a/superset/dashboards/permalink/commands/get.py +++ b/superset/dashboards/permalink/commands/get.py @@ -19,8 +19,8 @@ from sqlalchemy.exc import SQLAlchemyError +from superset.daos.dashboard import DashboardDAO from superset.dashboards.commands.exceptions import DashboardNotFoundError -from superset.dashboards.dao import DashboardDAO from superset.dashboards.permalink.commands.base import BaseDashboardPermalinkCommand from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError from superset.dashboards.permalink.types import DashboardPermalinkValue diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py index 846ed39e825cf..8d47d39cd3baa 100644 --- a/superset/dashboards/schemas.py +++ b/superset/dashboards/schemas.py @@ -194,7 +194,6 @@ class DashboardGetResponseSchema(Schema): metadata={"description": certification_details_description} ) changed_by_name = fields.String() - changed_by_url = fields.String() changed_by = fields.Nested(UserSchema(exclude=(["username"]))) changed_on = fields.DateTime() charts = fields.List(fields.String(metadata={"description": charts_description})) diff --git a/superset/databases/api.py b/superset/databases/api.py index c214065a27d11..8f7569af925a8 100644 --- a/superset/databases/api.py +++ b/superset/databases/api.py @@ -35,6 +35,7 @@ ) from superset.commands.importers.v1.utils import get_contents_from_bundle from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.database import DatabaseDAO from superset.databases.commands.create import CreateDatabaseCommand from superset.databases.commands.delete import DeleteDatabaseCommand from superset.databases.commands.exceptions import ( @@ -55,7 +56,6 @@ from superset.databases.commands.update import UpdateDatabaseCommand from superset.databases.commands.validate import ValidateDatabaseParametersCommand from superset.databases.commands.validate_sql import ValidateSQLCommand -from superset.databases.dao import DatabaseDAO from superset.databases.decorators import check_datasource_access from superset.databases.filters import DatabaseFilter, DatabaseUploadEnabledFilter from superset.databases.schemas import ( diff --git a/superset/databases/commands/create.py b/superset/databases/commands/create.py index e3fd667130c2e..a8681c5048671 100644 --- a/superset/databases/commands/create.py +++ 
b/superset/databases/commands/create.py @@ -23,7 +23,8 @@ from superset import is_feature_enabled from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.database import DatabaseDAO +from superset.daos.exceptions import DAOCreateFailedError from superset.databases.commands.exceptions import ( DatabaseConnectionFailedError, DatabaseCreateFailedError, @@ -32,7 +33,6 @@ DatabaseRequiredFieldValidationError, ) from superset.databases.commands.test_connection import TestConnectionDatabaseCommand -from superset.databases.dao import DatabaseDAO from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand from superset.databases.ssh_tunnel.commands.exceptions import ( SSHTunnelCreateFailedError, diff --git a/superset/databases/commands/delete.py b/superset/databases/commands/delete.py index 825b12621811a..b8eb3f6e5e637 100644 --- a/superset/databases/commands/delete.py +++ b/superset/databases/commands/delete.py @@ -21,16 +21,16 @@ from flask_babel import lazy_gettext as _ from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.database import DatabaseDAO +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.report import ReportScheduleDAO from superset.databases.commands.exceptions import ( DatabaseDeleteDatasetsExistFailedError, DatabaseDeleteFailedError, DatabaseDeleteFailedReportsExistError, DatabaseNotFoundError, ) -from superset.databases.dao import DatabaseDAO from superset.models.core import Database -from superset.reports.dao import ReportScheduleDAO logger = logging.getLogger(__name__) diff --git a/superset/databases/commands/export.py b/superset/databases/commands/export.py index 889cb86c8f095..71dc55a0268fb 100644 --- a/superset/databases/commands/export.py +++ b/superset/databases/commands/export.py @@ -24,7 +24,7 @@ import yaml from superset.databases.commands.exceptions import DatabaseNotFoundError -from superset.databases.dao import DatabaseDAO +from superset.daos.database import DatabaseDAO from superset.commands.export.models import ExportModelsCommand from superset.models.core import Database from superset.utils.dict_import_export import EXPORT_VERSION diff --git a/superset/databases/commands/importers/v1/__init__.py b/superset/databases/commands/importers/v1/__init__.py index ba119beaaa80f..585c2d54ca160 100644 --- a/superset/databases/commands/importers/v1/__init__.py +++ b/superset/databases/commands/importers/v1/__init__.py @@ -21,9 +21,9 @@ from sqlalchemy.orm import Session from superset.commands.importers.v1 import ImportModelsCommand +from superset.daos.database import DatabaseDAO from superset.databases.commands.exceptions import DatabaseImportError from superset.databases.commands.importers.v1.utils import import_database -from superset.databases.dao import DatabaseDAO from superset.databases.schemas import ImportV1DatabaseSchema from superset.datasets.commands.importers.v1.utils import import_dataset from superset.datasets.schemas import ImportV1DatasetSchema diff --git a/superset/databases/commands/tables.py b/superset/databases/commands/tables.py index b7dbb4d461315..6232470ece569 100644 --- a/superset/databases/commands/tables.py +++ b/superset/databases/commands/tables.py @@ -17,13 +17,15 @@ import logging from typing import Any, cast +from sqlalchemy.orm import lazyload, load_only + from superset.commands.base import BaseCommand from superset.connectors.sqla.models import SqlaTable 
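The `superset/databases/commands/tables.py` hunk continuing below narrows the `extra_dict_by_name` lookup with SQLAlchemy loader options: only the three attributes that `extra_dict` needs are selected, and the `columns`/`metrics` relationships are not loaded at all. A standalone sketch of the same query shape (the filter values are placeholders):

```python
# Sketch of the narrowed query in the hunk below; database_id/schema values
# are placeholders, not taken from the diff.
from sqlalchemy.orm import lazyload, load_only

from superset.connectors.sqla.models import SqlaTable
from superset.extensions import db

tables = (
    db.session.query(SqlaTable)
    .filter(SqlaTable.database_id == 1, SqlaTable.schema == "public")
    .options(
        # fetch only the attributes that extra_dict reads...
        load_only(SqlaTable.schema, SqlaTable.table_name, SqlaTable.extra),
        # ...and skip loading the relationships entirely
        lazyload(SqlaTable.columns),
        lazyload(SqlaTable.metrics),
    )
    .all()
)
```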
+from superset.daos.database import DatabaseDAO from superset.databases.commands.exceptions import ( DatabaseNotFoundError, DatabaseTablesUnexpectedError, ) -from superset.databases.dao import DatabaseDAO from superset.exceptions import SupersetException from superset.extensions import db, security_manager from superset.models.core import Database @@ -74,10 +76,18 @@ def run(self) -> dict[str, Any]: extra_dict_by_name = { table.name: table.extra_dict for table in ( - db.session.query(SqlaTable).filter( + db.session.query(SqlaTable) + .filter( SqlaTable.database_id == self._model.id, SqlaTable.schema == self._schema_name, ) + .options( + load_only( + SqlaTable.schema, SqlaTable.table_name, SqlaTable.extra + ), + lazyload(SqlaTable.columns), + lazyload(SqlaTable.metrics), + ) ).all() } diff --git a/superset/databases/commands/test_connection.py b/superset/databases/commands/test_connection.py index 2680c5e8c180b..49c5340dd25f9 100644 --- a/superset/databases/commands/test_connection.py +++ b/superset/databases/commands/test_connection.py @@ -27,16 +27,15 @@ from superset import is_feature_enabled from superset.commands.base import BaseCommand +from superset.daos.database import DatabaseDAO, SSHTunnelDAO from superset.databases.commands.exceptions import ( DatabaseSecurityUnsafeError, DatabaseTestConnectionDriverError, DatabaseTestConnectionUnexpectedError, ) -from superset.databases.dao import DatabaseDAO from superset.databases.ssh_tunnel.commands.exceptions import ( SSHTunnelingNotEnabledError, ) -from superset.databases.ssh_tunnel.dao import SSHTunnelDAO from superset.databases.ssh_tunnel.models import SSHTunnel from superset.databases.utils import make_url_safe from superset.errors import ErrorLevel, SupersetErrorType diff --git a/superset/databases/commands/update.py b/superset/databases/commands/update.py index f12706fa1d159..ea49801aac0b3 100644 --- a/superset/databases/commands/update.py +++ b/superset/databases/commands/update.py @@ -22,7 +22,8 @@ from superset import is_feature_enabled from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOCreateFailedError, DAOUpdateFailedError +from superset.daos.database import DatabaseDAO +from superset.daos.exceptions import DAOCreateFailedError, DAOUpdateFailedError from superset.databases.commands.exceptions import ( DatabaseConnectionFailedError, DatabaseExistsValidationError, @@ -30,7 +31,6 @@ DatabaseNotFoundError, DatabaseUpdateFailedError, ) -from superset.databases.dao import DatabaseDAO from superset.databases.ssh_tunnel.commands.create import CreateSSHTunnelCommand from superset.databases.ssh_tunnel.commands.exceptions import ( SSHTunnelCreateFailedError, diff --git a/superset/databases/commands/validate.py b/superset/databases/commands/validate.py index d97ad33af9eaf..6ea412b490969 100644 --- a/superset/databases/commands/validate.py +++ b/superset/databases/commands/validate.py @@ -21,13 +21,13 @@ from flask_babel import gettext as __ from superset.commands.base import BaseCommand +from superset.daos.database import DatabaseDAO from superset.databases.commands.exceptions import ( DatabaseOfflineError, DatabaseTestConnectionFailedError, InvalidEngineError, InvalidParametersError, ) -from superset.databases.dao import DatabaseDAO from superset.databases.utils import make_url_safe from superset.db_engine_specs import get_engine_spec from superset.errors import ErrorLevel, SupersetError, SupersetErrorType diff --git a/superset/databases/commands/validate_sql.py 
b/superset/databases/commands/validate_sql.py index 40d88af7457f0..1ac378a03cb31 100644 --- a/superset/databases/commands/validate_sql.py +++ b/superset/databases/commands/validate_sql.py @@ -22,6 +22,7 @@ from flask_babel import gettext as __ from superset.commands.base import BaseCommand +from superset.daos.database import DatabaseDAO from superset.databases.commands.exceptions import ( DatabaseNotFoundError, NoValidatorConfigFoundError, @@ -30,7 +31,6 @@ ValidatorSQLError, ValidatorSQLUnexpectedError, ) -from superset.databases.dao import DatabaseDAO from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.models.core import Database from superset.sql_validators import get_validator_by_name diff --git a/superset/databases/ssh_tunnel/commands/create.py b/superset/databases/ssh_tunnel/commands/create.py index 9c41b83392dc7..6fb8a92f4f713 100644 --- a/superset/databases/ssh_tunnel/commands/create.py +++ b/superset/databases/ssh_tunnel/commands/create.py @@ -21,13 +21,13 @@ from marshmallow import ValidationError from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.database import SSHTunnelDAO +from superset.daos.exceptions import DAOCreateFailedError from superset.databases.ssh_tunnel.commands.exceptions import ( SSHTunnelCreateFailedError, SSHTunnelInvalidError, SSHTunnelRequiredFieldValidationError, ) -from superset.databases.ssh_tunnel.dao import SSHTunnelDAO from superset.extensions import db, event_logger logger = logging.getLogger(__name__) diff --git a/superset/databases/ssh_tunnel/commands/delete.py b/superset/databases/ssh_tunnel/commands/delete.py index 235ceb697bede..910df35a19ab1 100644 --- a/superset/databases/ssh_tunnel/commands/delete.py +++ b/superset/databases/ssh_tunnel/commands/delete.py @@ -21,13 +21,13 @@ from superset import is_feature_enabled from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.database import SSHTunnelDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.databases.ssh_tunnel.commands.exceptions import ( SSHTunnelDeleteFailedError, SSHTunnelingNotEnabledError, SSHTunnelNotFoundError, ) -from superset.databases.ssh_tunnel.dao import SSHTunnelDAO from superset.databases.ssh_tunnel.models import SSHTunnel logger = logging.getLogger(__name__) diff --git a/superset/databases/ssh_tunnel/commands/update.py b/superset/databases/ssh_tunnel/commands/update.py index 37fd4a94b9652..4e4edcb664b8a 100644 --- a/superset/databases/ssh_tunnel/commands/update.py +++ b/superset/databases/ssh_tunnel/commands/update.py @@ -20,14 +20,14 @@ from flask_appbuilder.models.sqla import Model from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAOUpdateFailedError +from superset.daos.database import SSHTunnelDAO +from superset.daos.exceptions import DAOUpdateFailedError from superset.databases.ssh_tunnel.commands.exceptions import ( SSHTunnelInvalidError, SSHTunnelNotFoundError, SSHTunnelRequiredFieldValidationError, SSHTunnelUpdateFailedError, ) -from superset.databases.ssh_tunnel.dao import SSHTunnelDAO from superset.databases.ssh_tunnel.models import SSHTunnel logger = logging.getLogger(__name__) diff --git a/superset/databases/ssh_tunnel/dao.py b/superset/databases/ssh_tunnel/dao.py deleted file mode 100644 index 731f9183b348a..0000000000000 --- a/superset/databases/ssh_tunnel/dao.py +++ /dev/null @@ -1,49 +0,0 @@ -# Licensed to the Apache Software 
Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import logging -from typing import Any - -from superset.dao.base import BaseDAO -from superset.databases.ssh_tunnel.models import SSHTunnel -from superset.utils.ssh_tunnel import unmask_password_info - -logger = logging.getLogger(__name__) - - -class SSHTunnelDAO(BaseDAO): - model_cls = SSHTunnel - - @classmethod - def update( - cls, - model: SSHTunnel, - properties: dict[str, Any], - commit: bool = True, - ) -> SSHTunnel: - """ - Unmask ``password``, ``private_key`` and ``private_key_password`` before updating. - - When a database is edited the user sees a masked version of - the aforementioned fields. - - The masked values should be unmasked before the ssh tunnel is updated. - """ - # ID cannot be updated so we remove it if present in the payload - properties.pop("id", None) - properties = unmask_password_info(properties, model) - - return super().update(model, properties, commit) diff --git a/superset/datasets/api.py b/superset/datasets/api.py index fa5173458a43d..6e6cf38aad89e 100644 --- a/superset/datasets/api.py +++ b/superset/datasets/api.py @@ -35,6 +35,7 @@ from superset.commands.importers.v1.utils import get_contents_from_bundle from superset.connectors.sqla.models import SqlaTable from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.dataset import DatasetDAO from superset.databases.filters import DatabaseFilter from superset.datasets.commands.bulk_delete import BulkDeleteDatasetCommand from superset.datasets.commands.create import CreateDatasetCommand @@ -55,7 +56,6 @@ from superset.datasets.commands.refresh import RefreshDatasetCommand from superset.datasets.commands.update import UpdateDatasetCommand from superset.datasets.commands.warm_up_cache import DatasetWarmUpCacheCommand -from superset.datasets.dao import DatasetDAO from superset.datasets.filters import DatasetCertifiedFilter, DatasetIsNullOrEmptyFilter from superset.datasets.schemas import ( DatasetCacheWarmUpRequestSchema, @@ -107,7 +107,6 @@ class DatasetRestApi(BaseSupersetModelRestApi): "database.id", "database.database_name", "changed_by_name", - "changed_by_url", "changed_by.first_name", "changed_by.last_name", "changed_on_utc", @@ -169,7 +168,6 @@ class DatasetRestApi(BaseSupersetModelRestApi): "columns.type", "columns.uuid", "columns.verbose_name", - "metrics", # TODO(john-bodley): Deprecate in 3.0. 
"metrics.changed_on", "metrics.created_on", "metrics.d3format", diff --git a/superset/datasets/columns/commands/delete.py b/superset/datasets/columns/commands/delete.py index 8fb27f9386489..6ff8c21d7da59 100644 --- a/superset/datasets/columns/commands/delete.py +++ b/superset/datasets/columns/commands/delete.py @@ -22,13 +22,13 @@ from superset import security_manager from superset.commands.base import BaseCommand from superset.connectors.sqla.models import TableColumn -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.datasets.columns.commands.exceptions import ( DatasetColumnDeleteFailedError, DatasetColumnForbiddenError, DatasetColumnNotFoundError, ) -from superset.datasets.dao import DatasetDAO from superset.exceptions import SupersetSecurityException logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/bulk_delete.py b/superset/datasets/commands/bulk_delete.py index fd133518098cc..9733aa21e8c98 100644 --- a/superset/datasets/commands/bulk_delete.py +++ b/superset/datasets/commands/bulk_delete.py @@ -21,12 +21,12 @@ from superset.commands.base import BaseCommand from superset.commands.exceptions import DeleteFailedError from superset.connectors.sqla.models import SqlaTable +from superset.daos.dataset import DatasetDAO from superset.datasets.commands.exceptions import ( DatasetBulkDeleteFailedError, DatasetForbiddenError, DatasetNotFoundError, ) -from superset.datasets.dao import DatasetDAO from superset.exceptions import SupersetSecurityException from superset.extensions import db diff --git a/superset/datasets/commands/create.py b/superset/datasets/commands/create.py index 1c864ad196d1f..28b0250ab386b 100644 --- a/superset/datasets/commands/create.py +++ b/superset/datasets/commands/create.py @@ -22,7 +22,8 @@ from sqlalchemy.exc import SQLAlchemyError from superset.commands.base import BaseCommand, CreateMixin -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAOCreateFailedError from superset.datasets.commands.exceptions import ( DatabaseNotFoundValidationError, DatasetCreateFailedError, @@ -30,7 +31,6 @@ DatasetInvalidError, TableNotFoundValidationError, ) -from superset.datasets.dao import DatasetDAO from superset.extensions import db logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/delete.py b/superset/datasets/commands/delete.py index 1487f1028b3be..7078f09c37819 100644 --- a/superset/datasets/commands/delete.py +++ b/superset/datasets/commands/delete.py @@ -23,13 +23,13 @@ from superset import security_manager from superset.commands.base import BaseCommand from superset.connectors.sqla.models import SqlaTable -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.datasets.commands.exceptions import ( DatasetDeleteFailedError, DatasetForbiddenError, DatasetNotFoundError, ) -from superset.datasets.dao import DatasetDAO from superset.exceptions import SupersetSecurityException from superset.extensions import db diff --git a/superset/datasets/commands/duplicate.py b/superset/datasets/commands/duplicate.py index 5a4a84fdf9dfe..dc3ccb85d4b0c 100644 --- a/superset/datasets/commands/duplicate.py +++ b/superset/datasets/commands/duplicate.py @@ -25,14 +25,14 @@ from superset.commands.base import 
BaseCommand, CreateMixin from superset.commands.exceptions import DatasourceTypeInvalidError from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAOCreateFailedError from superset.datasets.commands.exceptions import ( DatasetDuplicateFailedError, DatasetExistsValidationError, DatasetInvalidError, DatasetNotFoundError, ) -from superset.datasets.dao import DatasetDAO from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import SupersetErrorException from superset.extensions import db diff --git a/superset/datasets/commands/export.py b/superset/datasets/commands/export.py index 8c02a23f2967c..392265232204e 100644 --- a/superset/datasets/commands/export.py +++ b/superset/datasets/commands/export.py @@ -24,9 +24,9 @@ from superset.commands.export.models import ExportModelsCommand from superset.connectors.sqla.models import SqlaTable -from superset.databases.dao import DatabaseDAO +from superset.daos.database import DatabaseDAO from superset.datasets.commands.exceptions import DatasetNotFoundError -from superset.datasets.dao import DatasetDAO +from superset.daos.dataset import DatasetDAO from superset.utils.dict_import_export import EXPORT_VERSION from superset.utils.file import get_filename from superset.utils.ssh_tunnel import mask_password_info diff --git a/superset/datasets/commands/importers/v1/__init__.py b/superset/datasets/commands/importers/v1/__init__.py index e753138ab8fb4..f46c137b7e8f7 100644 --- a/superset/datasets/commands/importers/v1/__init__.py +++ b/superset/datasets/commands/importers/v1/__init__.py @@ -21,11 +21,11 @@ from sqlalchemy.orm import Session from superset.commands.importers.v1 import ImportModelsCommand +from superset.daos.dataset import DatasetDAO from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema from superset.datasets.commands.exceptions import DatasetImportError from superset.datasets.commands.importers.v1.utils import import_dataset -from superset.datasets.dao import DatasetDAO from superset.datasets.schemas import ImportV1DatasetSchema diff --git a/superset/datasets/commands/refresh.py b/superset/datasets/commands/refresh.py index 5277c27771c90..a25609636db0e 100644 --- a/superset/datasets/commands/refresh.py +++ b/superset/datasets/commands/refresh.py @@ -22,12 +22,12 @@ from superset import security_manager from superset.commands.base import BaseCommand from superset.connectors.sqla.models import SqlaTable +from superset.daos.dataset import DatasetDAO from superset.datasets.commands.exceptions import ( DatasetForbiddenError, DatasetNotFoundError, DatasetRefreshFailedError, ) -from superset.datasets.dao import DatasetDAO from superset.exceptions import SupersetSecurityException logger = logging.getLogger(__name__) diff --git a/superset/datasets/commands/update.py b/superset/datasets/commands/update.py index be9625709fdb3..1636805567b52 100644 --- a/superset/datasets/commands/update.py +++ b/superset/datasets/commands/update.py @@ -25,7 +25,8 @@ from superset import security_manager from superset.commands.base import BaseCommand, UpdateMixin from superset.connectors.sqla.models import SqlaTable -from superset.dao.exceptions import DAOUpdateFailedError +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAOUpdateFailedError from 
superset.datasets.commands.exceptions import ( DatabaseChangeValidationError, DatasetColumnNotFoundValidationError, @@ -41,7 +42,6 @@ DatasetNotFoundError, DatasetUpdateFailedError, ) -from superset.datasets.dao import DatasetDAO from superset.exceptions import SupersetSecurityException from superset.utils.urls import is_safe_url diff --git a/superset/datasets/metrics/commands/delete.py b/superset/datasets/metrics/commands/delete.py index d57e7fa359b49..5e7b2144c0810 100644 --- a/superset/datasets/metrics/commands/delete.py +++ b/superset/datasets/metrics/commands/delete.py @@ -22,8 +22,8 @@ from superset import security_manager from superset.commands.base import BaseCommand from superset.connectors.sqla.models import SqlMetric -from superset.dao.exceptions import DAODeleteFailedError -from superset.datasets.dao import DatasetDAO +from superset.daos.dataset import DatasetDAO +from superset.daos.exceptions import DAODeleteFailedError from superset.datasets.metrics.commands.exceptions import ( DatasetMetricDeleteFailedError, DatasetMetricForbiddenError, diff --git a/superset/datasource/api.py b/superset/datasource/api.py index 471077ff70ce5..6399d197e0049 100644 --- a/superset/datasource/api.py +++ b/superset/datasource/api.py @@ -19,8 +19,8 @@ from flask_appbuilder.api import expose, protect, safe from superset import app, db, event_logger -from superset.dao.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError -from superset.datasource.dao import DatasourceDAO +from superset.daos.datasource import DatasourceDAO +from superset.daos.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError from superset.exceptions import SupersetSecurityException from superset.superset_typing import FlaskResponse from superset.utils.core import apply_max_row_limit, DatasourceType diff --git a/superset/embedded/api.py b/superset/embedded/api.py index def5bf9128fcf..229ecc81c94b8 100644 --- a/superset/embedded/api.py +++ b/superset/embedded/api.py @@ -24,8 +24,8 @@ from superset import is_feature_enabled from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.dashboard import EmbeddedDashboardDAO from superset.dashboards.schemas import EmbeddedDashboardResponseSchema -from superset.embedded.dao import EmbeddedDAO from superset.embedded_dashboard.commands.exceptions import ( EmbeddedDashboardNotFoundError, ) @@ -98,7 +98,7 @@ def get(self, uuid: str) -> Response: $ref: '#/components/responses/500' """ try: - embedded = EmbeddedDAO.find_by_id(uuid) + embedded = EmbeddedDashboardDAO.find_by_id(uuid) if not embedded: raise EmbeddedDashboardNotFoundError() result = self.embedded_response_schema.dump(embedded) diff --git a/superset/embedded/dao.py b/superset/embedded/dao.py deleted file mode 100644 index 27ca3385023be..0000000000000 --- a/superset/embedded/dao.py +++ /dev/null @@ -1,53 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
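`EmbeddedDAO`, renamed `EmbeddedDashboardDAO` in this PR (its deleted source continues below), is keyed by `uuid` rather than `id` and exposes `upsert()` instead of `create()`. A minimal sketch of that upsert, with model names taken from the deleted file:

```python
# Sketch of EmbeddedDAO.upsert from the deleted module below: reuse the
# existing EmbeddedDashboard row so its uuid, which external embedders
# reference, survives updates to the allowed-domain list.
from superset.extensions import db
from superset.models.dashboard import Dashboard
from superset.models.embedded_dashboard import EmbeddedDashboard


def upsert_embedded(
    dashboard: Dashboard, allowed_domains: list[str]
) -> EmbeddedDashboard:
    embedded = dashboard.embedded[0] if dashboard.embedded else EmbeddedDashboard()
    embedded.allow_domain_list = ",".join(allowed_domains)
    dashboard.embedded = [embedded]
    db.session.commit()
    return embedded
```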
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import logging -from typing import Any - -from superset.dao.base import BaseDAO -from superset.extensions import db -from superset.models.dashboard import Dashboard -from superset.models.embedded_dashboard import EmbeddedDashboard - -logger = logging.getLogger(__name__) - - -class EmbeddedDAO(BaseDAO): - model_cls = EmbeddedDashboard - # There isn't really a regular scenario where we would rather get Embedded by id - id_column_name = "uuid" - - @staticmethod - def upsert(dashboard: Dashboard, allowed_domains: list[str]) -> EmbeddedDashboard: - """ - Sets up a dashboard to be embeddable. - Upsert is used to preserve the embedded_dashboard uuid across updates. - """ - embedded: EmbeddedDashboard = ( - dashboard.embedded[0] if dashboard.embedded else EmbeddedDashboard() - ) - embedded.allow_domain_list = ",".join(allowed_domains) - dashboard.embedded = [embedded] - db.session.commit() - return embedded - - @classmethod - def create(cls, properties: dict[str, Any], commit: bool = True) -> Any: - """ - Use EmbeddedDAO.upsert() instead. - At least, until we are ok with more than one embedded instance per dashboard. - """ - raise NotImplementedError("Use EmbeddedDAO.upsert() instead.") diff --git a/superset/embedded/view.py b/superset/embedded/view.py index b7062c0b5ee64..e59a6ced90f68 100644 --- a/superset/embedded/view.py +++ b/superset/embedded/view.py @@ -23,7 +23,7 @@ from flask_wtf.csrf import same_origin from superset import event_logger, is_feature_enabled -from superset.embedded.dao import EmbeddedDAO +from superset.daos.dashboard import EmbeddedDashboardDAO from superset.superset_typing import FlaskResponse from superset.utils import core as utils from superset.views.base import BaseSupersetView, common_bootstrap_payload @@ -50,7 +50,7 @@ def embedded( if not is_feature_enabled("EMBEDDED_SUPERSET"): abort(404) - embedded = EmbeddedDAO.find_by_id(uuid) + embedded = EmbeddedDashboardDAO.find_by_id(uuid) if not embedded: abort(404) diff --git a/superset/explore/commands/get.py b/superset/explore/commands/get.py index 490d198360dad..148ad2a3f535e 100644 --- a/superset/explore/commands/get.py +++ b/superset/explore/commands/get.py @@ -19,19 +19,19 @@ from typing import Any, cast, Optional import simplejson as json -from flask import current_app, request -from flask_babel import gettext as __, lazy_gettext as _ +from flask import request +from flask_babel import lazy_gettext as _ from sqlalchemy.exc import SQLAlchemyError -from superset import db, security_manager +from superset import db from superset.commands.base import BaseCommand from superset.connectors.base.models import BaseDatasource from superset.connectors.sqla.models import SqlaTable -from superset.dao.exceptions import DatasourceNotFound -from superset.datasource.dao import DatasourceDAO +from superset.daos.datasource import DatasourceDAO +from superset.daos.exceptions import DatasourceNotFound from superset.exceptions import SupersetException from superset.explore.commands.parameters import CommandParameters -from superset.explore.exceptions import DatasetAccessDeniedError, WrongEndpointError +from 
superset.explore.exceptions import WrongEndpointError from superset.explore.form_data.commands.get import GetFormDataCommand from superset.explore.form_data.commands.parameters import ( CommandParameters as FormDataCommandParameters, @@ -119,20 +119,6 @@ def run(self) -> Optional[dict[str, Any]]: except DatasourceNotFound: pass datasource_name = datasource.name if datasource else _("[Missing Dataset]") - - if datasource: - if current_app.config["ENABLE_ACCESS_REQUEST"] and ( - not security_manager.can_access_datasource(datasource) - ): - message = __( - security_manager.get_datasource_access_error_msg(datasource) - ) - raise DatasetAccessDeniedError( - message=message, - datasource_type=self._datasource_type, - datasource_id=self._datasource_id, - ) - viz_type = form_data.get("viz_type") if not viz_type and datasource and datasource.default_endpoint: raise WrongEndpointError(redirect=datasource.default_endpoint) diff --git a/superset/explore/schemas.py b/superset/explore/schemas.py index f0060360cfe49..37044c0394284 100644 --- a/superset/explore/schemas.py +++ b/superset/explore/schemas.py @@ -114,7 +114,7 @@ class SliceSchema(Schema): certified_by = fields.String( metadata={"description": "Person or group that has certified this dashboard."} ) - changed_on = fields.String( + changed_on = fields.DateTime( metadata={"description": "Timestamp of the last modification."} ) changed_on_humanized = fields.String( diff --git a/superset/explore/utils.py b/superset/explore/utils.py index 01f63f53f2f44..ca73cb39fb481 100644 --- a/superset/explore/utils.py +++ b/superset/explore/utils.py @@ -21,18 +21,18 @@ ChartAccessDeniedError, ChartNotFoundError, ) -from superset.charts.dao import ChartDAO from superset.commands.exceptions import ( DatasourceNotFoundValidationError, DatasourceTypeInvalidError, QueryNotFoundValidationError, ) +from superset.daos.chart import ChartDAO +from superset.daos.dataset import DatasetDAO +from superset.daos.query import QueryDAO from superset.datasets.commands.exceptions import ( DatasetAccessDeniedError, DatasetNotFoundError, ) -from superset.datasets.dao import DatasetDAO -from superset.queries.dao import QueryDAO from superset.utils.core import DatasourceType diff --git a/superset/extensions/ssh.py b/superset/extensions/ssh.py index 78b0c4116b192..5cf84099f2688 100644 --- a/superset/extensions/ssh.py +++ b/superset/extensions/ssh.py @@ -35,6 +35,7 @@ def __init__(self, app: Flask) -> None: super().__init__() self.local_bind_address = app.config["SSH_TUNNEL_LOCAL_BIND_ADDRESS"] sshtunnel.TUNNEL_TIMEOUT = app.config["SSH_TUNNEL_TIMEOUT_SEC"] + sshtunnel.SSH_TIMEOUT = app.config["SSH_TUNNEL_PACKET_TIMEOUT_SEC"] def build_sqla_url( # pylint: disable=no-self-use self, sqlalchemy_url: str, server: sshtunnel.SSHTunnelForwarder diff --git a/superset/initialization/__init__.py b/superset/initialization/__init__.py index bbe25f498b4ee..3d7d9817f787e 100644 --- a/superset/initialization/__init__.py +++ b/superset/initialization/__init__.py @@ -116,6 +116,7 @@ def init_views(self) -> None: # the global Flask app # # pylint: disable=import-outside-toplevel,too-many-locals,too-many-statements + from superset import security_manager from superset.advanced_data_type.api import AdvancedDataTypeRestApi from superset.annotation_layers.annotations.api import AnnotationRestApi from superset.annotation_layers.api import AnnotationLayerRestApi @@ -154,7 +155,6 @@ def init_views(self) -> None: from superset.security.api import SecurityRestApi from superset.sqllab.api import SqlLabRestApi from 
superset.tags.api import TagRestApi - from superset.views.access_requests import AccessRequestsModelView from superset.views.alerts import AlertView, ReportView from superset.views.all_entities import TaggedObjectsModelView, TaggedObjectView from superset.views.annotations import AnnotationLayerView @@ -334,10 +334,12 @@ def init_views(self) -> None: category="Manage", category_label=__("Manage"), category_icon="fa-wrench", - cond=lambda: not feature_flag_manager.is_feature_enabled( - "VERSIONED_EXPORT" + cond=lambda: ( + security_manager.can_access("can_import_dashboards", "Superset") + and not feature_flag_manager.is_feature_enabled("VERSIONED_EXPORT") ), ) + appbuilder.add_link( "SQL Editor", label=__("SQL Lab"), @@ -419,16 +421,6 @@ def init_views(self) -> None: category_label=__("Manage"), ) - appbuilder.add_view( - AccessRequestsModelView, - "Access requests", - label=__("Access requests"), - category="Security", - category_label=__("Security"), - icon="fa-table", - menu_cond=lambda: bool(self.config["ENABLE_ACCESS_REQUEST"]), - ) - appbuilder.add_view( RowLevelSecurityView, "Row Level Security", diff --git a/superset/jinja_context.py b/superset/jinja_context.py index f096b65cd1617..4bb0b91a4e3db 100644 --- a/superset/jinja_context.py +++ b/superset/jinja_context.py @@ -630,7 +630,7 @@ def dataset_macro( the user can also request metrics to be included, and columns to group by. """ # pylint: disable=import-outside-toplevel - from superset.datasets.dao import DatasetDAO + from superset.daos.dataset import DatasetDAO dataset = DatasetDAO.find_by_id(dataset_id) if not dataset: diff --git a/superset/migrations/versions/2023-06-01_13-13_83e1abbe777f_drop_access_request.py b/superset/migrations/versions/2023-06-01_13-13_83e1abbe777f_drop_access_request.py new file mode 100644 index 0000000000000..a95650ec5a99e --- /dev/null +++ b/superset/migrations/versions/2023-06-01_13-13_83e1abbe777f_drop_access_request.py @@ -0,0 +1,50 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""drop access_request + +Revision ID: 83e1abbe777f +Revises: ae58e1e58e5c +Create Date: 2023-06-01 13:13:18.147362 + +""" + +# revision identifiers, used by Alembic. 
+revision = "83e1abbe777f" +down_revision = "ae58e1e58e5c" + +import sqlalchemy as sa +from alembic import op + + +def upgrade(): + op.drop_table("access_request") + + +def downgrade(): + op.create_table( + "access_request", + sa.Column("created_on", sa.DateTime(), nullable=True), + sa.Column("changed_on", sa.DateTime(), nullable=True), + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("datasource_type", sa.String(length=200), nullable=True), + sa.Column("datasource_id", sa.Integer(), nullable=True), + sa.Column("changed_by_fk", sa.Integer(), nullable=True), + sa.Column("created_by_fk", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["changed_by_fk"], ["ab_user.id"]), + sa.ForeignKeyConstraint(["created_by_fk"], ["ab_user.id"]), + sa.PrimaryKeyConstraint("id"), + ) diff --git a/superset/models/__init__.py b/superset/models/__init__.py index a102a0fff59a4..067d6ae831a98 100644 --- a/superset/models/__init__.py +++ b/superset/models/__init__.py @@ -14,4 +14,4 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from . import core, datasource_access_request, dynamic_plugins, sql_lab, user_attributes +from . import core, dynamic_plugins, sql_lab, user_attributes diff --git a/superset/models/core.py b/superset/models/core.py index 3c2b12d3782ba..92e6f2dbb5b6a 100755 --- a/superset/models/core.py +++ b/superset/models/core.py @@ -14,7 +14,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# pylint: disable=line-too-long +# pylint: disable=line-too-long,too-many-lines """A collection of ORM sqlalchemy models for Superset""" import builtins import enum @@ -31,6 +31,7 @@ import numpy import pandas as pd import sqlalchemy as sqla +import sshtunnel from flask import g, request from flask_appbuilder import Model from sqlalchemy import ( @@ -53,7 +54,7 @@ from sqlalchemy.orm import relationship from sqlalchemy.pool import NullPool from sqlalchemy.schema import UniqueConstraint -from sqlalchemy.sql import expression, Select +from sqlalchemy.sql import ColumnElement, expression, Select from superset import app, db_engine_specs from superset.constants import LRU_CACHE_MAX_SIZE, PASSWORD_MASK @@ -386,7 +387,7 @@ def get_sqla_engine_with_context( source: Optional[utils.QuerySource] = None, override_ssh_tunnel: Optional["SSHTunnel"] = None, ) -> Engine: - from superset.databases.dao import ( # pylint: disable=import-outside-toplevel + from superset.daos.database import ( # pylint: disable=import-outside-toplevel DatabaseDAO, ) @@ -406,9 +407,10 @@ def get_sqla_engine_with_context( with engine_context as server_context: if ssh_tunnel and server_context: logger.info( - "[SSH] Successfully create tunnel at %s: %s", + "[SSH] Successfully created tunnel w/ %s tunnel_timeout + %s ssh_timeout at %s", + sshtunnel.TUNNEL_TIMEOUT, + sshtunnel.SSH_TIMEOUT, server_context.local_bind_address, - server_context.local_bind_port, ) sqlalchemy_uri = ssh_manager_factory.instance.build_sqla_url( sqlalchemy_uri, server_context @@ -953,6 +955,22 @@ def get_dialect(self) -> Dialect: sqla_url = make_url_safe(self.sqlalchemy_uri_decrypted) return sqla_url.get_dialect()() + def make_sqla_column_compatible( + self, sqla_col: ColumnElement, label: Optional[str] = None + ) -> ColumnElement: + """Takes a sqlalchemy column object and adds label info if supported by engine. 
+ :param sqla_col: sqlalchemy column instance + :param label: alias/label that column is expected to have + :return: either a sql alchemy column or label instance if supported by engine + """ + label_expected = label or sqla_col.name + # add quotes to tables + if self.db_engine_spec.allows_alias_in_select: + label = self.db_engine_spec.make_label_compatible(label_expected) + sqla_col = sqla_col.label(label) + sqla_col.key = label_expected + return sqla_col + sqla.event.listen(Database, "after_insert", security_manager.database_after_insert) sqla.event.listen(Database, "after_update", security_manager.database_after_update) diff --git a/superset/models/dashboard.py b/superset/models/dashboard.py index f3b9c08794793..649c5a499d8da 100644 --- a/superset/models/dashboard.py +++ b/superset/models/dashboard.py @@ -24,7 +24,6 @@ from typing import Any, Callable import sqlalchemy as sqla -from flask import current_app from flask_appbuilder import Model from flask_appbuilder.models.decorators import renders from flask_appbuilder.security.sqla.models import User @@ -50,7 +49,7 @@ from superset import app, db, is_feature_enabled, security_manager from superset.connectors.base.models import BaseDatasource from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn -from superset.datasource.dao import DatasourceDAO +from superset.daos.datasource import DatasourceDAO from superset.extensions import cache_manager from superset.models.filter_set import FilterSet from superset.models.helpers import AuditMixinNullable, ImportExportMixin @@ -150,6 +149,7 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin): owners = relationship(security_manager.user_model, secondary=dashboard_user) tags = relationship( "Tag", + overlaps="objects,tag,tags,tags", secondary="tagged_object", primaryjoin="and_(Dashboard.id == TaggedObject.object_id)", secondaryjoin="and_(TaggedObject.tag_id == Tag.id, " @@ -269,15 +269,6 @@ def changed_by_name(self) -> str: return "" return str(self.changed_by) - @property - def changed_by_url(self) -> str: - if ( - not self.changed_by - or not current_app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - ): - return "" - return f"/superset/profile/{self.changed_by.username}" - @property def data(self) -> dict[str, Any]: positions = self.position_json diff --git a/superset/models/datasource_access_request.py b/superset/models/datasource_access_request.py deleted file mode 100644 index 23df4cffae38a..0000000000000 --- a/superset/models/datasource_access_request.py +++ /dev/null @@ -1,97 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
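`Database.make_sqla_column_compatible`, added above, labels a column only when the engine spec sets `allows_alias_in_select`. A hedged usage sketch; the `database` instance and the column/label names are assumptions:

```python
# Illustrative only: how a caller might use the new helper. `database` is any
# superset.models.core.Database; "num_users"/"user count" are example names.
import sqlalchemy as sa
from sqlalchemy.sql import ColumnElement

from superset.models.core import Database


def labeled_column(database: Database) -> ColumnElement:
    col = sa.column("num_users")
    # Yields `num_users AS "user count"` on engines whose spec allows aliases
    # in SELECT; otherwise the column passes through unlabeled.
    return database.make_sqla_column_compatible(col, "user count")
```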
-from typing import Optional, TYPE_CHECKING - -from flask import Markup -from flask_appbuilder import Model -from sqlalchemy import Column, Integer, String - -from superset import app, db, security_manager -from superset.models.helpers import AuditMixinNullable - -if TYPE_CHECKING: - from superset.connectors.base.models import BaseDatasource - -config = app.config - - -class DatasourceAccessRequest(Model, AuditMixinNullable): - """ORM model for the access requests for datasources and dbs.""" - - __tablename__ = "access_request" - id = Column(Integer, primary_key=True) - - datasource_id = Column(Integer) - datasource_type = Column(String(200)) - - ROLES_DENYLIST = set(config["ROBOT_PERMISSION_ROLES"]) - - @property - def cls_model(self) -> type["BaseDatasource"]: - # pylint: disable=import-outside-toplevel - from superset.datasource.dao import DatasourceDAO - - return DatasourceDAO.sources[self.datasource_type] - - @property - def username(self) -> Markup: - return self.creator() - - @property - def datasource(self) -> "BaseDatasource": - return self.get_datasource - - @datasource.getter # type: ignore - def get_datasource(self) -> "BaseDatasource": - ds = db.session.query(self.cls_model).filter_by(id=self.datasource_id).first() - return ds - - @property - def datasource_link(self) -> Optional[Markup]: - return self.datasource.link # pylint: disable=no-member - - @property - def roles_with_datasource(self) -> str: - action_list = "" - perm = self.datasource.perm # pylint: disable=no-member - pv = security_manager.find_permission_view_menu("datasource_access", perm) - for role in pv.role: - if role.name in self.ROLES_DENYLIST: - continue - href = ( - f"/superset/approve?datasource_type={self.datasource_type}&" - f"datasource_id={self.datasource_id}&" - f"created_by={self.created_by.username}&role_to_grant={role.name}" - ) - link = f'<a href="{href}">Grant {role.name} Role</a>' - action_list = action_list + "<li>" + link + "</li>" - return "<ul>" + action_list + "</ul>" - - @property - def user_roles(self) -> str: - action_list = "" - for role in self.created_by.roles: - href = ( - f"/superset/approve?datasource_type={self.datasource_type}&" - f"datasource_id={self.datasource_id}&" - f"created_by={self.created_by.username}&role_to_extend={role.name}" - ) - link = f'<a href="{href}">Extend {role.name} Role</a>' - if role.name in self.ROLES_DENYLIST: - link = f"{role.name} Role" - action_list = action_list + "<li>" + link + "</li>" - return "<ul>" + action_list + "</ul>"
diff --git a/superset/models/filter_set.py b/superset/models/filter_set.py index ac25b114ff0c1..e2b19f32a04cb 100644 --- a/superset/models/filter_set.py +++ b/superset/models/filter_set.py @@ -20,7 +20,6 @@ import logging from typing import Any -from flask import current_app from flask_appbuilder import Model from sqlalchemy import Column, ForeignKey, Integer, MetaData, String, Text from sqlalchemy.orm import relationship @@ -66,15 +65,6 @@ def changed_by_name(self) -> str: return "" return str(self.changed_by) - @property - def changed_by_url(self) -> str: - if ( - not self.changed_by - or not current_app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - ): - return "" - return f"/superset/profile/{self.changed_by.username}" - def to_dict(self) -> dict[str, Any]: return { "id": self.id,
diff --git a/superset/models/slice.py b/superset/models/slice.py index 15dddfc7e1eaf..a6ffb22a087a0 100644 --- a/superset/models/slice.py +++ b/superset/models/slice.py @@ -22,7 +22,6 @@ from urllib import parse import sqlalchemy as sqla -from flask import current_app from flask_appbuilder import Model from flask_appbuilder.models.decorators import renders from markupsafe import escape, Markup @@ -100,6 +99,7 @@ class Slice( # pylint: disable=too-many-public-methods tags = relationship( "Tag", secondary="tagged_object", + overlaps="objects,tag,tags", primaryjoin="and_(Slice.id == TaggedObject.object_id)", secondaryjoin="and_(TaggedObject.tag_id == Tag.id, " "TaggedObject.object_type == 'chart')", @@ -107,6 +107,7 @@ class Slice( # pylint: disable=too-many-public-methods table = relationship( "SqlaTable", foreign_keys=[datasource_id], + overlaps="table", primaryjoin="and_(Slice.datasource_id == SqlaTable.id, " "Slice.datasource_type == 'table')", remote_side="SqlaTable.id", @@ -136,7 +137,7 @@ def __repr__(self) -> str: @property def cls_model(self) -> type[BaseDatasource]: # pylint: disable=import-outside-toplevel - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO return DatasourceDAO.sources[self.datasource_type] @@ -332,21 +333,6 @@ def slice_link(self) -> Markup: name = escape(self.chart) return Markup(f'<a href="{self.url}">{name}</a>') - @property - def created_by_url(self) -> str: - if not self.created_by: - return "" - return f"/superset/profile/{self.created_by.username}" - - @property - def changed_by_url(self) -> str: - if ( - not self.changed_by - or not current_app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - ): - return "" - return f"/superset/profile/{self.changed_by.username}" - @property def icons(self) -> str: return f"""
diff --git a/superset/models/sql_lab.py b/superset/models/sql_lab.py index b9ab153798f9c..fbadaaa2f488a 100644 --- a/superset/models/sql_lab.py +++ b/superset/models/sql_lab.py @@ -135,7 +135,6 @@ def get_template_processor(self, **kwargs: Any) -> BaseTemplateProcessor: def to_dict(self) -> dict[str, Any]: return { - "changedOn": self.changed_on, "changed_on": self.changed_on.isoformat(), "dbId": self.database_id, "db": self.database.database_name if self.database else None, @@ -192,18 +191,17 @@ def columns(self) -> list["TableColumn"]: TableColumn, ) - columns = [] - for col in self.extra.get("columns", []): - columns.append( - TableColumn( - column_name=col["name"], - type=col["type"], - is_dttm=col["is_dttm"], - groupby=True, - filterable=True, - ) + return [ + TableColumn( + column_name=col["name"], + database=self.database, + is_dttm=col["is_dttm"], + filterable=True, + groupby=True, + type=col["type"], ) - return columns + for col in 
self.extra.get("columns", []) + ] @property def db_extra(self) -> Optional[dict[str, Any]]: @@ -385,6 +383,7 @@ class SavedQuery(Model, AuditMixinNullable, ExtraJSONMixin, ImportExportMixin): tags = relationship( "Tag", secondary="tagged_object", + overlaps="tags", primaryjoin="and_(SavedQuery.id == TaggedObject.object_id)", secondaryjoin="and_(TaggedObject.tag_id == Tag.id, " "TaggedObject.object_type == 'query')", diff --git a/superset/queries/api.py b/superset/queries/api.py index bc60742024222..2b0173ec163be 100644 --- a/superset/queries/api.py +++ b/superset/queries/api.py @@ -23,10 +23,10 @@ from superset import db, event_logger from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.query import QueryDAO from superset.databases.filters import DatabaseFilter from superset.exceptions import SupersetException from superset.models.sql_lab import Query -from superset.queries.dao import QueryDAO from superset.queries.filters import QueryFilter from superset.queries.schemas import ( openapi_spec_methods_override, diff --git a/superset/queries/saved_queries/commands/bulk_delete.py b/superset/queries/saved_queries/commands/bulk_delete.py index fb230180c8137..ba01bd456a4b1 100644 --- a/superset/queries/saved_queries/commands/bulk_delete.py +++ b/superset/queries/saved_queries/commands/bulk_delete.py @@ -18,13 +18,13 @@ from typing import Optional from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.query import SavedQueryDAO from superset.models.dashboard import Dashboard from superset.queries.saved_queries.commands.exceptions import ( SavedQueryBulkDeleteFailedError, SavedQueryNotFoundError, ) -from superset.queries.saved_queries.dao import SavedQueryDAO logger = logging.getLogger(__name__) diff --git a/superset/queries/saved_queries/commands/export.py b/superset/queries/saved_queries/commands/export.py index 323256306ac20..1b85cda796a91 100644 --- a/superset/queries/saved_queries/commands/export.py +++ b/superset/queries/saved_queries/commands/export.py @@ -26,7 +26,7 @@ from superset.commands.export.models import ExportModelsCommand from superset.models.sql_lab import SavedQuery from superset.queries.saved_queries.commands.exceptions import SavedQueryNotFoundError -from superset.queries.saved_queries.dao import SavedQueryDAO +from superset.daos.query import SavedQueryDAO from superset.utils.dict_import_export import EXPORT_VERSION logger = logging.getLogger(__name__) diff --git a/superset/queries/saved_queries/commands/importers/v1/__init__.py b/superset/queries/saved_queries/commands/importers/v1/__init__.py index 79ec04f54b4f4..c8a159c7f5cfe 100644 --- a/superset/queries/saved_queries/commands/importers/v1/__init__.py +++ b/superset/queries/saved_queries/commands/importers/v1/__init__.py @@ -22,13 +22,13 @@ from superset.commands.importers.v1 import ImportModelsCommand from superset.connectors.sqla.models import SqlaTable +from superset.daos.query import SavedQueryDAO from superset.databases.commands.importers.v1.utils import import_database from superset.databases.schemas import ImportV1DatabaseSchema from superset.queries.saved_queries.commands.exceptions import SavedQueryImportError from superset.queries.saved_queries.commands.importers.v1.utils import ( import_saved_query, ) -from superset.queries.saved_queries.dao import SavedQueryDAO from superset.queries.saved_queries.schemas import ImportV1SavedQuerySchema 
diff --git a/superset/queries/saved_queries/dao.py b/superset/queries/saved_queries/dao.py deleted file mode 100644 index daae1de8f5bd8..0000000000000 --- a/superset/queries/saved_queries/dao.py +++ /dev/null @@ -1,46 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import logging -from typing import Optional - -from sqlalchemy.exc import SQLAlchemyError - -from superset.dao.base import BaseDAO -from superset.dao.exceptions import DAODeleteFailedError -from superset.extensions import db -from superset.models.sql_lab import SavedQuery -from superset.queries.saved_queries.filters import SavedQueryFilter - -logger = logging.getLogger(__name__) - - -class SavedQueryDAO(BaseDAO): - model_cls = SavedQuery - base_filter = SavedQueryFilter - - @staticmethod - def bulk_delete(models: Optional[list[SavedQuery]], commit: bool = True) -> None: - item_ids = [model.id for model in models] if models else [] - try: - db.session.query(SavedQuery).filter(SavedQuery.id.in_(item_ids)).delete( - synchronize_session="fetch" - ) - if commit: - db.session.commit() - except SQLAlchemyError as ex: - db.session.rollback() - raise DAODeleteFailedError() from ex diff --git a/superset/reports/commands/base.py b/superset/reports/commands/base.py index 598370576b370..da871ef17c3a0 100644 --- a/superset/reports/commands/base.py +++ b/superset/reports/commands/base.py @@ -19,9 +19,9 @@ from marshmallow import ValidationError -from superset.charts.dao import ChartDAO from superset.commands.base import BaseCommand -from superset.dashboards.dao import DashboardDAO +from superset.daos.chart import ChartDAO +from superset.daos.dashboard import DashboardDAO from superset.reports.commands.exceptions import ( ChartNotFoundValidationError, ChartNotSavedValidationError, diff --git a/superset/reports/commands/bulk_delete.py b/superset/reports/commands/bulk_delete.py index 7d6e1ed791310..a3644d9fb4027 100644 --- a/superset/reports/commands/bulk_delete.py +++ b/superset/reports/commands/bulk_delete.py @@ -19,14 +19,14 @@ from superset import security_manager from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.reports.commands.exceptions import ( ReportScheduleBulkDeleteFailedError, ReportScheduleForbiddenError, ReportScheduleNotFoundError, ) -from superset.reports.dao import ReportScheduleDAO from superset.reports.models import ReportSchedule logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/create.py b/superset/reports/commands/create.py index 04cf6ef43fe0d..d8bdb03a577c3 100644 --- a/superset/reports/commands/create.py +++ 
b/superset/reports/commands/create.py @@ -22,8 +22,9 @@ from marshmallow import ValidationError from superset.commands.base import CreateMixin -from superset.dao.exceptions import DAOCreateFailedError -from superset.databases.dao import DatabaseDAO +from superset.daos.database import DatabaseDAO +from superset.daos.exceptions import DAOCreateFailedError +from superset.daos.report import ReportScheduleDAO from superset.reports.commands.base import BaseReportScheduleCommand from superset.reports.commands.exceptions import ( DatabaseNotFoundValidationError, @@ -34,7 +35,6 @@ ReportScheduleNameUniquenessValidationError, ReportScheduleRequiredTypeValidationError, ) -from superset.reports.dao import ReportScheduleDAO from superset.reports.models import ( ReportCreationMethod, ReportSchedule, diff --git a/superset/reports/commands/delete.py b/superset/reports/commands/delete.py index 4adf17683a6a5..3f7e4e5d232a1 100644 --- a/superset/reports/commands/delete.py +++ b/superset/reports/commands/delete.py @@ -21,14 +21,14 @@ from superset import security_manager from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.reports.commands.exceptions import ( ReportScheduleDeleteFailedError, ReportScheduleForbiddenError, ReportScheduleNotFoundError, ) -from superset.reports.dao import ReportScheduleDAO from superset.reports.models import ReportSchedule logger = logging.getLogger(__name__) diff --git a/superset/reports/commands/execute.py b/superset/reports/commands/execute.py index 608b2564a2907..bb7c53ed5e0fc 100644 --- a/superset/reports/commands/execute.py +++ b/superset/reports/commands/execute.py @@ -28,6 +28,10 @@ from superset.commands.base import BaseCommand from superset.commands.exceptions import CommandException from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType +from superset.daos.report import ( + REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER, + ReportScheduleDAO, +) from superset.dashboards.permalink.commands.create import ( CreateDashboardPermalinkCommand, ) @@ -52,10 +56,6 @@ ReportScheduleUnexpectedError, ReportScheduleWorkingTimeoutError, ) -from superset.reports.dao import ( - REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER, - ReportScheduleDAO, -) from superset.reports.models import ( ReportDataFormat, ReportExecutionLog, diff --git a/superset/reports/commands/log_prune.py b/superset/reports/commands/log_prune.py index badd267ecfdd4..09d999541483a 100644 --- a/superset/reports/commands/log_prune.py +++ b/superset/reports/commands/log_prune.py @@ -18,9 +18,9 @@ from datetime import datetime, timedelta from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.report import ReportScheduleDAO from superset.reports.commands.exceptions import ReportSchedulePruneLogError -from superset.reports.dao import ReportScheduleDAO from superset.reports.models import ReportSchedule from superset.utils.celery import session_scope diff --git a/superset/reports/commands/update.py b/superset/reports/commands/update.py index 5ca3ac849a5ee..4985165f6677a 100644 --- a/superset/reports/commands/update.py +++ b/superset/reports/commands/update.py @@ -23,8 +23,9 @@ from superset import security_manager from superset.commands.base import 
UpdateMixin -from superset.dao.exceptions import DAOUpdateFailedError -from superset.databases.dao import DatabaseDAO +from superset.daos.database import DatabaseDAO +from superset.daos.exceptions import DAOUpdateFailedError +from superset.daos.report import ReportScheduleDAO from superset.exceptions import SupersetSecurityException from superset.reports.commands.base import BaseReportScheduleCommand from superset.reports.commands.exceptions import ( @@ -35,7 +36,6 @@ ReportScheduleNotFoundError, ReportScheduleUpdateFailedError, ) -from superset.reports.dao import ReportScheduleDAO from superset.reports.models import ReportSchedule, ReportScheduleType, ReportState logger = logging.getLogger(__name__) diff --git a/superset/row_level_security/api.py b/superset/row_level_security/api.py index 05a6dddf04b75..43912689fbd02 100644 --- a/superset/row_level_security/api.py +++ b/superset/row_level_security/api.py @@ -30,7 +30,7 @@ ) from superset.connectors.sqla.models import RowLevelSecurityFilter from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod -from superset.dao.exceptions import DAOCreateFailedError, DAOUpdateFailedError +from superset.daos.exceptions import DAOCreateFailedError, DAOUpdateFailedError from superset.extensions import event_logger from superset.row_level_security.commands.bulk_delete import BulkDeleteRLSRuleCommand from superset.row_level_security.commands.create import CreateRLSRuleCommand diff --git a/superset/row_level_security/commands/bulk_delete.py b/superset/row_level_security/commands/bulk_delete.py index a3703346cc9ed..f180d0b2b810d 100644 --- a/superset/row_level_security/commands/bulk_delete.py +++ b/superset/row_level_security/commands/bulk_delete.py @@ -18,13 +18,13 @@ import logging from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.security import RLSDAO from superset.reports.models import ReportSchedule from superset.row_level_security.commands.exceptions import ( RLSRuleNotFoundError, RuleBulkDeleteFailedError, ) -from superset.row_level_security.dao import RLSDAO logger = logging.getLogger(__name__) diff --git a/superset/row_level_security/commands/create.py b/superset/row_level_security/commands/create.py index 5552feeda02b3..a26fdb7b1216c 100644 --- a/superset/row_level_security/commands/create.py +++ b/superset/row_level_security/commands/create.py @@ -23,9 +23,9 @@ from superset.commands.exceptions import DatasourceNotFoundValidationError from superset.commands.utils import populate_roles from superset.connectors.sqla.models import SqlaTable -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.exceptions import DAOCreateFailedError +from superset.daos.security import RLSDAO from superset.extensions import db -from superset.row_level_security.dao import RLSDAO logger = logging.getLogger(__name__) diff --git a/superset/row_level_security/commands/update.py b/superset/row_level_security/commands/update.py index a206fc3a393c2..d44aa3efaf9e8 100644 --- a/superset/row_level_security/commands/update.py +++ b/superset/row_level_security/commands/update.py @@ -23,10 +23,10 @@ from superset.commands.exceptions import DatasourceNotFoundValidationError from superset.commands.utils import populate_roles from superset.connectors.sqla.models import RowLevelSecurityFilter, SqlaTable -from superset.dao.exceptions import DAOUpdateFailedError +from superset.daos.exceptions import 
DAOUpdateFailedError +from superset.daos.security import RLSDAO from superset.extensions import db from superset.row_level_security.commands.exceptions import RLSRuleNotFoundError -from superset.row_level_security.dao import RLSDAO logger = logging.getLogger(__name__) diff --git a/superset/security/manager.py b/superset/security/manager.py index 39283d99413b6..942ed66776d8a 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -164,7 +164,6 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods ADMIN_ONLY_VIEW_MENUS = { "Access Requests", - "AccessRequestsModelView", "Action Log", "Log", "List Users", @@ -176,6 +175,8 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods "RowLevelSecurityFiltersModelView", "Security", "SQL Lab", + "User Registrations", + "User's Statistics", } | USER_MODEL_VIEWS ALPHA_ONLY_VIEW_MENUS = { @@ -195,8 +196,6 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods } ADMIN_ONLY_PERMISSIONS = { - "can_override_role_permissions", - "can_approve", "can_update_role", "all_query_access", "can_grant_guest_token", @@ -238,14 +237,9 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods ("can_execute_sql_query", "SQLLab"), ("can_estimate_query_cost", "SQL Lab"), ("can_export_csv", "SQLLab"), - ("can_sql_json", "Superset"), # Deprecated permission remove on 3.0.0 ("can_sqllab_history", "Superset"), - ("can_sqllab_viz", "Superset"), - ("can_sqllab_table_viz", "Superset"), # Deprecated permission remove on 3.0.0 ("can_sqllab", "Superset"), - ("can_stop_query", "Superset"), # Deprecated permission remove on 3.0.0 ("can_test_conn", "Superset"), # Deprecated permission remove on 3.0.0 - ("can_search_queries", "Superset"), # Deprecated permission remove on 3.0.0 ("can_activate", "TabStateView"), ("can_get", "TabStateView"), ("can_delete_query", "TabStateView"), @@ -351,30 +345,28 @@ def can_access_all_queries(self) -> bool: def can_access_all_datasources(self) -> bool: """ - Return True if the user can fully access all the Superset datasources, False - otherwise. + Return True if the user can access all the datasources, False otherwise. - :returns: Whether the user can fully access all Superset datasources + :returns: Whether the user can access all the datasources """ return self.can_access("all_datasource_access", "all_datasource_access") def can_access_all_databases(self) -> bool: """ - Return True if the user can fully access all the Superset databases, False - otherwise. + Return True if the user can access all the databases, False otherwise. - :returns: Whether the user can fully access all Superset databases + :returns: Whether the user can access all the databases """ return self.can_access("all_database_access", "all_database_access") def can_access_database(self, database: "Database") -> bool: """ - Return True if the user can fully access the Superset database, False otherwise. + Return True if the user can access the specified database, False otherwise. 
- :param database: The Superset database - :returns: Whether the user can fully access the Superset database + :param database: The database + :returns: Whether the user can access the database """ return ( @@ -385,11 +377,11 @@ def can_access_database(self, database: "Database") -> bool: def can_access_schema(self, datasource: "BaseDatasource") -> bool: """ - Return True if the user can fully access the schema associated with the Superset + Return True if the user can access the schema associated with specified datasource, False otherwise. - :param datasource: The Superset datasource - :returns: Whether the user can fully access the datasource's schema + :param datasource: The datasource + :returns: Whether the user can access the datasource's schema """ return ( @@ -400,11 +392,10 @@ def can_access_schema(self, datasource: "BaseDatasource") -> bool: def can_access_datasource(self, datasource: "BaseDatasource") -> bool: """ - Return True if the user can fully access of the Superset datasource, False - otherwise. + Return True if the user can access the specified datasource, False otherwise. - :param datasource: The Superset datasource - :returns: Whether the user can fully access the Superset datasource + :param datasource: The datasource + :returns: Whether the user can access the datasource """ try: @@ -414,6 +405,24 @@ def can_access_datasource(self, datasource: "BaseDatasource") -> bool: return True + def can_access_dashboard(self, dashboard: "Dashboard") -> bool: + """ + Return True if the user can access the specified dashboard, False otherwise. + + :param dashboard: The dashboard + :returns: Whether the user can access the dashboard + """ + + # pylint: disable=import-outside-toplevel + from superset.dashboards.commands.exceptions import DashboardAccessDeniedError + + try: + self.raise_for_dashboard_access(dashboard) + except DashboardAccessDeniedError: + return False + + return True + @staticmethod def get_datasource_access_error_msg(datasource: "BaseDatasource") -> str: """ @@ -701,6 +710,7 @@ def create_custom_permissions(self) -> None: self.add_permission_view_menu("all_datasource_access", "all_datasource_access") self.add_permission_view_menu("all_database_access", "all_database_access") self.add_permission_view_menu("all_query_access", "all_query_access") + self.add_permission_view_menu("can_csv", "Superset") self.add_permission_view_menu("can_share_dashboard", "Superset") self.add_permission_view_menu("can_share_chart", "Superset") @@ -767,7 +777,6 @@ def sync_role_definitions(self) -> None: self.set_role("Admin", self._is_admin_pvm) self.set_role("Alpha", self._is_alpha_pvm) self.set_role("Gamma", self._is_gamma_pvm) - self.set_role("granter", self._is_granter_pvm) self.set_role("sql_lab", self._is_sql_lab_pvm) # Configure public role @@ -981,19 +990,6 @@ def _is_sql_lab_pvm(self, pvm: PermissionView) -> bool: in self.SQLLAB_EXTRA_PERMISSION_VIEWS ) - def _is_granter_pvm( # pylint: disable=no-self-use - self, pvm: PermissionView - ) -> bool: - """ - Return True if the user can grant the FAB permission/view, False - otherwise. 
- - :param pvm: The FAB permission/view - :returns: Whether the user can grant the FAB permission/view - """ - - return pvm.permission.name in {"can_override_role_permissions", "can_approve"} - def database_after_insert( self, mapper: Mapper, @@ -1995,55 +1991,45 @@ def get_rls_cache_key(self, datasource: "BaseDatasource") -> list[str]: guest_rls = self.get_guest_rls_filters_str(datasource) return guest_rls + rls_str - @staticmethod - def raise_for_user_activity_access(user_id: int) -> None: - if not get_user_id() or ( - not current_app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - and user_id != get_user_id() - ): - raise SupersetSecurityException( - SupersetError( - error_type=SupersetErrorType.USER_ACTIVITY_SECURITY_ACCESS_ERROR, - message="Access to user's activity data is restricted", - level=ErrorLevel.ERROR, - ) - ) - def raise_for_dashboard_access(self, dashboard: "Dashboard") -> None: """ Raise an exception if the user cannot access the dashboard. - This does not check for the required role/permission pairs, - it only concerns itself with entity relationships. + + This does not check for the required role/permission pairs, it only concerns + itself with entity relationships. :param dashboard: Dashboard the user wants access to :raises DashboardAccessDeniedError: If the user cannot access the resource """ + # pylint: disable=import-outside-toplevel from superset import is_feature_enabled from superset.dashboards.commands.exceptions import DashboardAccessDeniedError - def has_rbac_access() -> bool: - if not is_feature_enabled("DASHBOARD_RBAC") or not dashboard.roles: - return True - - return any( - dashboard_role.id - in [user_role.id for user_role in self.get_user_roles()] - for dashboard_role in dashboard.roles - ) - if self.is_guest_user() and dashboard.embedded: - can_access = self.has_guest_access(dashboard) + if self.has_guest_access(dashboard): + return else: - can_access = ( - self.is_admin() - or self.is_owner(dashboard) - or (dashboard.published and has_rbac_access()) - or (not dashboard.published and not dashboard.roles) - ) + if self.is_admin() or self.is_owner(dashboard): + return + + # RBAC and legacy (datasource inferred) access controls. 
+ if is_feature_enabled("DASHBOARD_RBAC") and dashboard.roles: + if dashboard.published and {role.id for role in dashboard.roles} & { + role.id for role in self.get_user_roles() + }: + return + elif ( + not dashboard.published + or not dashboard.datasources + or any( + self.can_access_datasource(datasource) + for datasource in dashboard.datasources + ) + ): + return - if not can_access: - raise DashboardAccessDeniedError() + raise DashboardAccessDeniedError() @staticmethod def can_access_based_on_dashboard(datasource: "BaseDatasource") -> bool: @@ -2085,7 +2071,7 @@ def _get_guest_token_jwt_audience() -> str: @staticmethod def validate_guest_token_resources(resources: GuestTokenResources) -> None: # pylint: disable=import-outside-toplevel - from superset.embedded.dao import EmbeddedDAO + from superset.daos.dashboard import EmbeddedDashboardDAO from superset.embedded_dashboard.commands.exceptions import ( EmbeddedDashboardNotFoundError, ) @@ -2096,7 +2082,7 @@ def validate_guest_token_resources(resources: GuestTokenResources) -> None: # TODO (embedded): remove this check once uuids are rolled out dashboard = Dashboard.get(str(resource["id"])) if not dashboard: - embedded = EmbeddedDAO.find_by_id(str(resource["id"])) + embedded = EmbeddedDashboardDAO.find_by_id(str(resource["id"])) if not embedded: raise EmbeddedDashboardNotFoundError() diff --git a/superset/sqllab/api.py b/superset/sqllab/api.py index 35d110d8fca14..72d1558c046cc 100644 --- a/superset/sqllab/api.py +++ b/superset/sqllab/api.py @@ -25,11 +25,11 @@ from marshmallow import ValidationError from superset import app, is_feature_enabled -from superset.databases.dao import DatabaseDAO +from superset.daos.database import DatabaseDAO +from superset.daos.query import QueryDAO from superset.extensions import event_logger from superset.jinja_context import get_template_processor from superset.models.sql_lab import Query -from superset.queries.dao import QueryDAO from superset.sql_lab import get_sql_results from superset.sqllab.command_status import SqlJsonExecutionStatus from superset.sqllab.commands.estimate import QueryEstimationCommand diff --git a/superset/sqllab/commands/execute.py b/superset/sqllab/commands/execute.py index 09b0769ce21b7..8b854d4971c60 100644 --- a/superset/sqllab/commands/execute.py +++ b/superset/sqllab/commands/execute.py @@ -25,7 +25,7 @@ from superset.commands.base import BaseCommand from superset.common.db_query_status import QueryStatus -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.exceptions import DAOCreateFailedError from superset.errors import SupersetErrorType from superset.exceptions import ( SupersetErrorException, @@ -43,8 +43,8 @@ from superset.sqllab.limiting_factor import LimitingFactor if TYPE_CHECKING: - from superset.databases.dao import DatabaseDAO - from superset.queries.dao import QueryDAO + from superset.daos.database import DatabaseDAO + from superset.daos.query import QueryDAO from superset.sqllab.sql_json_executer import SqlJsonExecutor from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext diff --git a/superset/sqllab/schemas.py b/superset/sqllab/schemas.py index 30274d6de923d..d388dc0353d72 100644 --- a/superset/sqllab/schemas.py +++ b/superset/sqllab/schemas.py @@ -58,8 +58,7 @@ class ExecutePayloadSchema(Schema): class QueryResultSchema(Schema): - changedOn = fields.DateTime() - changed_on = fields.String() + changed_on = fields.DateTime() dbId = fields.Integer() db = fields.String() # pylint: disable=invalid-name endDttm = 
diff --git a/superset/sqllab/api.py b/superset/sqllab/api.py index 35d110d8fca14..72d1558c046cc 100644 --- a/superset/sqllab/api.py +++ b/superset/sqllab/api.py @@ -25,11 +25,11 @@ from marshmallow import ValidationError from superset import app, is_feature_enabled -from superset.databases.dao import DatabaseDAO +from superset.daos.database import DatabaseDAO +from superset.daos.query import QueryDAO from superset.extensions import event_logger from superset.jinja_context import get_template_processor from superset.models.sql_lab import Query -from superset.queries.dao import QueryDAO from superset.sql_lab import get_sql_results from superset.sqllab.command_status import SqlJsonExecutionStatus from superset.sqllab.commands.estimate import QueryEstimationCommand
diff --git a/superset/sqllab/commands/execute.py b/superset/sqllab/commands/execute.py index 09b0769ce21b7..8b854d4971c60 100644 --- a/superset/sqllab/commands/execute.py +++ b/superset/sqllab/commands/execute.py @@ -25,7 +25,7 @@ from superset.commands.base import BaseCommand from superset.common.db_query_status import QueryStatus -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.exceptions import DAOCreateFailedError from superset.errors import SupersetErrorType from superset.exceptions import ( SupersetErrorException, @@ -43,8 +43,8 @@ from superset.sqllab.limiting_factor import LimitingFactor if TYPE_CHECKING: - from superset.databases.dao import DatabaseDAO - from superset.queries.dao import QueryDAO + from superset.daos.database import DatabaseDAO + from superset.daos.query import QueryDAO from superset.sqllab.sql_json_executer import SqlJsonExecutor from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext
diff --git a/superset/sqllab/schemas.py b/superset/sqllab/schemas.py index 30274d6de923d..d388dc0353d72 100644 --- a/superset/sqllab/schemas.py +++ b/superset/sqllab/schemas.py @@ -58,8 +58,7 @@ class ExecutePayloadSchema(Schema): class QueryResultSchema(Schema): - changedOn = fields.DateTime() - changed_on = fields.String() + changed_on = fields.DateTime() dbId = fields.Integer() db = fields.String() # pylint: disable=invalid-name endDttm = fields.Float()
diff --git a/superset/sqllab/sql_json_executer.py b/superset/sqllab/sql_json_executer.py index 124f477e9625f..a25b39ad0baf7 100644 --- a/superset/sqllab/sql_json_executer.py +++ b/superset/sqllab/sql_json_executer.py @@ -37,7 +37,7 @@ from superset.utils.dates import now_as_float if TYPE_CHECKING: - from superset.queries.dao import QueryDAO + from superset.daos.query import QueryDAO from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext QueryStatus = utils.QueryStatus
diff --git a/superset/tags/api.py b/superset/tags/api.py index 859acdb5373f7..f9aa7f7be9cae 100644 --- a/superset/tags/api.py +++ b/superset/tags/api.py @@ -22,6 +22,7 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod +from superset.daos.tag import TagDAO from superset.extensions import event_logger from superset.tags.commands.create import CreateCustomTagCommand from superset.tags.commands.delete import DeleteTaggedObjectCommand, DeleteTagsCommand @@ -33,7 +34,6 @@ TagInvalidError, TagNotFoundError, ) -from superset.tags.dao import TagDAO from superset.tags.models import ObjectTypes, Tag from superset.tags.schemas import ( delete_tags_schema,
diff --git a/superset/tags/commands/create.py b/superset/tags/commands/create.py index 20327b54f01cd..7e9f040015e2b 100644 --- a/superset/tags/commands/create.py +++ b/superset/tags/commands/create.py @@ -17,10 +17,10 @@ import logging from superset.commands.base import BaseCommand, CreateMixin -from superset.dao.exceptions import DAOCreateFailedError +from superset.daos.exceptions import DAOCreateFailedError +from superset.daos.tag import TagDAO from superset.tags.commands.exceptions import TagCreateFailedError, TagInvalidError from superset.tags.commands.utils import to_object_type -from superset.tags.dao import TagDAO from superset.tags.models import ObjectTypes logger = logging.getLogger(__name__)
diff --git a/superset/tags/commands/delete.py b/superset/tags/commands/delete.py index 08189b5ac55d8..4b92e40ff5820 100644 --- a/superset/tags/commands/delete.py +++ b/superset/tags/commands/delete.py @@ -17,7 +17,8 @@ import logging from superset.commands.base import BaseCommand -from superset.dao.exceptions import DAODeleteFailedError +from superset.daos.exceptions import DAODeleteFailedError +from superset.daos.tag import TagDAO from superset.tags.commands.exceptions import ( TagDeleteFailedError, TaggedObjectDeleteFailedError, @@ -26,7 +27,6 @@ TagNotFoundError, ) from superset.tags.commands.utils import to_object_type -from superset.tags.dao import TagDAO from superset.tags.models import ObjectTypes from superset.views.base import DeleteMixin
diff --git a/superset/tags/models.py b/superset/tags/models.py index bb845303ffd20..d7feb4f48c4b1 100644 --- a/superset/tags/models.py +++ b/superset/tags/models.py @@ -77,6 +77,10 @@ class Tag(Model, AuditMixinNullable): name = Column(String(250), unique=True) type = Column(Enum(TagTypes)) + objects = relationship( + "TaggedObject", back_populates="tag", overlaps="objects,tags" + ) + class TaggedObject(Model, AuditMixinNullable): @@ -93,7 +97,7 @@ class TaggedObject(Model, AuditMixinNullable): ) object_type = Column(Enum(ObjectTypes)) - tag = relationship("Tag", backref="objects") + tag = relationship("Tag", back_populates="objects", overlaps="tags") def get_tag(name: str, session: Session, type_: TagTypes) -> Tag:
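The `superset/tags/models.py` hunk above swaps an implicit `backref` for an explicit `back_populates` pair; the `overlaps` arguments presumably quiet SQLAlchemy's warning about these relationships sharing rows with the separate many-to-many `tags` collection on the tagged models. A minimal, self-contained sketch of the `back_populates` pattern (illustrative models, not the Superset ones):

```python
# Minimal standalone demo of back_populates: both directions are declared
# explicitly instead of one side being generated by backref.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()


class Tag(Base):
    __tablename__ = "tag"
    id = Column(Integer, primary_key=True)
    name = Column(String(250), unique=True)
    # Explicit reverse side; pairs with TaggedObject.tag below.
    objects = relationship("TaggedObject", back_populates="tag")


class TaggedObject(Base):
    __tablename__ = "tagged_object"
    id = Column(Integer, primary_key=True)
    tag_id = Column(Integer, ForeignKey("tag.id"))
    object_id = Column(Integer)
    tag = relationship("Tag", back_populates="objects")


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    tag = Tag(name="owner:1")
    obj = TaggedObject(object_id=42, tag=tag)  # assign one side...
    session.add(obj)
    assert obj in tag.objects  # ...and the other side syncs in memory
    session.commit()
```

Assigning one side keeps the other side in sync before any flush, which is what `backref` did implicitly; declaring both sides just makes the pairing visible on each model.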
diff --git a/superset/tasks/scheduler.py b/superset/tasks/scheduler.py index b3efa240fa52d..7e0422b0019dd 100644 --- a/superset/tasks/scheduler.py +++ b/superset/tasks/scheduler.py @@ -22,11 +22,11 @@ from superset import app, is_feature_enabled from superset.commands.exceptions import CommandException +from superset.daos.report import ReportScheduleDAO from superset.extensions import celery_app from superset.reports.commands.exceptions import ReportScheduleUnexpectedError from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand from superset.reports.commands.log_prune import AsyncPruneReportScheduleLogCommand -from superset.reports.dao import ReportScheduleDAO from superset.tasks.cron_util import cron_schedule_window from superset.utils.celery import session_scope from superset.utils.core import LoggerLevel
diff --git a/superset/templates/email/role_extended.txt b/superset/templates/email/role_extended.txt deleted file mode 100644 index 463fb32c9c46e..0000000000000 --- a/superset/templates/email/role_extended.txt +++ /dev/null @@ -1,32 +0,0 @@
-
-Dear {{ user.username }},
-
-    {{ granter.username }} has extended the role {{ role.name }} to include
-    {{datasource.full_name}} and granted you access to it.
-
-To see all your permissions please visit your
-    profile page.
-
-Regards, Superset Admin.
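Both this deleted template and `role_granted.txt` below were rendered by `notify_user_about_perm_udate` in `superset/utils/core.py` (also removed in this diff) and mailed via `send_email_smtp`. A rough standalone approximation of that rendering step, using `jinja2` directly — the template string and `SimpleNamespace` objects are placeholders, not the exact deleted markup or the real models:

```python
# Hypothetical stand-ins: SimpleNamespace objects play the role of the real
# User/Role/BaseDatasource instances the deleted helper passed in.
from types import SimpleNamespace

from jinja2 import Template

template = Template(
    "Dear {{ user.username }},\n"
    "{{ granter.username }} has granted you the role {{ role.name }} "
    "that gives access to the {{ datasource.full_name }}."
)
body = template.render(
    user=SimpleNamespace(username="alice"),
    granter=SimpleNamespace(username="admin"),
    role=SimpleNamespace(name="Gamma"),
    datasource=SimpleNamespace(full_name="examples.birth_names"),
)
print(body)  # the rendered text was then handed to send_email_smtp
```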
diff --git a/superset/templates/email/role_granted.txt b/superset/templates/email/role_granted.txt deleted file mode 100644 index 312a04947387d..0000000000000 --- a/superset/templates/email/role_granted.txt +++ /dev/null @@ -1,36 +0,0 @@
-
-Dear {{ user.username }},
-
-    {{ granter.username }} has granted you the role {{ role.name }}
-    that gives access to the
-    {{datasource.full_name}}
-
-In addition to that role grants you access to the: {{ role.permissions }}.
-
-To see all your permissions please visit your
-    profile page.
-
-Regards, Superset Admin.
diff --git a/superset/templates/superset/basic.html b/superset/templates/superset/basic.html index fdd2e8e0de52d..0a4d24cc058e6 100644 --- a/superset/templates/superset/basic.html +++ b/superset/templates/superset/basic.html @@ -44,7 +44,14 @@ > {% endfor %} - + + + + + + + + {{ css_bundle("theme") }}
diff --git a/superset/templates/superset/request_access.html b/superset/templates/superset/request_access.html deleted file mode 100644 index e157cb8136388..0000000000000 --- a/superset/templates/superset/request_access.html +++ /dev/null @@ -1,38 +0,0 @@ -{# - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. -#} -{% extends "superset/basic.html" %} -{% block title %}{{ _("No Access!") }}{% endblock %} -{% block body %}
      - {% include "superset/flash_wrapper.html" %} -

      - {{ _("You do not have permissions to access the datasource(s): %(name)s.", - name=datasource_names) - }} -

      -
      - - -
      -
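The catalog updates below change only the casing of the English source strings (csv to CSV). That matters because gettext lookups are exact string matches: each `.po`/`.json` entry must mirror the source string character for character, or the lookup silently falls back to the untranslated text. The `%(rows)d`-style placeholders survive translation and are filled at call time; a minimal stand-in for what Flask-Babel's `gettext` does once the catalog string is resolved (illustrative helper, not Flask-Babel's implementation):

```python
# Illustrative substitute for flask_babel.gettext: after the (possibly
# translated) catalog string is looked up, named %-placeholders are filled
# from keyword arguments exactly like this.
def gettext(message: str, **variables: object) -> str:
    return message % variables if variables else message


msg = gettext(
    "The number of results displayed is limited to %(rows)d. Please add "
    "additional limits/filters, download to CSV, or contact an admin to "
    "see more rows up to the %(limit)d limit.",
    rows=1000,
    limit=100000,
)
assert "limited to 1000" in msg
```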
diff --git a/superset/translations/en/LC_MESSAGES/messages.json b/superset/translations/en/LC_MESSAGES/messages.json index c1f7da11a1767..2fc7407bf3322 100644 --- a/superset/translations/en/LC_MESSAGES/messages.json +++ b/superset/translations/en/LC_MESSAGES/messages.json @@ -201,7 +201,7 @@ "" ], "A timeout occurred while executing the query.": [""], - "A timeout occurred while generating a csv.": [""], + "A timeout occurred while generating a CSV.": [""], "A timeout occurred while generating a dataframe.": [""], "A timeout occurred while taking a screenshot.": [""], "A valid color scheme is required": [""], @@ -346,7 +346,7 @@ "Allow CREATE TABLE AS option in SQL Lab": [""], "Allow CREATE VIEW AS": [""], "Allow CREATE VIEW AS option in SQL Lab": [""], - "Allow Csv Upload": [""], + "Allow CSV Upload": [""], "Allow DML": [""], "Allow columns to be rearranged": [""], "Allow creation of new tables based on queries": [""], @@ -1139,7 +1139,7 @@ "Database \"%(database_name)s\" schema \"%(schema_name)s\" is not allowed for columnar uploads. Please contact your Superset Admin.": [ "" ], - "Database \"%(database_name)s\" schema \"%(schema_name)s\" is not allowed for csv uploads. Please contact your Superset Admin.": [ "" ], + "Database \"%(database_name)s\" schema \"%(schema_name)s\" is not allowed for CSV uploads. Please contact your Superset Admin.": [ "" ], "Database \"%(database_name)s\" schema \"%(schema_name)s\" is not allowed for excel uploads. Please contact your Superset Admin.": [ @@ -1825,7 +1825,7 @@ "If duplicate columns are not overridden, they will be presented as \"X.1, X.2 ...X.x\"": [ "" ], - "If selected, please set the schemas allowed for csv upload in Extra.": [ "" ], + "If selected, please set the schemas allowed for CSV upload in Extra.": [ "" ], "If table exists do one of the following: Fail (do nothing), Replace (drop and recreate table) or Append (insert data).": [ @@ -2758,7 +2758,7 @@ "Report Schedule could not be deleted.": [""], "Report Schedule could not be updated.": [""], "Report Schedule delete failed.": [""], - "Report Schedule execution failed when generating a csv.": [""], + "Report Schedule execution failed when generating a CSV.": [""], "Report Schedule execution failed when generating a dataframe.": [""], "Report Schedule execution failed when generating a screenshot.": [""], "Report Schedule execution got an unexpected error.": [""], @@ -3513,10 +3513,10 @@ "The number of hours, negative or positive, to shift the time column. This can be used to move UTC time to local time.": [ "" ], - "The number of results displayed is limited to %(rows)d by the configuration DISPLAY_MAX_ROWS. Please add additional limits/filters or download to csv to see more rows up to the %(limit)d limit.": [ "" ], + "The number of results displayed is limited to %(rows)d by the configuration DISPLAY_MAX_ROWS. Please add additional limits/filters or download to CSV to see more rows up to the %(limit)d limit.": [ "" ], - "The number of results displayed is limited to %(rows)d.
Please add additional limits/filters, download to CSV, or contact an admin to see more rows up to the %(limit)d limit.": [ "" ], "The number of rows displayed is limited to %(rows)d by the dropdown.": [ @@ -4424,7 +4424,7 @@ "You don't have the rights to alter this title.": [""], "You don't have the rights to create a chart": [""], "You don't have the rights to create a dashboard": [""], - "You don't have the rights to download as csv": [""], + "You don't have the rights to download as CSV": [""], "You have no permission to approve this request": [""], "You have removed this filter.": [""], "You have unsaved changes.": [""], diff --git a/superset/translations/en/LC_MESSAGES/messages.po b/superset/translations/en/LC_MESSAGES/messages.po index 76763280ac88b..660a8451944d7 100644 --- a/superset/translations/en/LC_MESSAGES/messages.po +++ b/superset/translations/en/LC_MESSAGES/messages.po @@ -872,7 +872,7 @@ msgid "A timeout occurred while executing the query." msgstr "" #: superset/reports/commands/exceptions.py:238 -msgid "A timeout occurred while generating a csv." +msgid "A timeout occurred while generating a CSV." msgstr "" #: superset/reports/commands/exceptions.py:243 @@ -1554,7 +1554,7 @@ msgid "Allow CREATE VIEW AS option in SQL Lab" msgstr "" #: superset/views/database/mixins.py:198 -msgid "Allow Csv Upload" +msgid "Allow CSV Upload" msgstr "" #: superset-frontend/src/features/databases/DatabaseModal/ExtraOptions.tsx:164 @@ -4847,7 +4847,7 @@ msgstr "" #, python-format msgid "" "Database \"%(database_name)s\" schema \"%(schema_name)s\" is not allowed " -"for csv uploads. Please contact your Superset Admin." +"for CSV uploads. Please contact your Superset Admin." msgstr "" #: superset/views/database/views.py:321 @@ -7685,7 +7685,7 @@ msgid "" msgstr "" #: superset/views/database/mixins.py:177 -msgid "If selected, please set the schemas allowed for csv upload in Extra." +msgid "If selected, please set the schemas allowed for CSV upload in Extra." msgstr "" #: superset/views/database/forms.py:328 superset/views/database/forms.py:459 @@ -11702,7 +11702,7 @@ msgid "Report Schedule delete failed." msgstr "" #: superset/reports/commands/exceptions.py:159 -msgid "Report Schedule execution failed when generating a csv." +msgid "Report Schedule execution failed when generating a CSV." msgstr "" #: superset/reports/commands/exceptions.py:163 @@ -14728,14 +14728,14 @@ msgstr "" msgid "" "The number of results displayed is limited to %(rows)d by the " "configuration DISPLAY_MAX_ROWS. Please add additional limits/filters or " -"download to csv to see more rows up to the %(limit)d limit." +"download to CSV to see more rows up to the %(limit)d limit." msgstr "" #: superset-frontend/src/SqlLab/components/ResultSet/index.tsx:307 #, python-format msgid "" "The number of results displayed is limited to %(rows)d. Please add " -"additional limits/filters, download to csv, or contact an admin to see " +"additional limits/filters, download to CSV, or contact an admin to see " "more rows up to the %(limit)d limit." 
msgstr "" @@ -17911,7 +17911,7 @@ msgid "You don't have the rights to create a dashboard" msgstr "" #: superset/views/core.py:649 -msgid "You don't have the rights to download as csv" +msgid "You don't have the rights to download as CSV" msgstr "" #: superset/views/core.py:425 diff --git a/superset/utils/core.py b/superset/utils/core.py index 036414ef1d9d3..125a406bf5d6a 100644 --- a/superset/utils/core.py +++ b/superset/utils/core.py @@ -59,9 +59,9 @@ import sqlalchemy as sa from cryptography.hazmat.backends import default_backend from cryptography.x509 import Certificate, load_pem_x509_certificate -from flask import current_app, flash, g, Markup, render_template, request +from flask import current_app, flash, g, Markup, request from flask_appbuilder import SQLA -from flask_appbuilder.security.sqla.models import Role, User +from flask_appbuilder.security.sqla.models import User from flask_babel import gettext as __ from flask_babel.speaklater import LazyString from pandas.api.types import infer_dtype @@ -236,7 +236,6 @@ class FilterOperator(str, Enum): IS_NOT_NULL = "IS NOT NULL" IN = "IN" NOT_IN = "NOT IN" - REGEX = "REGEX" IS_TRUE = "IS TRUE" IS_FALSE = "IS FALSE" TEMPORAL_RANGE = "TEMPORAL_RANGE" @@ -253,7 +252,6 @@ class FilterStringOperators(str, Enum): NOT_IN = ("NOT_IN",) ILIKE = ("ILIKE",) LIKE = ("LIKE",) - REGEX = ("REGEX",) IS_NOT_NULL = ("IS_NOT_NULL",) IS_NULL = ("IS_NULL",) LATEST_PARTITION = ("LATEST_PARTITION",) @@ -852,32 +850,6 @@ def ping_connection(connection: Connection, branch: bool) -> None: connection.should_close_with_result = save_should_close_with_result -def notify_user_about_perm_udate( # pylint: disable=too-many-arguments - granter: User, - user: User, - role: Role, - datasource: BaseDatasource, - tpl_name: str, - config: dict[str, Any], -) -> None: - msg = render_template( - tpl_name, granter=granter, user=user, role=role, datasource=datasource - ) - logger.info(msg) - subject = __( - "[Superset] Access to the datasource %(name)s was granted", - name=datasource.full_name, - ) - send_email_smtp( - user.email, - subject, - msg, - config, - bcc=granter.email, - dryrun=not config["EMAIL_NOTIFICATIONS"], - ) - - def send_email_smtp( # pylint: disable=invalid-name,too-many-arguments,too-many-locals to: str, subject: str, diff --git a/superset/utils/decorators.py b/superset/utils/decorators.py index 4ecd2eca98679..9c21e3b5ec5a5 100644 --- a/superset/utils/decorators.py +++ b/superset/utils/decorators.py @@ -19,13 +19,10 @@ import time from collections.abc import Iterator from contextlib import contextmanager -from functools import wraps from typing import Any, Callable, TYPE_CHECKING from flask import current_app, Response -from superset import is_feature_enabled -from superset.dashboards.commands.exceptions import DashboardAccessDeniedError from superset.utils import core as utils from superset.utils.dates import now_as_float @@ -114,27 +111,3 @@ def wrapped(*args: Any, **kwargs: Any) -> Any: def on_security_exception(self: Any, ex: Exception) -> Response: return self.response(403, **{"message": utils.error_msg_from_exception(ex)}) - - -# noinspection PyPackageRequirements -def check_dashboard_access(on_error: Callable[[str], Any]) -> Callable[..., Any]: - def decorator(f: Callable[..., Any]) -> Callable[..., Any]: - @wraps(f) - def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: - # pylint: disable=import-outside-toplevel - from superset.models.dashboard import Dashboard - - dashboard = Dashboard.get(str(kwargs["dashboard_id_or_slug"])) - if 
is_feature_enabled("DASHBOARD_RBAC"): - try: - current_app.appbuilder.sm.raise_for_dashboard_access(dashboard) - except DashboardAccessDeniedError as ex: - return on_error(str(ex)) - except Exception as exception: - raise exception - - return f(self, *args, dashboard=dashboard, **kwargs) - - return wrapper - - return decorator diff --git a/superset/views/__init__.py b/superset/views/__init__.py index b5a21c77f0b32..1b8d1b8f09567 100644 --- a/superset/views/__init__.py +++ b/superset/views/__init__.py @@ -15,7 +15,6 @@ # specific language governing permissions and limitations # under the License. from . import ( - access_requests, alerts, api, base, diff --git a/superset/views/access_requests.py b/superset/views/access_requests.py deleted file mode 100644 index 063ef5e0be91b..0000000000000 --- a/superset/views/access_requests.py +++ /dev/null @@ -1,59 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -from flask import current_app as app -from flask_appbuilder.hooks import before_request -from flask_appbuilder.models.sqla.interface import SQLAInterface -from flask_babel import lazy_gettext as _ -from werkzeug.exceptions import NotFound - -from superset.constants import RouteMethod -from superset.views.base import DeleteMixin, SupersetModelView -from superset.views.core import DAR - - -class AccessRequestsModelView( # pylint: disable=too-many-ancestors - SupersetModelView, - DeleteMixin, -): - datamodel = SQLAInterface(DAR) - include_route_methods = RouteMethod.CRUD_SET - list_columns = [ - "username", - "user_roles", - "datasource_link", - "roles_with_datasource", - "created_on", - ] - order_columns = ["created_on"] - base_order = ("changed_on", "desc") - label_columns = { - "username": _("User"), - "user_roles": _("User Roles"), - "database": _("Database URL"), - "datasource_link": _("Datasource"), - "roles_with_datasource": _("Roles to grant"), - "created_on": _("Created On"), - } - - @staticmethod - def is_enabled() -> bool: - return bool(app.config["ENABLE_ACCESS_REQUEST"]) - - @before_request - def ensure_enabled(self) -> None: - if not self.is_enabled(): - raise NotFound() diff --git a/superset/views/base.py b/superset/views/base.py index 3a72096ac2fc1..e66fea0a48c21 100644 --- a/superset/views/base.py +++ b/superset/views/base.py @@ -58,6 +58,7 @@ conf, db, get_feature_flags, + is_feature_enabled, security_manager, ) from superset.commands.exceptions import CommandException, CommandInvalidError @@ -89,7 +90,6 @@ "SUPERSET_DASHBOARD_PERIODICAL_REFRESH_WARNING_MESSAGE", "DISABLE_DATASET_SOURCE_EDIT", "ENABLE_JAVASCRIPT_CONTROLS", - "ENABLE_BROAD_ACTIVITY_ACCESS", "DEFAULT_SQLLAB_LIMIT", "DEFAULT_VIZ_TYPE", "SQL_MAX_ROW", @@ -119,6 +119,7 @@ "ALERT_REPORTS_DEFAULT_CRON_VALUE", "ALERT_REPORTS_DEFAULT_RETENTION", 
"ALERT_REPORTS_DEFAULT_WORKING_TIMEOUT", + "NATIVE_FILTER_DEFAULT_ROW_LIMIT", ) logger = logging.getLogger(__name__) @@ -383,13 +384,13 @@ def menu_data(user: User) -> dict[str, Any]: "show_language_picker": len(languages.keys()) > 1, "user_is_anonymous": user.is_anonymous, "user_info_url": None - if appbuilder.app.config["MENU_HIDE_USER_INFO"] + if is_feature_enabled("MENU_HIDE_USER_INFO") else appbuilder.get_url_for_userinfo, "user_logout_url": appbuilder.get_url_for_logout, "user_login_url": appbuilder.get_url_for_login, "user_profile_url": None - if user.is_anonymous or appbuilder.app.config["MENU_HIDE_USER_INFO"] - else f"/superset/profile/{user.username}", + if user.is_anonymous or is_feature_enabled("MENU_HIDE_USER_INFO") + else "/superset/profile/", "locale": session.get("locale", "en"), }, } diff --git a/superset/views/core.py b/superset/views/core.py index 3d37b400c5821..f584901316b7d 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -18,28 +18,20 @@ from __future__ import annotations import logging -import re -from contextlib import closing from datetime import datetime -from typing import Any, Callable, cast, Optional +from typing import Any, Callable, cast from urllib import parse -import backoff -import pandas as pd import simplejson as json from flask import abort, flash, g, redirect, render_template, request, Response from flask_appbuilder import expose -from flask_appbuilder.models.sqla.interface import SQLAInterface from flask_appbuilder.security.decorators import ( has_access, has_access_api, permission_name, ) -from flask_appbuilder.security.sqla import models as ab_models from flask_babel import gettext as __, lazy_gettext as _ -from sqlalchemy import and_, or_ -from sqlalchemy.exc import DBAPIError, NoSuchModuleError, SQLAlchemyError -from sqlalchemy.orm.session import Session +from sqlalchemy.exc import SQLAlchemyError from superset import ( app, @@ -48,96 +40,43 @@ db, event_logger, is_feature_enabled, - results_backend, - results_backend_use_msgpack, security_manager, - sql_lab, - viz, ) from superset.charts.commands.exceptions import ChartNotFoundError -from superset.charts.dao import ChartDAO from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType -from superset.common.db_query_status import QueryStatus from superset.connectors.base.models import BaseDatasource -from superset.connectors.sqla.models import ( - AnnotationDatasource, - SqlaTable, - SqlMetric, - TableColumn, -) -from superset.constants import QUERY_EARLY_CANCEL_KEY +from superset.connectors.sqla.models import SqlaTable +from superset.daos.chart import ChartDAO +from superset.daos.database import DatabaseDAO +from superset.daos.datasource import DatasourceDAO from superset.dashboards.commands.exceptions import DashboardAccessDeniedError from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand -from superset.dashboards.dao import DashboardDAO from superset.dashboards.permalink.commands.get import GetDashboardPermalinkCommand from superset.dashboards.permalink.exceptions import DashboardPermalinkGetFailedError -from superset.databases.commands.exceptions import DatabaseInvalidError -from superset.databases.dao import DatabaseDAO -from superset.databases.filters import DatabaseFilter -from superset.databases.utils import make_url_safe from superset.datasets.commands.exceptions import DatasetNotFoundError -from superset.datasource.dao import DatasourceDAO -from superset.errors import ErrorLevel, SupersetError, SupersetErrorType -from 
superset.exceptions import ( - CacheLoadError, - CertificateException, - DatabaseNotFound, - SerializationError, - SupersetCancelQueryException, - SupersetErrorException, - SupersetException, - SupersetGenericErrorException, - SupersetSecurityException, - SupersetTimeoutException, -) +from superset.exceptions import CacheLoadError, DatabaseNotFound, SupersetException from superset.explore.form_data.commands.create import CreateFormDataCommand from superset.explore.form_data.commands.get import GetFormDataCommand from superset.explore.form_data.commands.parameters import CommandParameters from superset.explore.permalink.commands.get import GetExplorePermalinkCommand from superset.explore.permalink.exceptions import ExplorePermalinkGetFailedError from superset.extensions import async_query_manager, cache_manager -from superset.jinja_context import get_template_processor -from superset.models.core import Database, FavStar +from superset.models.core import Database from superset.models.dashboard import Dashboard -from superset.models.datasource_access_request import DatasourceAccessRequest from superset.models.slice import Slice from superset.models.sql_lab import Query, TabState from superset.models.user_attributes import UserAttribute -from superset.queries.dao import QueryDAO -from superset.security.analytics_db_safety import check_sqlalchemy_uri -from superset.sql_lab import get_sql_results -from superset.sql_parse import ParsedQuery -from superset.sql_validators import get_validator_by_name -from superset.sqllab.command_status import SqlJsonExecutionStatus -from superset.sqllab.commands.execute import CommandResult, ExecuteSqlCommand -from superset.sqllab.exceptions import ( - QueryIsForbiddenToAccessException, - SqlLabException, -) -from superset.sqllab.execution_context_convertor import ExecutionContextConvertor -from superset.sqllab.limiting_factor import LimitingFactor -from superset.sqllab.query_render import SqlQueryRenderImpl -from superset.sqllab.sql_json_executer import ( - ASynchronousSqlJsonExecutor, - SqlJsonExecutor, - SynchronousSqlJsonExecutor, -) -from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext -from superset.sqllab.utils import apply_display_max_row_configuration_if_require -from superset.sqllab.validators import CanAccessQueryValidatorImpl from superset.superset_typing import FlaskResponse from superset.tasks.async_queries import load_explore_json_into_cache -from superset.utils import core as utils, csv +from superset.utils import core as utils from superset.utils.async_query_manager import AsyncQueryTokenException from superset.utils.cache import etag_cache from superset.utils.core import ( - apply_max_row_limit, DatasourceType, get_user_id, + get_username, ReservedUrlParameters, ) -from superset.utils.dates import now_as_float -from superset.utils.decorators import check_dashboard_access from superset.views.base import ( api, BaseSupersetView, @@ -149,19 +88,13 @@ get_error_msg, handle_api_exception, json_error_response, - json_errors_response, json_success, - validate_sqlatable, ) -from superset.views.log.dao import LogDAO -from superset.views.sql_lab.schemas import SqlJsonPayloadSchema from superset.views.utils import ( - _deserialize_results_payload, bootstrap_user_data, check_datasource_perms, check_explore_cache_perms, check_resource_permissions, - check_slice_perms, get_dashboard_extra_filters, get_datasource_info, get_form_data, @@ -175,7 +108,6 @@ config = app.config SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = 
config["SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT"] stats_logger = config["STATS_LOGGER"] -DAR = DatasourceAccessRequest logger = logging.getLogger(__name__) DATABASE_KEYS = [ @@ -193,7 +125,6 @@ "disable_data_preview", ] -DASHBOARD_LIST_URL = "/dashboard/list/" DATASOURCE_MISSING_ERR = __("The data source seems to have been deleted") USER_MISSING_ERR = __("The user seems to have been deleted") PARAMETER_MISSING_ERR = __( @@ -210,223 +141,6 @@ class Superset(BaseSupersetView): # pylint: disable=too-many-public-methods logger = logging.getLogger(__name__) - @has_access_api - @event_logger.log_this - @expose("/datasources/") - @deprecated(new_target="api/v1/dataset/") - def datasources(self) -> FlaskResponse: - return self.json_response( - sorted( - [ - datasource.short_data - for datasource in security_manager.get_user_datasources() - if datasource.short_data.get("name") - ], - key=lambda datasource: datasource["name"], - ) - ) - - @has_access_api - @event_logger.log_this - @expose("/override_role_permissions/", methods=("POST",)) - @deprecated() - def override_role_permissions(self) -> FlaskResponse: - """Updates the role with the give datasource permissions. - - Permissions not in the request will be revoked. This endpoint should - be available to admins only. Expects JSON in the format: - { - 'role_name': '{role_name}', - 'database': [{ - 'datasource_type': '{table}', - 'name': '{database_name}', - 'schema': [{ - 'name': '{schema_name}', - 'datasources': ['{datasource name}, {datasource name}'] - }] - }] - } - """ - data = request.get_json(force=True) - role_name = data["role_name"] - databases = data["database"] - - db_ds_names = set() - for dbs in databases: - for schema in dbs["schema"]: - for ds_name in schema["datasources"]: - fullname = utils.get_datasource_full_name( - dbs["name"], ds_name, schema=schema["name"] - ) - db_ds_names.add(fullname) - - existing_datasources = SqlaTable.get_all_datasources(db.session) - datasources = [d for d in existing_datasources if d.full_name in db_ds_names] - role = security_manager.find_role(role_name) - # remove all permissions - role.permissions = [] - # grant permissions to the list of datasources - granted_perms = [] - for datasource in datasources: - view_menu_perm = security_manager.find_permission_view_menu( - view_menu_name=datasource.perm, permission_name="datasource_access" - ) - # prevent creating empty permissions - if view_menu_perm and view_menu_perm.view_menu: - role.permissions.append(view_menu_perm) - granted_perms.append(view_menu_perm.view_menu.name) - db.session.commit() - return self.json_response( - {"granted": granted_perms, "requested": list(db_ds_names)}, status=201 - ) - - @has_access - @event_logger.log_this - @expose("/request_access/", methods=("POST",)) - @deprecated() - def request_access(self) -> FlaskResponse: - datasources = set() - dashboard_id = request.args.get("dashboard_id") - if dashboard_id: - dash = db.session.query(Dashboard).filter_by(id=int(dashboard_id)).one() - datasources |= dash.datasources - datasource_id = request.args.get("datasource_id") - datasource_type = request.args.get("datasource_type") - if datasource_id and datasource_type: - ds_class = DatasourceDAO.sources.get(datasource_type) - datasource = ( - db.session.query(ds_class).filter_by(id=int(datasource_id)).one() - ) - datasources.add(datasource) - - has_access_ = all( - datasource and security_manager.can_access_datasource(datasource) - for datasource in datasources - ) - if has_access_: - return redirect(f"/superset/dashboard/{dashboard_id}") 
- - if request.args.get("action") == "go": - for datasource in datasources: - access_request = DAR( - datasource_id=datasource.id, datasource_type=datasource.type - ) - db.session.add(access_request) - db.session.commit() - flash(__("Access was requested"), "info") - return redirect("/") - - return self.render_template( - "superset/request_access.html", - datasources=datasources, - datasource_names=", ".join([o.name for o in datasources]), - ) - - @has_access - @event_logger.log_this - @expose("/approve", methods=("POST",)) - @deprecated() - def approve(self) -> FlaskResponse: # pylint: disable=too-many-locals,no-self-use - def clean_fulfilled_requests(session: Session) -> None: - for dar in session.query(DAR).all(): - datasource = DatasourceDAO.get_datasource( - session, DatasourceType(dar.datasource_type), dar.datasource_id - ) - if not datasource or security_manager.can_access_datasource(datasource): - # Dataset does not exist anymore - session.delete(dar) - session.commit() - - datasource_type = request.args["datasource_type"] - datasource_id = request.args["datasource_id"] - created_by_username = request.args.get("created_by") - role_to_grant = request.args.get("role_to_grant") - role_to_extend = request.args.get("role_to_extend") - - session = db.session - datasource = DatasourceDAO.get_datasource( - session, DatasourceType(datasource_type), int(datasource_id) - ) - - if not datasource: - flash(DATASOURCE_MISSING_ERR, "alert") - return json_error_response(DATASOURCE_MISSING_ERR) - - requested_by = security_manager.find_user(username=created_by_username) - if not requested_by: - flash(USER_MISSING_ERR, "alert") - return json_error_response(USER_MISSING_ERR) - - requests = ( - session.query(DAR) - .filter( # pylint: disable=comparison-with-callable - DAR.datasource_id == datasource_id, - DAR.datasource_type == datasource_type, - DAR.created_by_fk == requested_by.id, - ) - .all() - ) - - if not requests: - err = __("The access requests seem to have been deleted") - flash(err, "alert") - return json_error_response(err) - - # check if you can approve - if security_manager.can_access_all_datasources() or security_manager.is_owner( - datasource - ): - # can by done by admin only - if role_to_grant: - role = security_manager.find_role(role_to_grant) - requested_by.roles.append(role) - msg = __( - "%(user)s was granted the role %(role)s that gives access " - "to the %(datasource)s", - user=requested_by.username, - role=role_to_grant, - datasource=datasource.full_name, - ) - utils.notify_user_about_perm_udate( - g.user, - requested_by, - role, - datasource, - "email/role_granted.txt", - app.config, - ) - flash(msg, "info") - - if role_to_extend: - perm_view = security_manager.find_permission_view_menu( - "email/datasource_access", datasource.perm - ) - role = security_manager.find_role(role_to_extend) - security_manager.add_permission_role(role, perm_view) - msg = __( - "Role %(r)s was extended to provide the access to " - "the datasource %(ds)s", - r=role_to_extend, - ds=datasource.full_name, - ) - utils.notify_user_about_perm_udate( - g.user, - requested_by, - role, - datasource, - "email/role_extended.txt", - app.config, - ) - flash(msg, "info") - clean_fulfilled_requests(session) - else: - flash(__("You have no permission to approve this request"), "danger") - return redirect("/accessrequestsmodelview/list/") - for request_ in requests: - session.delete(request_) - session.commit() - return redirect("/accessrequestsmodelview/list/") - @has_access @event_logger.log_this @expose("/slice//") 
@@ -500,65 +214,6 @@ def generate_json( payload = viz_obj.get_payload() return self.send_data_payload_response(viz_obj, payload) - @event_logger.log_this - @api - @has_access_api - @expose("/slice_json/<int:slice_id>") - @etag_cache() - @check_resource_permissions(check_slice_perms) - @deprecated(new_target="/api/v1/chart/<int:pk>/data/") - def slice_json(self, slice_id: int) -> FlaskResponse: - form_data, slc = get_form_data(slice_id, use_slice_data=True) - if not slc: - return json_error_response("The slice does not exist") - - if not slc.datasource: - return json_error_response("The slice's datasource does not exist") - - try: - viz_obj = get_viz( - datasource_type=slc.datasource.type, - datasource_id=slc.datasource.id, - form_data=form_data, - force=False, - ) - return self.generate_json(viz_obj) - except SupersetException as ex: - return json_error_response(utils.error_msg_from_exception(ex)) - - @api - @has_access_api - @event_logger.log_this - @expose("/annotation_json/<int:layer_id>") - @deprecated(new_target="/api/v1/chart/<int:pk>/data/") - def annotation_json( # pylint: disable=no-self-use - self, layer_id: int - ) -> FlaskResponse: - form_data = get_form_data()[0] - force = utils.parse_boolean_string(request.args.get("force")) - - form_data["layer_id"] = layer_id - form_data["filters"] = [{"col": "layer_id", "op": "==", "val": layer_id}] - # Set all_columns to ensure the TableViz returns the necessary columns to the - # frontend. - form_data["all_columns"] = [ - "created_on", - "changed_on", - "id", - "start_dttm", - "end_dttm", - "layer_id", - "short_descr", - "long_descr", - "json_metadata", - "created_by_fk", - "changed_by_fk", - ] - datasource = AnnotationDatasource() - viz_obj = viz.viz_types["table"](datasource, form_data=form_data, force=force) - payload = viz_obj.get_payload() - return data_payload_response(*viz_obj.payload_json_and_has_error(payload)) - @event_logger.log_this @api @has_access_api @@ -888,21 +543,6 @@ def explore( except DatasetNotFoundError: pass datasource_name = datasource.name if datasource else _("[Missing Dataset]") - - if datasource: - if config["ENABLE_ACCESS_REQUEST"] and ( - not security_manager.can_access_datasource(datasource) - ): - flash( - __(security_manager.get_datasource_access_error_msg(datasource)), - "danger", - ) - return redirect( - "superset/request_access/?" - f"datasource_type={datasource_type}&" - f"datasource_id={datasource_id}&" - ) - viz_type = form_data.get("viz_type") if not viz_type and datasource and datasource.default_endpoint: return redirect(datasource.default_endpoint) @@ -982,7 +622,6 @@ def explore( bootstrap_data = { "can_add": slice_add_perm, - "can_download": slice_download_perm, "datasource": sanitize_datasource_data(datasource_data), "form_data": form_data, "datasource_id": datasource_id, @@ -1016,43 +655,6 @@ def explore( standalone_mode=standalone_mode, ) - @api - @handle_api_exception - @has_access_api - @event_logger.log_this - @expose("/filter/<datasource_type>/<int:datasource_id>/<column>/") - @deprecated( - new_target="/api/v1/datasource/<datasource_type>/<datasource_id>" - "/column/<column_name>/values/" - ) - def filter( # pylint: disable=no-self-use - self, datasource_type: str, datasource_id: int, column: str - ) -> FlaskResponse: - """ - Endpoint to retrieve values for specified column. - - :param datasource_type: Type of datasource e.g. table - :param datasource_id: Datasource id - :param column: Column name to retrieve values for - :returns: The Flask response - :raises SupersetSecurityException: If the user cannot access the resource - """ - # TODO: Cache endpoint by user, datasource and column - datasource = DatasourceDAO.get_datasource( - db.session, DatasourceType(datasource_type), datasource_id - ) - if not datasource: - return json_error_response(DATASOURCE_MISSING_ERR) - - datasource.raise_for_access() - row_limit = apply_max_row_limit(config["FILTER_SELECT_ROW_LIMIT"]) - payload = json.dumps( - datasource.values_for_column(column_name=column, limit=row_limit), - default=utils.json_int_dttm_ser, - ignore_nan=True, - ) - return json_success(payload) -
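The deprecation notice on the deleted `/filter/` endpoint above points at the datasource values REST API. A hedged client-side sketch of calling that replacement endpoint — the host, the bearer token, and the datasource/column identifiers below are placeholders, not values from this PR:

```python
# Illustrative client usage of the replacement endpoint; obtain a real access
# token via /api/v1/security/login first. The ids here are hypothetical.
import requests

BASE = "http://localhost:8088"
session = requests.Session()
session.headers["Authorization"] = "Bearer <access-token>"  # placeholder

resp = session.get(f"{BASE}/api/v1/datasource/table/1/column/gender/values/")
resp.raise_for_status()
# Distinct column values, capped server-side (FILTER_SELECT_ROW_LIMIT).
print(resp.json()["result"])
```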
@staticmethod def save_or_overwrite_slice( # pylint: disable=too-many-arguments,too-many-locals @@ -1163,783 +765,157 @@ def save_or_overwrite_slice( return json_success(json.dumps(response)) + @event_logger.log_this @api @has_access_api - @event_logger.log_this - @expose("/tables/<int:db_id>/<schema>/") - @expose("/tables/<int:db_id>/<schema>/<force_refresh>/") - @deprecated(new_target="api/v1/database/<int:pk>/tables/") - def tables( # pylint: disable=no-self-use + @expose("/warm_up_cache/", methods=("GET",)) + def warm_up_cache( # pylint: disable=too-many-locals,no-self-use self, - db_id: int, - schema: str, - force_refresh: str = "false", ) -> FlaskResponse: - """Endpoint to fetch the list of tables for given database""" - - force_refresh_parsed = force_refresh.lower() == "true" - schema_parsed = utils.parse_js_uri_path_item(schema, eval_undefined=True) + """Warms up the cache for the slice or table. - if not schema_parsed: - return json_error_response(_("Schema undefined"), status=422) + Note for slices a force refresh occurs. - # Guarantees database filtering by security access - database = ( - DatabaseFilter("id", SQLAInterface(Database, db.session)) - .apply( - db.session.query(Database), - None, - ) - .filter_by(id=db_id) - .one_or_none() - ) + In terms of the `extra_filters` these can be obtained from records in the JSON + encoded `logs.json` column associated with the `explore_json` action. + """ + session = db.session() + slice_id = request.args.get("slice_id") + dashboard_id = request.args.get("dashboard_id") + table_name = request.args.get("table_name") + db_name = request.args.get("db_name") + extra_filters = request.args.get("extra_filters") + slices: list[Slice] = [] - if not database: + if not slice_id and not (table_name and db_name): return json_error_response( - __("Database not found: %(id)s", id=db_id), status=404 - ) - - try: - tables = security_manager.get_datasources_accessible_by_user( - database=database, - schema=schema_parsed, - datasource_names=sorted( - utils.DatasourceName(*datasource_name) - for datasource_name in database.get_all_table_names_in_schema( - schema=schema_parsed, - force=force_refresh_parsed, - cache=database.table_cache_enabled, - cache_timeout=database.table_cache_timeout, - ) + __( + "Malformed request.
slice_id or table_name and db_name " "arguments are expected" ), status=400, ) + if slice_id: + slices = session.query(Slice).filter_by(id=slice_id).all() + if not slices: + return json_error_response( + __("Chart %(id)s not found", id=slice_id), status=404 + ) + elif table_name and db_name: + table = ( + session.query(SqlaTable) + .join(Database) + .filter( + Database.database_name == db_name, + SqlaTable.table_name == table_name + ) + ).one_or_none() + if not table: + return json_error_response( + __( + "Table %(table)s wasn't found in the database %(db)s", + table=table_name, + db=db_name, + ), + status=404, + ) + slices = ( + session.query(Slice) + .filter_by(datasource_id=table.id, datasource_type=table.type) + .all() ) - views = security_manager.get_datasources_accessible_by_user( - database=database, - schema=schema_parsed, - datasource_names=sorted( - utils.DatasourceName(*datasource_name) - for datasource_name in database.get_all_view_names_in_schema( - schema=schema_parsed, - force=force_refresh_parsed, - cache=database.table_cache_enabled, - cache_timeout=database.table_cache_timeout, + result = [] + + for slc in slices: + try: + form_data = get_form_data(slc.id, use_slice_data=True)[0] + if dashboard_id: + form_data["extra_filters"] = ( + json.loads(extra_filters) + if extra_filters + else get_dashboard_extra_filters(slc.id, dashboard_id) ) - ), - ) - except SupersetException as ex: - return json_error_response(ex.message, ex.status) - - extra_dict_by_name = { - table.name: table.extra_dict - for table in ( - db.session.query(SqlaTable).filter( - SqlaTable.database_id == database.id, - SqlaTable.schema == schema_parsed, + + if not slc.datasource: + raise Exception("Slice's datasource does not exist") + + obj = get_viz( + datasource_type=slc.datasource.type, + datasource_id=slc.datasource.id, + form_data=form_data, + force=True, ) - ).all() - } - options = sorted( - [ - { - "value": table.table, - "type": "table", - "extra": extra_dict_by_name.get(table.table, None), - } - for table in tables - ] - + [ - { - "value": view.table, - "type": "view", - } - for view in views - ], - key=lambda item: item["value"], - ) + # pylint: disable=assigning-non-slot + g.form_data = form_data + payload = obj.get_payload() + delattr(g, "form_data") + error = payload["errors"] or None + status = payload["status"] + except Exception as ex: # pylint: disable=broad-except + error = utils.error_msg_from_exception(ex) + status = None - payload = {"tableLength": len(tables) + len(views), "options": options} - return json_success(json.dumps(payload)) + result.append( + {"slice_id": slc.id, "viz_error": error, "viz_status": status} + ) - @api - @has_access_api - @event_logger.log_this - @expose( - "/copy_dash/<int:dashboard_id>/", - methods=( - "GET", - "POST", - ), - ) - @deprecated(new_target="api/v1/dashboard/<id_or_slug>/copy/") - def copy_dash( # pylint: disable=no-self-use - self, dashboard_id: int + return json_success(json.dumps(result)) + + @has_access + @expose("/dashboard/<dashboard_id_or_slug>/") + @event_logger.log_this_with_extra_payload + def dashboard( + self, + dashboard_id_or_slug: str, + add_extra_log_payload: Callable[..., None] = lambda **kwargs: None, ) -> FlaskResponse: - """Copy dashboard""" - session = db.session() - data = json.loads(request.form["data"]) - # client-side send back last_modified_time which was set when - # the dashboard was open. it was use to avoid mid-air collision.
- data.pop("last_modified_time", None) - - dash = Dashboard() - original_dash = session.query(Dashboard).get(dashboard_id) - - dash.owners = [g.user] if g.user else [] - dash.dashboard_title = data["dashboard_title"] - dash.css = data.get("css") - - old_to_new_slice_ids: dict[int, int] = {} - if data["duplicate_slices"]: - # Duplicating slices as well, mapping old ids to new ones - for slc in original_dash.slices: - new_slice = slc.clone() - new_slice.owners = [g.user] if g.user else [] - session.add(new_slice) - session.flush() - new_slice.dashboards.append(dash) - old_to_new_slice_ids[slc.id] = new_slice.id - - # update chartId of layout entities - for value in data["positions"].values(): - if isinstance(value, dict) and value.get("meta", {}).get("chartId"): - old_id = value["meta"]["chartId"] - new_id = old_to_new_slice_ids.get(old_id) - value["meta"]["chartId"] = new_id - else: - dash.slices = original_dash.slices + """ + Server side rendering for a dashboard. + + :param dashboard_id_or_slug: identifier for dashboard + :param add_extra_log_payload: added by `log_this_with_manual_updates`, set a + default value to appease pylint + """ - dash.params = original_dash.params + dashboard = Dashboard.get(dashboard_id_or_slug) - DashboardDAO.set_dash_metadata(dash, data, old_to_new_slice_ids) - session.add(dash) - session.commit() - dash_json = json.dumps(dash.data) - session.close() - return json_success(dash_json) + if not dashboard: + abort(404) - @api - @has_access_api - @event_logger.log_this - @expose( - "/save_dash//", - methods=( - "GET", - "POST", - ), - ) - @deprecated() - def save_dash( # pylint: disable=no-self-use - self, dashboard_id: int - ) -> FlaskResponse: - """Save a dashboard's metadata""" - session = db.session() - dash = session.query(Dashboard).get(dashboard_id) - security_manager.raise_for_ownership(dash) - data = json.loads(request.form["data"]) - # client-side send back last_modified_time which was set when - # the dashboard was open. it was use to avoid mid-air collision. - remote_last_modified_time = data.get("last_modified_time") - current_last_modified_time = dash.changed_on.replace(microsecond=0).timestamp() - if ( - remote_last_modified_time - and remote_last_modified_time < current_last_modified_time - ): - return json_error_response( - __( - "This dashboard was changed recently. " - "Please reload dashboard to get latest version." - ), - 412, + try: + security_manager.raise_for_dashboard_access(dashboard) + except DashboardAccessDeniedError as ex: + return redirect_with_flash( + url="/dashboard/list/", + message=utils.error_msg_from_exception(ex), + category="danger", ) - # remove to avoid confusion. 
- data.pop("last_modified_time", None) - - if data.get("css") is not None: - dash.css = data["css"] - if data.get("dashboard_title") is not None: - dash.dashboard_title = data["dashboard_title"] - DashboardDAO.set_dash_metadata(dash, data) - session.merge(dash) - session.commit() - - # get updated changed_on - dash = session.query(Dashboard).get(dashboard_id) - last_modified_time = dash.changed_on.replace(microsecond=0).timestamp() - session.close() - return json_success( - json.dumps({"status": "SUCCESS", "last_modified_time": last_modified_time}) + add_extra_log_payload( + dashboard_id=dashboard.id, + dashboard_version="v2", + dash_edit_perm=( + security_manager.is_owner(dashboard) + and security_manager.can_access("can_write", "Dashboard") + ), + edit_mode=( + request.args.get(ReservedUrlParameters.EDIT_MODE.value) == "true" + ), ) - @api - @has_access_api - @event_logger.log_this - @expose("/add_slices//", methods=("POST",)) - @deprecated(new_target="api/v1/chart/") - def add_slices( # pylint: disable=no-self-use - self, dashboard_id: int - ) -> FlaskResponse: - """Add and save slices to a dashboard""" - data = json.loads(request.form["data"]) - session = db.session() - dash = session.query(Dashboard).get(dashboard_id) - security_manager.raise_for_ownership(dash) - new_slices = session.query(Slice).filter(Slice.id.in_(data["slice_ids"])) - dash.slices += new_slices - session.merge(dash) - session.commit() - session.close() - return "SLICES ADDED" - - @api - @has_access_api - @event_logger.log_this - @expose( - "/testconn", - methods=( - "GET", - "POST", - ), - ) # pylint: disable=no-self-use - @deprecated(new_target="/api/v1/database/test_connection/") - def testconn(self) -> FlaskResponse: - """Tests a sqla connection""" - db_name = request.json.get("name") - uri = request.json.get("uri") - try: - if app.config["PREVENT_UNSAFE_DB_CONNECTIONS"]: - check_sqlalchemy_uri(make_url_safe(uri)) - # if the database already exists in the database, only its safe - # (password-masked) URI would be shown in the UI and would be passed in the - # form data so if the database already exists and the form was submitted - # with the safe URI, we assume we should retrieve the decrypted URI to test - # the connection. - if db_name: - existing_database = ( - db.session.query(Database) - .filter_by(database_name=db_name) - .one_or_none() - ) - if existing_database and uri == existing_database.safe_sqlalchemy_uri(): - uri = existing_database.sqlalchemy_uri_decrypted - - # This is the database instance that will be tested. Note the extra fields - # are represented as JSON encoded strings in the model. 
- database = Database( - server_cert=request.json.get("server_cert"), - extra=json.dumps(request.json.get("extra", {})), - impersonate_user=request.json.get("impersonate_user"), - encrypted_extra=json.dumps(request.json.get("encrypted_extra", {})), - ) - database.set_sqlalchemy_uri(uri) - database.db_engine_spec.mutate_db_for_connection_test(database) - - with database.get_sqla_engine_with_context() as engine: - with closing(engine.raw_connection()) as conn: - if engine.dialect.do_ping(conn): - return json_success('"OK"') - - raise DBAPIError(None, None, None) - except CertificateException as ex: - logger.info("Certificate exception") - return json_error_response(ex.message) - except (NoSuchModuleError, ModuleNotFoundError): - logger.info("Invalid driver") - driver_name = make_url_safe(uri).drivername - return json_error_response( - _( - "Could not load database driver: %(driver_name)s", - driver_name=driver_name, - ), - 400, - ) - except DatabaseInvalidError: - logger.info("Invalid URI") - return json_error_response( - _( - "Invalid connection string, a valid string usually follows:\n" - "'DRIVER://USER:PASSWORD@DB-HOST/DATABASE-NAME'" - ) - ) - except DBAPIError: - logger.warning("Connection failed") - return json_error_response( - _("Connection failed, please check your connection settings"), 400 - ) - except SupersetSecurityException as ex: - logger.warning("Stopped an unsafe database connection") - return json_error_response(_(str(ex)), 400) - except Exception as ex: # pylint: disable=broad-except - logger.warning("Unexpected error %s", type(ex).__name__) - return json_error_response( - _("Unexpected error occurred, please check your logs for details"), 400 - ) - - @staticmethod - def get_user_activity_access_error(user_id: int) -> FlaskResponse | None: - try: - security_manager.raise_for_user_activity_access(user_id) - except SupersetSecurityException as ex: - return json_error_response( - ex.message, - status=403, - ) - return None - - @api - @has_access_api - @event_logger.log_this - @expose("/recent_activity//", methods=("GET",)) - @deprecated(new_target="/api/v1/log/recent_activity//") - def recent_activity(self, user_id: int) -> FlaskResponse: - """Recent activity (actions) for a given user""" - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - - limit = request.args.get("limit") - limit = int(limit) if limit and limit.isdigit() else 100 - actions = request.args.get("actions", "explore,dashboard").split(",") - # whether to get distinct subjects - distinct = request.args.get("distinct") != "false" - - payload = LogDAO.get_recent_activity(user_id, actions, distinct, 0, limit) - - return json_success(json.dumps(payload, default=utils.json_int_dttm_ser)) - - @api - @has_access_api - @event_logger.log_this - @expose("/available_domains/", methods=("GET",)) - @deprecated(new_target="/api/v1/available_domains/") - def available_domains(self) -> FlaskResponse: # pylint: disable=no-self-use - """ - Returns the list of available Superset Webserver domains (if any) - defined in config. This enables charts embedded in other apps to - leverage domain sharding if appropriately configured. 
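
The connection test in the deprecated `testconn` endpoint above reduces to building a throwaway engine and issuing a dialect-level ping. Outside Superset, the same probe looks roughly like this (plain SQLAlchemy; the URI in the comment is an example):

```python
# Plain-SQLAlchemy sketch of the same probe; the URI is an assumption.
from contextlib import closing

from sqlalchemy import create_engine

def can_connect(uri: str) -> bool:
    """Return True if the database behind `uri` answers a dialect-level ping."""
    engine = create_engine(uri)  # e.g. "postgresql://user:secret@db-host/analytics"
    with closing(engine.raw_connection()) as conn:
        return engine.dialect.do_ping(conn)
```
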
- """ - return Response( - json.dumps(conf.get("SUPERSET_WEBSERVER_DOMAINS")), mimetype="text/json" - ) - - @api - @has_access_api - @event_logger.log_this - @expose("/fave_dashboards_by_username//", methods=("GET",)) - @deprecated(new_target="api/v1/dashboard/favorite_status/") - def fave_dashboards_by_username(self, username: str) -> FlaskResponse: - """This lets us use a user's username to pull favourite dashboards""" - user = security_manager.find_user(username=username) - return self.fave_dashboards(user.id) - - @api - @has_access_api - @event_logger.log_this - @expose("/fave_dashboards//", methods=("GET",)) - @deprecated(new_target="api/v1/dashboard/favorite_status/") - def fave_dashboards(self, user_id: int) -> FlaskResponse: - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - qry = ( - db.session.query(Dashboard, FavStar.dttm) - .join( - FavStar, - and_( - FavStar.user_id == int(user_id), - FavStar.class_name == "Dashboard", - Dashboard.id == FavStar.obj_id, - ), - ) - .order_by(FavStar.dttm.desc()) - ) - payload = [] - for o in qry.all(): - dash = { - "id": o.Dashboard.id, - "dashboard": o.Dashboard.dashboard_link(), - "title": o.Dashboard.dashboard_title, - "url": o.Dashboard.url, - "dttm": o.dttm, - } - if o.Dashboard.created_by: - user = o.Dashboard.created_by - dash["creator"] = str(user) - dash["creator_url"] = f"/superset/profile/{user.username}/" - payload.append(dash) - return json_success(json.dumps(payload, default=utils.json_int_dttm_ser)) - - @api - @has_access_api - @event_logger.log_this - @expose("/created_dashboards//", methods=("GET",)) - @deprecated(new_target="api/v1/dashboard/") - def created_dashboards(self, user_id: int) -> FlaskResponse: - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - qry = ( - db.session.query(Dashboard) - .filter( # pylint: disable=comparison-with-callable - or_( - Dashboard.created_by_fk == user_id, - Dashboard.changed_by_fk == user_id, - ) - ) - .order_by(Dashboard.changed_on.desc()) - ) - payload = [ - { - "id": o.id, - "dashboard": o.dashboard_link(), - "title": o.dashboard_title, - "url": o.url, - "dttm": o.changed_on, - } - for o in qry.all() - ] - return json_success(json.dumps(payload, default=utils.json_int_dttm_ser)) - - @api - @has_access_api - @event_logger.log_this - @expose("/user_slices", methods=("GET",)) - @expose("/user_slices//", methods=("GET",)) - @deprecated(new_target="/api/v1/chart/") - def user_slices(self, user_id: int | None = None) -> FlaskResponse: - """List of slices a user owns, created, modified or faved""" - if not user_id: - user_id = cast(int, get_user_id()) - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - - owner_ids_query = ( - db.session.query(Slice.id) - .join(Slice.owners) - .filter(security_manager.user_model.id == user_id) - ) - - qry = ( - db.session.query(Slice, FavStar.dttm) - .join( - FavStar, - and_( - FavStar.user_id == user_id, - FavStar.class_name == "slice", - Slice.id == FavStar.obj_id, - ), - isouter=True, - ) - .filter( # pylint: disable=comparison-with-callable - or_( - Slice.id.in_(owner_ids_query), - Slice.created_by_fk == user_id, - Slice.changed_by_fk == user_id, - FavStar.user_id == user_id, - ) - ) - .order_by(Slice.slice_name.asc()) - ) - payload = [ - { - "id": o.Slice.id, - "title": o.Slice.slice_name, - "url": o.Slice.slice_url, - "data": o.Slice.form_data, - "dttm": o.dttm if o.dttm else o.Slice.changed_on, - "viz_type": o.Slice.viz_type, - } - for o in qry.all() - ] - 
return json_success(json.dumps(payload, default=utils.json_int_dttm_ser)) - - @api - @has_access_api - @event_logger.log_this - @expose("/created_slices", methods=("GET",)) - @expose("/created_slices//", methods=("GET",)) - @deprecated(new_target="api/v1/chart/") - def created_slices(self, user_id: int | None = None) -> FlaskResponse: - """List of slices created by this user""" - if not user_id: - user_id = cast(int, get_user_id()) - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - qry = ( - db.session.query(Slice) - .filter( # pylint: disable=comparison-with-callable - or_(Slice.created_by_fk == user_id, Slice.changed_by_fk == user_id) - ) - .order_by(Slice.changed_on.desc()) - ) - payload = [ - { - "id": o.id, - "title": o.slice_name, - "url": o.slice_url, - "dttm": o.changed_on, - "viz_type": o.viz_type, - } - for o in qry.all() - ] - return json_success(json.dumps(payload, default=utils.json_int_dttm_ser)) - - @api - @has_access_api - @event_logger.log_this - @expose("/fave_slices", methods=("GET",)) - @expose("/fave_slices//", methods=("GET",)) - @deprecated(new_target="api/v1/chart/") - def fave_slices(self, user_id: int | None = None) -> FlaskResponse: - """Favorite slices for a user""" - if user_id is None: - user_id = cast(int, get_user_id()) - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - qry = ( - db.session.query(Slice, FavStar.dttm) - .join( - FavStar, - and_( - FavStar.user_id == user_id, - FavStar.class_name == "slice", - Slice.id == FavStar.obj_id, - ), - ) - .order_by(FavStar.dttm.desc()) - ) - payload = [] - for o in qry.all(): - dash = { - "id": o.Slice.id, - "title": o.Slice.slice_name, - "url": o.Slice.slice_url, - "dttm": o.dttm, - "viz_type": o.Slice.viz_type, - } - if o.Slice.created_by: - user = o.Slice.created_by - dash["creator"] = str(user) - dash["creator_url"] = f"/superset/profile/{user.username}/" - payload.append(dash) - return json_success(json.dumps(payload, default=utils.json_int_dttm_ser)) - - @event_logger.log_this - @api - @has_access_api - @expose("/warm_up_cache/", methods=("GET",)) - @deprecated(new_target="api/v1/chart/warm_up_cache") - def warm_up_cache( # pylint: disable=too-many-locals,no-self-use - self, - ) -> FlaskResponse: - """Warms up the cache for the slice or table. - - Note for slices a force refresh occurs. - - In terms of the `extra_filters` these can be obtained from records in the JSON - encoded `logs.json` column associated with the `explore_json` action. - """ - session = db.session() - slice_id = request.args.get("slice_id") - dashboard_id = request.args.get("dashboard_id") - table_name = request.args.get("table_name") - db_name = request.args.get("db_name") - extra_filters = request.args.get("extra_filters") - slices: list[Slice] = [] - - if not slice_id and not (table_name and db_name): - return json_error_response( - __( - "Malformed request. 
slice_id or table_name and db_name " - "arguments are expected" - ), - status=400, - ) - if slice_id: - slices = session.query(Slice).filter_by(id=slice_id).all() - if not slices: - return json_error_response( - __("Chart %(id)s not found", id=slice_id), status=404 - ) - elif table_name and db_name: - table = ( - session.query(SqlaTable) - .join(Database) - .filter( - Database.database_name == db_name - or SqlaTable.table_name == table_name - ) - ).one_or_none() - if not table: - return json_error_response( - __( - "Table %(table)s wasn't found in the database %(db)s", - table=table_name, - db=db_name, - ), - status=404, - ) - slices = ( - session.query(Slice) - .filter_by(datasource_id=table.id, datasource_type=table.type) - .all() - ) - - result = [] - - for slc in slices: - try: - form_data = get_form_data(slc.id, use_slice_data=True)[0] - if dashboard_id: - form_data["extra_filters"] = ( - json.loads(extra_filters) - if extra_filters - else get_dashboard_extra_filters(slc.id, dashboard_id) - ) - - if not slc.datasource: - raise Exception("Slice's datasource does not exist") - - obj = get_viz( - datasource_type=slc.datasource.type, - datasource_id=slc.datasource.id, - form_data=form_data, - force=True, - ) - - # pylint: disable=assigning-non-slot - g.form_data = form_data - payload = obj.get_payload() - delattr(g, "form_data") - error = payload["errors"] or None - status = payload["status"] - except Exception as ex: # pylint: disable=broad-except - error = utils.error_msg_from_exception(ex) - status = None - - result.append( - {"slice_id": slc.id, "viz_error": error, "viz_status": status} - ) - - return json_success(json.dumps(result)) - - @has_access_api - @event_logger.log_this - @expose("/favstar////") - @deprecated(new_target="api/v1/dashboard|chart//favorites/") - def favstar( # pylint: disable=no-self-use - self, class_name: str, obj_id: int, action: str - ) -> FlaskResponse: - """Toggle favorite stars on Slices and Dashboard""" - if not get_user_id(): - return json_error_response("ERROR: Favstar toggling denied", status=403) - session = db.session() - count = 0 - favs = ( - session.query(FavStar) - .filter_by(class_name=class_name, obj_id=obj_id, user_id=get_user_id()) - .all() - ) - if action == "select": - if not favs: - session.add( - FavStar( - class_name=class_name, - obj_id=obj_id, - user_id=get_user_id(), - dttm=datetime.now(), - ) - ) - count = 1 - elif action == "unselect": - for fav in favs: - session.delete(fav) - else: - count = len(favs) - session.commit() - return json_success(json.dumps({"count": count})) - - @has_access - @expose("/dashboard//") - @event_logger.log_this_with_extra_payload - @check_dashboard_access( - on_error=lambda msg: redirect_with_flash(DASHBOARD_LIST_URL, msg, "danger") - ) - def dashboard( - self, - dashboard_id_or_slug: str, # pylint: disable=unused-argument - add_extra_log_payload: Callable[..., None] = lambda **kwargs: None, - dashboard: Dashboard | None = None, - ) -> FlaskResponse: - """ - Server side rendering for a dashboard - :param dashboard_id_or_slug: identifier for dashboard. 
used in the decorators - :param add_extra_log_payload: added by `log_this_with_manual_updates`, set a - default value to appease pylint - :param dashboard: added by `check_dashboard_access` - """ - if not dashboard: - abort(404) - - assert dashboard is not None - - has_access_ = False - for datasource in dashboard.datasources: - datasource = DatasourceDAO.get_datasource( - datasource_type=DatasourceType(datasource.type), - datasource_id=datasource.id, - session=db.session(), - ) - if datasource and security_manager.can_access_datasource( - datasource=datasource, - ): - has_access_ = True - - if has_access_ is False and config["ENABLE_ACCESS_REQUEST"]: - flash( - __(security_manager.get_datasource_access_error_msg(datasource)), - "danger", - ) - return redirect( - f"/superset/request_access/?dashboard_id={dashboard.id}" - ) - - if has_access_: - break - - if dashboard.datasources and not has_access_: - flash(DashboardAccessDeniedError.message, "danger") - return redirect(DASHBOARD_LIST_URL) - - dash_edit_perm = security_manager.is_owner( - dashboard - ) and security_manager.can_access("can_save_dash", "Superset") - edit_mode = ( - request.args.get(utils.ReservedUrlParameters.EDIT_MODE.value) == "true" - ) - - standalone_mode = ReservedUrlParameters.is_standalone_mode() - - add_extra_log_payload( - dashboard_id=dashboard.id, - dashboard_version="v2", - dash_edit_perm=dash_edit_perm, - edit_mode=edit_mode, - ) - - bootstrap_data = { - "user": bootstrap_user_data(g.user, include_perms=True), - "common": common_bootstrap_payload(g.user), - } - return self.render_template( "superset/spa.html", entry="spa", - # dashboard title is always visible - title=dashboard.dashboard_title, + title=dashboard.dashboard_title, # dashboard title is always visible bootstrap_data=json.dumps( - bootstrap_data, default=utils.pessimistic_json_iso_dttm_ser + { + "user": bootstrap_user_data(g.user, include_perms=True), + "common": common_bootstrap_payload(g.user), + }, + default=utils.pessimistic_json_iso_dttm_ser, ), - standalone_mode=standalone_mode, + standalone_mode=ReservedUrlParameters.is_standalone_mode(), ) @has_access @@ -1972,560 +948,10 @@ def dashboard_permalink( # pylint: disable=no-self-use def log(self) -> FlaskResponse: # pylint: disable=no-self-use return Response(status=200) - @has_access - @expose("/get_or_create_table/", methods=("POST",)) - @event_logger.log_this - @deprecated(new_target="api/v1/dataset/get_or_create/") - def sqllab_table_viz(self) -> FlaskResponse: # pylint: disable=no-self-use - """Gets or creates a table object with attributes passed to the API. - - It expects the json with params: - * datasourceName - e.g. table name, required - * dbId - database id, required - * schema - table schema, optional - * templateParams - params for the Jinja templating syntax, optional - :return: Response - """ - data = json.loads(request.form["data"]) - table_name = data["datasourceName"] - database_id = data["dbId"] - table = ( - db.session.query(SqlaTable) - .filter_by(database_id=database_id, table_name=table_name) - .one_or_none() - ) - if not table: - # Create table if doesn't exist. - with db.session.no_autoflush: - table = SqlaTable(table_name=table_name, owners=[g.user]) - table.database_id = database_id - table.database = ( - db.session.query(Database).filter_by(id=database_id).one() - ) - table.schema = data.get("schema") - table.template_params = data.get("templateParams") - # needed for the table validation. 
- # fn can be deleted when this endpoint is removed - validate_sqlatable(table) - - db.session.add(table) - table.fetch_metadata() - db.session.commit() - - return json_success(json.dumps({"table_id": table.id})) - - @has_access - @expose("/sqllab_viz/", methods=("POST",)) - @event_logger.log_this - @deprecated(new_target="api/v1/dataset/") - def sqllab_viz(self) -> FlaskResponse: # pylint: disable=no-self-use - data = json.loads(request.form["data"]) - try: - table_name = data["datasourceName"] - database_id = data["dbId"] - except KeyError as ex: - raise SupersetGenericErrorException( - __( - "One or more required fields are missing in the request. Please try " - "again, and if the problem persists contact your administrator." - ), - status=400, - ) from ex - database = db.session.query(Database).get(database_id) - if not database: - raise SupersetErrorException( - SupersetError( - message=__("The database was not found."), - error_type=SupersetErrorType.DATABASE_NOT_FOUND_ERROR, - level=ErrorLevel.ERROR, - ), - status=404, - ) - table = ( - db.session.query(SqlaTable) - .filter_by(database_id=database_id, table_name=table_name) - .one_or_none() - ) - - if table: - return json_errors_response( - [ - SupersetError( - message=f"Dataset [{table_name}] already exists", - error_type=SupersetErrorType.GENERIC_BACKEND_ERROR, - level=ErrorLevel.WARNING, - ) - ], - status=422, - ) - - table = SqlaTable(table_name=table_name, owners=[g.user]) - table.database = database - table.schema = data.get("schema") - table.template_params = data.get("templateParams") - table.is_sqllab_view = True - table.sql = ParsedQuery(data.get("sql")).stripped() - db.session.add(table) - cols = [] - for config_ in data.get("columns"): - column_name = config_.get("column_name") or config_.get("name") - col = TableColumn( - column_name=column_name, - filterable=True, - groupby=True, - is_dttm=config_.get("is_dttm", False), - type=config_.get("type", False), - ) - cols.append(col) - - table.columns = cols - table.metrics = [SqlMetric(metric_name="count", expression="count(*)")] - db.session.commit() - - return json_success( - json.dumps( - {"table_id": table.id, "data": sanitize_datasource_data(table.data)} - ) - ) - - @has_access - @expose("/extra_table_metadata////") - @event_logger.log_this - @deprecated( - new_target="api/v1/database//table_extra///" - ) - def extra_table_metadata( # pylint: disable=no-self-use - self, database_id: int, table_name: str, schema: str - ) -> FlaskResponse: - parsed_schema = utils.parse_js_uri_path_item(schema, eval_undefined=True) - table_name = utils.parse_js_uri_path_item(table_name) # type: ignore - mydb = db.session.query(Database).filter_by(id=database_id).one() - payload = mydb.db_engine_spec.extra_table_metadata( - mydb, table_name, parsed_schema - ) - return json_success(json.dumps(payload)) - - @has_access_api - @expose("/estimate_query_cost//", methods=("POST",)) - @expose("/estimate_query_cost///", methods=("POST",)) - @event_logger.log_this - @deprecated(new_target="api/v1/sqllab/estimate/") - def estimate_query_cost( # pylint: disable=no-self-use - self, database_id: int, schema: str | None = None - ) -> FlaskResponse: - mydb = db.session.query(Database).get(database_id) - - sql = json.loads(request.form.get("sql", '""')) - if template_params := json.loads(request.form.get("templateParams") or "{}"): - template_processor = get_template_processor(mydb) - sql = template_processor.process_template(sql, **template_params) - - timeout = SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT - 
timeout_msg = f"The estimation exceeded the {timeout} seconds timeout." - try: - with utils.timeout(seconds=timeout, error_message=timeout_msg): - cost = mydb.db_engine_spec.estimate_query_cost( - mydb, schema, sql, utils.QuerySource.SQL_LAB - ) - except SupersetTimeoutException as ex: - logger.exception(ex) - return json_errors_response([ex.error]) - except Exception as ex: # pylint: disable=broad-except - return json_error_response(utils.error_msg_from_exception(ex)) - - spec = mydb.db_engine_spec - query_cost_formatters: dict[str, Any] = app.config[ - "QUERY_COST_FORMATTERS_BY_ENGINE" - ] - query_cost_formatter = query_cost_formatters.get( - spec.engine, spec.query_cost_formatter - ) - cost = query_cost_formatter(cost) - - return json_success(json.dumps(cost)) - @expose("/theme/") def theme(self) -> FlaskResponse: return self.render_template("superset/theme.html") - @has_access_api - @expose("/results//") - @event_logger.log_this - @deprecated(new_target="api/v1/sqllab/results/") - def results(self, key: str) -> FlaskResponse: - return self.results_exec(key) - - @staticmethod - def results_exec(key: str) -> FlaskResponse: - """Serves a key off of the results backend - - It is possible to pass the `rows` query argument to limit the number - of rows returned. - """ - if not results_backend: - raise SupersetErrorException( - SupersetError( - message=__("Results backend is not configured."), - error_type=SupersetErrorType.RESULTS_BACKEND_NOT_CONFIGURED_ERROR, - level=ErrorLevel.ERROR, - ) - ) - - read_from_results_backend_start = now_as_float() - blob = results_backend.get(key) - stats_logger.timing( - "sqllab.query.results_backend_read", - now_as_float() - read_from_results_backend_start, - ) - if not blob: - raise SupersetErrorException( - SupersetError( - message=__( - "Data could not be retrieved from the results backend. You " - "need to re-run the original query." - ), - error_type=SupersetErrorType.RESULTS_BACKEND_ERROR, - level=ErrorLevel.ERROR, - ), - status=410, - ) - - query = db.session.query(Query).filter_by(results_key=key).one_or_none() - if query is None: - raise SupersetErrorException( - SupersetError( - message=__( - "The query associated with these results could not be found. " - "You need to re-run the original query." - ), - error_type=SupersetErrorType.RESULTS_BACKEND_ERROR, - level=ErrorLevel.ERROR, - ), - status=404, - ) - - try: - query.raise_for_access() - except SupersetSecurityException as ex: - raise SupersetErrorException( - SupersetError( - message=__( - "You are not authorized to see this query. If you think this " - "is an error, please reach out to your administrator." - ), - error_type=SupersetErrorType.QUERY_SECURITY_ACCESS_ERROR, - level=ErrorLevel.ERROR, - ), - status=403, - ) from ex - - payload = utils.zlib_decompress(blob, decode=not results_backend_use_msgpack) - try: - obj = _deserialize_results_payload( - payload, query, cast(bool, results_backend_use_msgpack) - ) - except SerializationError as ex: - raise SupersetErrorException( - SupersetError( - message=__( - "Data could not be deserialized from the results backend. The " - "storage format might have changed, rendering the old data " - "stake. You need to re-run the original query." 
- ), - error_type=SupersetErrorType.RESULTS_BACKEND_ERROR, - level=ErrorLevel.ERROR, - ), - status=404, - ) from ex - - if "rows" in request.args: - try: - rows = int(request.args["rows"]) - except ValueError as ex: - raise SupersetErrorException( - SupersetError( - message=__( - "The provided `rows` argument is not a valid integer." - ), - error_type=SupersetErrorType.INVALID_PAYLOAD_SCHEMA_ERROR, - level=ErrorLevel.ERROR, - ), - status=400, - ) from ex - - obj = apply_display_max_row_configuration_if_require(obj, rows) - - return json_success( - json.dumps( - obj, default=utils.json_iso_dttm_ser, ignore_nan=True, encoding=None - ) - ) - - @has_access_api - @handle_api_exception - @expose("/stop_query/", methods=("POST",)) - @event_logger.log_this - @backoff.on_exception( - backoff.constant, - Exception, - interval=1, - on_backoff=lambda details: db.session.rollback(), - on_giveup=lambda details: db.session.rollback(), - max_tries=5, - ) - @deprecated(new_target="/api/v1/query/stop") - def stop_query(self) -> FlaskResponse: - client_id = request.form.get("client_id") - query = db.session.query(Query).filter_by(client_id=client_id).one() - if query.status in [ - QueryStatus.FAILED, - QueryStatus.SUCCESS, - QueryStatus.TIMED_OUT, - ]: - logger.warning( - "Query with client_id could not be stopped: query already complete", - ) - return self.json_response("OK") - - if not sql_lab.cancel_query(query): - raise SupersetCancelQueryException("Could not cancel query") - - query.status = QueryStatus.STOPPED - # Add the stop identity attribute because the sqlalchemy thread is unsafe - # because of multiple updates to the status in the query table - query.set_extra_json_key(QUERY_EARLY_CANCEL_KEY, True) - query.end_time = now_as_float() - db.session.commit() - - return self.json_response("OK") - - @has_access_api - @event_logger.log_this - @expose( - "/validate_sql_json/", - methods=( - "GET", - "POST", - ), - ) - @deprecated(new_target="/api/v1/database//validate_sql/") - def validate_sql_json( - # pylint: disable=too-many-locals,no-self-use - self, - ) -> FlaskResponse: - """Validates that arbitrary sql is acceptable for the given database. - Returns a list of error/warning annotations as json. - """ - sql = request.form["sql"] - database_id = request.form["database_id"] - schema = request.form.get("schema") or None - template_params = json.loads(request.form.get("templateParams") or "{}") - - if template_params is not None and len(template_params) > 0: - # TODO: factor the Database object out of template rendering - # or provide it as mydb so we can render template params - # without having to also persist a Query ORM object. 
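
The deprecated `validate_sql_json` endpoint above only works when a validator is registered for the database's engine under `SQL_VALIDATORS_BY_ENGINE` (the config lookup appears just below). A `superset_config.py` sketch; the Presto mapping mirrors the validator Superset has historically shipped, but treat the exact names as assumptions:

```python
# superset_config.py (sketch) -- map an engine name to a validator class name;
# engines missing from this dict make the endpoint return HTTP 400.
SQL_VALIDATORS_BY_ENGINE = {
    "presto": "PrestoDBSQLValidator",
}
```
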
- return json_error_response( - "SQL validation does not support template parameters", status=400 - ) - - session = db.session() - mydb = session.query(Database).filter_by(id=database_id).one_or_none() - if not mydb: - return json_error_response( - f"Database with id {database_id} is missing.", status=400 - ) - - spec = mydb.db_engine_spec - validators_by_engine = app.config["SQL_VALIDATORS_BY_ENGINE"] - if not validators_by_engine or spec.engine not in validators_by_engine: - return json_error_response( - f"no SQL validator is configured for {spec.engine}", status=400 - ) - validator_name = validators_by_engine[spec.engine] - validator = get_validator_by_name(validator_name) - if not validator: - return json_error_response( - "No validator named {} found (configured for the {} engine)".format( - validator_name, spec.engine - ) - ) - - try: - timeout = config["SQLLAB_VALIDATION_TIMEOUT"] - timeout_msg = f"The query exceeded the {timeout} seconds timeout." - with utils.timeout(seconds=timeout, error_message=timeout_msg): - errors = validator.validate(sql, schema, mydb) - payload = json.dumps( - [err.to_dict() for err in errors], - default=utils.pessimistic_json_iso_dttm_ser, - ignore_nan=True, - encoding=None, - ) - return json_success(payload) - except Exception as ex: # pylint: disable=broad-except - logger.exception(ex) - msg = _( - "%(validator)s was unable to check your query.\n" - "Please recheck your query.\n" - "Exception: %(ex)s", - validator=validator.name, - ex=ex, - ) - # Return as a 400 if the database error message says we got a 4xx error - if re.search(r"([\W]|^)4\d{2}([\W]|$)", str(ex)): - return json_error_response(f"{msg}", status=400) - return json_error_response(f"{msg}") - - @has_access_api - @handle_api_exception - @event_logger.log_this - @expose("/sql_json/", methods=("POST",)) - @deprecated(new_target="/api/v1/sqllab/execute/") - def sql_json(self) -> FlaskResponse: - if errors := SqlJsonPayloadSchema().validate(request.json): - return json_error_response(status=400, payload=errors) - - try: - log_params = { - "user_agent": cast(Optional[str], request.headers.get("USER_AGENT")) - } - execution_context = SqlJsonExecutionContext(request.json) - command = self._create_sql_json_command(execution_context, log_params) - command_result: CommandResult = command.run() - return self._create_response_from_execution_context(command_result) - except SqlLabException as ex: - logger.error(ex.message) - self._set_http_status_into_Sql_lab_exception(ex) - payload = {"errors": [ex.to_dict()]} - return json_error_response(status=ex.status, payload=payload) - - @staticmethod - def _create_sql_json_command( - execution_context: SqlJsonExecutionContext, log_params: dict[str, Any] | None - ) -> ExecuteSqlCommand: - query_dao = QueryDAO() - sql_json_executor = Superset._create_sql_json_executor( - execution_context, query_dao - ) - execution_context_convertor = ExecutionContextConvertor() - execution_context_convertor.set_max_row_in_display( - int(config.get("DISPLAY_MAX_ROW")) - ) - return ExecuteSqlCommand( - execution_context, - query_dao, - DatabaseDAO(), - CanAccessQueryValidatorImpl(), - SqlQueryRenderImpl(get_template_processor), - sql_json_executor, - execution_context_convertor, - config["SQLLAB_CTAS_NO_LIMIT"], - log_params, - ) - - @staticmethod - def _create_sql_json_executor( - execution_context: SqlJsonExecutionContext, query_dao: QueryDAO - ) -> SqlJsonExecutor: - sql_json_executor: SqlJsonExecutor - if execution_context.is_run_asynchronous(): - sql_json_executor = 
ASynchronousSqlJsonExecutor(query_dao, get_sql_results) - else: - sql_json_executor = SynchronousSqlJsonExecutor( - query_dao, - get_sql_results, - config.get("SQLLAB_TIMEOUT"), - is_feature_enabled("SQLLAB_BACKEND_PERSISTENCE"), - ) - return sql_json_executor - - @staticmethod - def _set_http_status_into_Sql_lab_exception(ex: SqlLabException) -> None: - if isinstance(ex, QueryIsForbiddenToAccessException): - ex.status = 403 - - def _create_response_from_execution_context( # pylint: disable=invalid-name, no-self-use - self, - command_result: CommandResult, - ) -> FlaskResponse: - status_code = 200 - if command_result["status"] == SqlJsonExecutionStatus.QUERY_IS_RUNNING: - status_code = 202 - return json_success(command_result["payload"], status_code) - - @has_access - @event_logger.log_this - @expose("/csv/") - @deprecated(new_target="api/v1/sqllab/export/") - def csv(self, client_id: str) -> FlaskResponse: # pylint: disable=no-self-use - """Download the query results as csv.""" - logger.info("Exporting CSV file [%s]", client_id) - query = db.session.query(Query).filter_by(client_id=client_id).one() - - try: - query.raise_for_access() - except SupersetSecurityException as ex: - flash(ex.error.message) - return redirect("/") - - blob = None - if results_backend and query.results_key: - logger.info("Fetching CSV from results backend [%s]", query.results_key) - blob = results_backend.get(query.results_key) - if blob: - logger.info("Decompressing") - payload = utils.zlib_decompress( - blob, decode=not results_backend_use_msgpack - ) - obj = _deserialize_results_payload( - payload, query, cast(bool, results_backend_use_msgpack) - ) - - df = pd.DataFrame( - data=obj["data"], - dtype=object, - columns=[c["name"] for c in obj["columns"]], - ) - - logger.info("Using pandas to convert to CSV") - else: - logger.info("Running a query to turn into CSV") - if query.select_sql: - sql = query.select_sql - limit = None - else: - sql = query.executed_sql - limit = ParsedQuery(sql).limit - if limit is not None and query.limiting_factor in { - LimitingFactor.QUERY, - LimitingFactor.DROPDOWN, - LimitingFactor.QUERY_AND_DROPDOWN, - }: - # remove extra row from `increased_limit` - limit -= 1 - df = query.database.get_df(sql, query.schema)[:limit] - - csv_data = csv.df_to_escaped_csv(df, index=False, **config["CSV_EXPORT"]) - quoted_csv_name = parse.quote(query.name) - response = CsvResponse( - csv_data, headers=generate_download_headers("csv", quoted_csv_name) - ) - event_info = { - "event_type": "data_export", - "client_id": client_id, - "row_count": len(df.index), - "database": query.database.name, - "schema": query.schema, - "sql": query.sql, - "exported_format": "csv", - } - event_rep = repr(event_info) - logger.debug( - "CSV exported: %s", event_rep, extra={"superset_event": event_info} - ) - return response - @api @handle_api_exception @has_access @@ -2550,102 +976,6 @@ def fetch_datasource_metadata(self) -> FlaskResponse: # pylint: disable=no-self datasource.raise_for_access() return json_success(json.dumps(sanitize_datasource_data(datasource.data))) - @has_access_api - @event_logger.log_this - @expose("/queries/") - @expose("/queries/") - @deprecated(new_target="api/v1/query/updated_since") - def queries(self, last_updated_ms: float | int) -> FlaskResponse: - """ - Get the updated queries. 
- - :param last_updated_ms: Unix time (milliseconds) - """ - - return self.queries_exec(last_updated_ms) - - @staticmethod - def queries_exec(last_updated_ms: float | int) -> FlaskResponse: - stats_logger.incr("queries") - if not get_user_id(): - return json_error_response( - "Please login to access the queries.", status=403 - ) - - # UTC date time, same that is stored in the DB. - last_updated_dt = datetime.utcfromtimestamp(last_updated_ms / 1000) - - sql_queries = ( - db.session.query(Query) - .filter(Query.user_id == get_user_id(), Query.changed_on >= last_updated_dt) - .all() - ) - dict_queries = {q.client_id: q.to_dict() for q in sql_queries} - return json_success(json.dumps(dict_queries, default=utils.json_int_dttm_ser)) - - @has_access - @event_logger.log_this - @expose("/search_queries") - @deprecated(new_target="api/v1/query/") - def search_queries(self) -> FlaskResponse: # pylint: disable=no-self-use - """ - Search for previously run sqllab queries. Used for Sqllab Query Search - page /superset/sqllab#search. - - Custom permission can_only_search_queries_owned restricts queries - to only queries run by current user. - - :returns: Response with list of sql query dicts - """ - if security_manager.can_access_all_queries(): - search_user_id = request.args.get("user_id") - elif request.args.get("user_id") is not None: - try: - search_user_id = int(cast(int, request.args.get("user_id"))) - except ValueError: - return Response(status=400, mimetype="application/json") - if search_user_id != get_user_id(): - return Response(status=403, mimetype="application/json") - else: - search_user_id = get_user_id() - database_id = request.args.get("database_id") - search_text = request.args.get("search_text") - # From and To time stamp should be Epoch timestamp in seconds - - query = db.session.query(Query) - if search_user_id: - # Filter on user_id - query = query.filter(Query.user_id == search_user_id) - - if database_id: - # Filter on db Id - query = query.filter(Query.database_id == database_id) - - if status := request.args.get("status"): - # Filter on status - query = query.filter(Query.status == status) - - if search_text: - # Filter on search text - query = query.filter(Query.sql.like(f"%{search_text}%")) - - if from_time := request.args.get("from"): - query = query.filter(Query.start_time > int(from_time)) - - if to_time := request.args.get("to"): - query = query.filter(Query.start_time < int(to_time)) - - query_limit = config["QUERY_SEARCH_LIMIT"] - sql_queries = query.order_by(Query.start_time.asc()).limit(query_limit).all() - - dict_queries = [q.to_dict() for q in sql_queries] - - return Response( - json.dumps(dict_queries, default=utils.json_int_dttm_ser), - status=200, - mimetype="application/json", - ) - @app.errorhandler(500) def show_traceback(self) -> FlaskResponse: # pylint: disable=no-self-use return ( @@ -2685,27 +1015,20 @@ def welcome(self) -> FlaskResponse: @has_access @event_logger.log_this - @expose("/profile//") - def profile(self, username: str) -> FlaskResponse: + @expose("/profile/") + def profile(self) -> FlaskResponse: """User profile page""" - user = ( - db.session.query(ab_models.User).filter_by(username=username).one_or_none() - ) - # Prevent returning 404 when user is not found to prevent username scanning - user_id = -1 if not user else user.id - # Prevent unauthorized access to other user's profiles, - # unless configured to do so on with ENABLE_BROAD_ACTIVITY_ACCESS - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - + user = 
g.user if hasattr(g, "user") and g.user else None + if not user or security_manager.is_guest_user(user) or user.is_anonymous: + abort(404) payload = { "user": bootstrap_user_data(user, include_perms=True), - "common": common_bootstrap_payload(g.user), + "common": common_bootstrap_payload(user), } return self.render_template( "superset/basic.html", - title=_("%(user)s's profile", user=username).__str__(), + title=_("%(user)s's profile", user=get_username()).__str__(), entry="profile", bootstrap_data=json.dumps( payload, default=utils.pessimistic_json_iso_dttm_ser @@ -2795,38 +1118,3 @@ def sqllab(self) -> FlaskResponse: @event_logger.log_this def sqllab_history(self) -> FlaskResponse: return super().render_app_template() - - @api - @has_access_api - @event_logger.log_this - @expose("/schemas_access_for_file_upload") - @deprecated(new_target="api/v1/database/{pk}/schemas_access_for_file_upload/") - def schemas_access_for_file_upload(self) -> FlaskResponse: - """ - This method exposes an API endpoint to - get the schema access control settings for file upload in this database - """ - if not request.args.get("db_id"): - return json_error_response("No database is allowed for your file upload") - - db_id = int(request.args["db_id"]) - database = db.session.query(Database).filter_by(id=db_id).one() - try: - schemas_allowed = database.get_schema_access_for_file_upload() - if security_manager.can_access_database(database): - return self.json_response(schemas_allowed) - # the list schemas_allowed should not be empty here - # and the list schemas_allowed_processed returned from security_manager - # should not be empty either, - # otherwise the database should have been filtered out - # in CsvToDatabaseForm - schemas_allowed_processed = security_manager.get_schemas_accessible_by_user( - database, schemas_allowed, False - ) - return self.json_response(schemas_allowed_processed) - except Exception as ex: # pylint: disable=broad-except - logger.exception(ex) - return json_error_response( - "Failed to fetch schemas allowed for csv upload in this database! " - "Please contact your Superset Admin!" 
- ) diff --git a/superset/views/datasource/utils.py b/superset/views/datasource/utils.py index a4cf0c5e9063f..65b19c34938f3 100644 --- a/superset/views/datasource/utils.py +++ b/superset/views/datasource/utils.py @@ -21,8 +21,8 @@ from superset.common.query_context_factory import QueryContextFactory from superset.common.utils.query_cache_manager import QueryCacheManager from superset.constants import CacheRegion +from superset.daos.datasource import DatasourceDAO from superset.datasets.commands.exceptions import DatasetSamplesFailedError -from superset.datasource.dao import DatasourceDAO from superset.utils.core import QueryStatus from superset.views.datasource.schemas import SamplesPayloadSchema diff --git a/superset/views/datasource/views.py b/superset/views/datasource/views.py index ba5a05b9cfbfd..f1086acd47330 100644 --- a/superset/views/datasource/views.py +++ b/superset/views/datasource/views.py @@ -31,11 +31,11 @@ from superset.commands.utils import populate_owners from superset.connectors.sqla.models import SqlaTable from superset.connectors.sqla.utils import get_physical_table_metadata +from superset.daos.datasource import DatasourceDAO from superset.datasets.commands.exceptions import ( DatasetForbiddenError, DatasetNotFoundError, ) -from superset.datasource.dao import DatasourceDAO from superset.exceptions import SupersetException, SupersetSecurityException from superset.models.core import Database from superset.superset_typing import FlaskResponse diff --git a/superset/views/log/api.py b/superset/views/log/api.py index e218792c25970..d3699e3885991 100644 --- a/superset/views/log/api.py +++ b/superset/views/log/api.py @@ -23,19 +23,18 @@ import superset.models.core as models from superset import event_logger, security_manager +from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP +from superset.daos.log import LogDAO from superset.exceptions import SupersetSecurityException from superset.superset_typing import FlaskResponse from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics -from superset.views.log.dao import LogDAO +from superset.views.log import LogMixin from superset.views.log.schemas import ( get_recent_activity_schema, RecentActivityResponseSchema, RecentActivitySchema, ) -from ...constants import MODEL_API_RW_METHOD_PERMISSION_MAP -from . 
import LogMixin - class LogRestApi(LogMixin, BaseSupersetModelRestApi): datamodel = SQLAInterface(models.Log) @@ -82,7 +81,7 @@ def get_user_activity_access_error(self, user_id: int) -> Optional[FlaskResponse return self.response(403, message=ex.message) return None - @expose("/recent_activity//", methods=("GET",)) + @expose("/recent_activity/", methods=("GET",)) @protect() @safe @statsd_metrics @@ -92,7 +91,7 @@ def get_user_activity_access_error(self, user_id: int) -> Optional[FlaskResponse f".recent_activity", log_to_statsd=False, ) - def recent_activity(self, user_id: int, **kwargs: Any) -> FlaskResponse: + def recent_activity(self, **kwargs: Any) -> FlaskResponse: """Get recent activity data for a user --- get: @@ -125,16 +124,11 @@ def recent_activity(self, user_id: int, **kwargs: Any) -> FlaskResponse: 500: $ref: '#/components/responses/500' """ - if error_obj := self.get_user_activity_access_error(user_id): - return error_obj - args = kwargs["rison"] page, page_size = self._sanitize_page_args(*self._handle_page_args(args)) actions = args.get("actions", ["explore", "dashboard"]) distinct = args.get("distinct", True) - payload = LogDAO.get_recent_activity( - user_id, actions, distinct, page, page_size - ) + payload = LogDAO.get_recent_activity(actions, distinct, page, page_size) return self.response(200, result=payload) diff --git a/superset/views/utils.py b/superset/views/utils.py index 75ab4ebe4fe85..29f5a4c7e463d 100644 --- a/superset/views/utils.py +++ b/superset/views/utils.py @@ -33,7 +33,7 @@ import superset.models.core as models from superset import app, dataframe, db, result_set, viz from superset.common.db_query_status import QueryStatus -from superset.datasource.dao import DatasourceDAO +from superset.daos.datasource import DatasourceDAO from superset.errors import ErrorLevel, SupersetError, SupersetErrorType from superset.exceptions import ( CacheLoadError, @@ -516,39 +516,6 @@ def check_datasource_perms( viz_obj.raise_for_access() -def check_slice_perms(_self: Any, slice_id: int) -> None: - """ - Check if user can access a cached response from slice_json. - - This function takes `self` since it must have the same signature as the - the decorated method. - - :param slice_id: The slice ID - :raises SupersetSecurityException: If the user cannot access the resource - """ - - form_data, slc = get_form_data(slice_id, use_slice_data=True) - - if slc and slc.datasource: - try: - viz_obj = get_viz( - datasource_type=slc.datasource.type, - datasource_id=slc.datasource.id, - form_data=form_data, - force=False, - ) - except NoResultFound as ex: - raise SupersetSecurityException( - SupersetError( - error_type=SupersetErrorType.UNKNOWN_DATASOURCE_TYPE_ERROR, - level=ErrorLevel.ERROR, - message="Could not find viz object", - ) - ) from ex - - viz_obj.raise_for_access() - - def _deserialize_results_payload( payload: Union[bytes, str], query: Query, use_msgpack: Optional[bool] = False ) -> dict[str, Any]: diff --git a/tests/integration_tests/access_tests.py b/tests/integration_tests/access_tests.py index ab0100ac24eda..86e898462c10a 100644 --- a/tests/integration_tests/access_tests.py +++ b/tests/integration_tests/access_tests.py @@ -16,10 +16,8 @@ # under the License. 
# isort:skip_file """Unit tests for Superset""" -import json import unittest from typing import Optional -from unittest import mock import pytest from flask.ctx import AppContext @@ -42,7 +40,6 @@ from superset import db, security_manager from superset.connectors.sqla.models import SqlaTable from superset.models import core as models -from superset.models.datasource_access_request import DatasourceAccessRequest from superset.utils.core import get_user_id, get_username, override_user from superset.utils.database import get_example_database @@ -84,29 +81,6 @@ SCHEMA_ACCESS_ROLE = "schema_access_role" -def create_access_request(session, ds_type, ds_name, role_name, username): - # TODO: generalize datasource names - if ds_type == "table": - ds = session.query(SqlaTable).filter(SqlaTable.table_name == ds_name).first() - else: - # This function will only work for ds_type == "table" - raise NotImplementedError() - ds_perm_view = security_manager.find_permission_view_menu( - "datasource_access", ds.perm - ) - security_manager.add_permission_role( - security_manager.find_role(role_name), ds_perm_view - ) - access_request = DatasourceAccessRequest( - datasource_id=ds.id, - datasource_type=ds_type, - created_by_fk=security_manager.find_user(username=username).id, - ) - session.add(access_request) - session.commit() - return access_request - - class TestRequestAccess(SupersetTestCase): @classmethod def setUpClass(cls): @@ -139,386 +113,6 @@ def tearDown(self): db.session.commit() db.session.close() - def test_override_role_permissions_is_admin_only(self): - self.logout() - self.login("alpha") - response = self.client.post( - "/superset/override_role_permissions/", - data=json.dumps(ROLE_TABLES_PERM_DATA), - content_type="application/json", - follow_redirects=True, - ) - self.assertNotEqual(405, response.status_code) - - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_override_role_permissions_1_table(self): - database = get_example_database() - with database.get_sqla_engine_with_context() as engine: - schema = inspect(engine).default_schema_name - - perm_data = ROLE_TABLES_PERM_DATA.copy() - perm_data["database"][0]["schema"][0]["name"] = schema - - response = self.client.post( - "/superset/override_role_permissions/", - data=json.dumps(perm_data), - content_type="application/json", - ) - self.assertEqual(201, response.status_code) - - updated_override_me = security_manager.find_role("override_me") - self.assertEqual(1, len(updated_override_me.permissions)) - birth_names = self.get_table(name="birth_names") - self.assertEqual( - birth_names.perm, updated_override_me.permissions[0].view_menu.name - ) - self.assertEqual( - "datasource_access", updated_override_me.permissions[0].permission.name - ) - - @pytest.mark.usefixtures( - "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" - ) - def test_override_role_permissions_drops_absent_perms(self): - database = get_example_database() - with database.get_sqla_engine_with_context() as engine: - schema = inspect(engine).default_schema_name - - override_me = security_manager.find_role("override_me") - override_me.permissions.append( - security_manager.find_permission_view_menu( - view_menu_name=self.get_table(name="energy_usage").perm, - permission_name="datasource_access", - ) - ) - db.session.flush() - - perm_data = ROLE_TABLES_PERM_DATA.copy() - perm_data["database"][0]["schema"][0]["name"] = schema - - response = self.client.post( - "/superset/override_role_permissions/", - data=json.dumps(perm_data), - 
content_type="application/json", - ) - self.assertEqual(201, response.status_code) - updated_override_me = security_manager.find_role("override_me") - self.assertEqual(1, len(updated_override_me.permissions)) - birth_names = self.get_table(name="birth_names") - self.assertEqual( - birth_names.perm, updated_override_me.permissions[0].view_menu.name - ) - self.assertEqual( - "datasource_access", updated_override_me.permissions[0].permission.name - ) - - def test_clean_requests_after_role_extend(self): - session = db.session - - # Case 1. Gamma and gamma2 requested test_role1 on energy_usage access - # Gamma already has role test_role1 - # Extend test_role1 with energy_usage access for gamma2 - # Check if access request for gamma at energy_usage was deleted - - # gamma2 and gamma request table_role on energy usage - if app.config["ENABLE_ACCESS_REQUEST"]: - access_request1 = create_access_request( - session, "table", "random_time_series", TEST_ROLE_1, "gamma2" - ) - ds_1_id = access_request1.datasource_id - create_access_request( - session, "table", "random_time_series", TEST_ROLE_1, "gamma" - ) - access_requests = self.get_access_requests("gamma", "table", ds_1_id) - self.assertTrue(access_requests) - # gamma gets test_role1 - self.get_resp( - GRANT_ROLE_REQUEST.format("table", ds_1_id, "gamma", TEST_ROLE_1) - ) - # extend test_role1 with access on energy usage - self.client.get( - EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_1) - ) - access_requests = self.get_access_requests("gamma", "table", ds_1_id) - self.assertFalse(access_requests) - - gamma_user = security_manager.find_user(username="gamma") - gamma_user.roles.remove(security_manager.find_role("test_role1")) - - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_clean_requests_after_alpha_grant(self): - session = db.session - - # Case 2. Two access requests from gamma and gamma2 - # Gamma becomes alpha, gamma2 gets granted - # Check if request by gamma has been deleted - - access_request1 = create_access_request( - session, "table", "birth_names", TEST_ROLE_1, "gamma" - ) - create_access_request(session, "table", "birth_names", TEST_ROLE_2, "gamma2") - ds_1_id = access_request1.datasource_id - # gamma becomes alpha - alpha_role = security_manager.find_role("Alpha") - gamma_user = security_manager.find_user(username="gamma") - gamma_user.roles.append(alpha_role) - session.commit() - access_requests = self.get_access_requests("gamma", "table", ds_1_id) - self.assertTrue(access_requests) - self.client.post( - EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_2) - ) - access_requests = self.get_access_requests("gamma", "table", ds_1_id) - self.assertFalse(access_requests) - - gamma_user = security_manager.find_user(username="gamma") - gamma_user.roles.remove(security_manager.find_role("Alpha")) - session.commit() - - @pytest.mark.usefixtures("load_energy_table_with_slice") - def test_clean_requests_after_db_grant(self): - session = db.session - - # Case 3. 
Two access requests from gamma and gamma2 - # Gamma gets database access, gamma2 access request granted - # Check if request by gamma has been deleted - - gamma_user = security_manager.find_user(username="gamma") - access_request1 = create_access_request( - session, "table", "energy_usage", TEST_ROLE_1, "gamma" - ) - create_access_request(session, "table", "energy_usage", TEST_ROLE_2, "gamma2") - ds_1_id = access_request1.datasource_id - # gamma gets granted database access - database = session.query(models.Database).first() - - security_manager.add_permission_view_menu("database_access", database.perm) - ds_perm_view = security_manager.find_permission_view_menu( - "database_access", database.perm - ) - security_manager.add_permission_role( - security_manager.find_role(DB_ACCESS_ROLE), ds_perm_view - ) - gamma_user.roles.append(security_manager.find_role(DB_ACCESS_ROLE)) - session.commit() - access_requests = self.get_access_requests("gamma", "table", ds_1_id) - self.assertTrue(access_requests) - # gamma2 request gets fulfilled - self.client.post( - EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_2) - ) - access_requests = self.get_access_requests("gamma", "table", ds_1_id) - - self.assertFalse(access_requests) - gamma_user = security_manager.find_user(username="gamma") - gamma_user.roles.remove(security_manager.find_role(DB_ACCESS_ROLE)) - session.commit() - - @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") - def test_clean_requests_after_schema_grant(self): - session = db.session - - # Case 4. Two access requests from gamma and gamma2 - # Gamma gets schema access, gamma2 access request granted - # Check if request by gamma has been deleted - - gamma_user = security_manager.find_user(username="gamma") - access_request1 = create_access_request( - session, "table", "wb_health_population", TEST_ROLE_1, "gamma" - ) - create_access_request( - session, "table", "wb_health_population", TEST_ROLE_2, "gamma2" - ) - ds_1_id = access_request1.datasource_id - ds = ( - session.query(SqlaTable) - .filter_by(table_name="wb_health_population") - .first() - ) - original_schema = ds.schema - - ds.schema = "temp_schema" - security_manager.add_permission_view_menu("schema_access", ds.schema_perm) - schema_perm_view = security_manager.find_permission_view_menu( - "schema_access", ds.schema_perm - ) - security_manager.add_permission_role( - security_manager.find_role(SCHEMA_ACCESS_ROLE), schema_perm_view - ) - gamma_user.roles.append(security_manager.find_role(SCHEMA_ACCESS_ROLE)) - session.commit() - # gamma2 request gets fulfilled - self.client.post( - EXTEND_ROLE_REQUEST.format("table", ds_1_id, "gamma2", TEST_ROLE_2) - ) - access_requests = self.get_access_requests("gamma", "table", ds_1_id) - self.assertFalse(access_requests) - gamma_user = security_manager.find_user(username="gamma") - gamma_user.roles.remove(security_manager.find_role(SCHEMA_ACCESS_ROLE)) - - ds.schema = original_schema - session.commit() - - @mock.patch("superset.utils.core.send_mime_email") - def test_approve(self, mock_send_mime): - if app.config["ENABLE_ACCESS_REQUEST"]: - session = db.session - TEST_ROLE_NAME = "table_role" - security_manager.add_role(TEST_ROLE_NAME) - - # Case 1. Grant new role to the user. - - access_request1 = create_access_request( - session, "table", "unicode_test", TEST_ROLE_NAME, "gamma" - ) - ds_1_id = access_request1.datasource_id - self.get_resp( - GRANT_ROLE_REQUEST.format("table", ds_1_id, "gamma", TEST_ROLE_NAME) - ) - # Test email content. 
-            self.assertTrue(mock_send_mime.called)
-            call_args = mock_send_mime.call_args[0]
-            self.assertEqual(
-                [
-                    security_manager.find_user(username="gamma").email,
-                    security_manager.find_user(username="admin").email,
-                ],
-                call_args[1],
-            )
-            self.assertEqual(
-                "[Superset] Access to the datasource {} was granted".format(
-                    self.get_table_by_id(ds_1_id).full_name
-                ),
-                call_args[2]["Subject"],
-            )
-            self.assertIn(TEST_ROLE_NAME, call_args[2].as_string())
-            self.assertIn("unicode_test", call_args[2].as_string())
-
-            access_requests = self.get_access_requests("gamma", "table", ds_1_id)
-            # request was removed
-            self.assertFalse(access_requests)
-            # user was granted table_role
-            user_roles = [r.name for r in security_manager.find_user("gamma").roles]
-            self.assertIn(TEST_ROLE_NAME, user_roles)
-
-            # Case 2. Extend the role to have access to the table
-
-            access_request2 = create_access_request(
-                session, "table", "energy_usage", TEST_ROLE_NAME, "gamma"
-            )
-            ds_2_id = access_request2.datasource_id
-            energy_usage_perm = access_request2.datasource.perm
-
-            self.client.get(
-                EXTEND_ROLE_REQUEST.format(
-                    "table", access_request2.datasource_id, "gamma", TEST_ROLE_NAME
-                )
-            )
-            access_requests = self.get_access_requests("gamma", "table", ds_2_id)
-
-            # Test email content.
-            self.assertTrue(mock_send_mime.called)
-            call_args = mock_send_mime.call_args[0]
-            self.assertEqual(
-                [
-                    security_manager.find_user(username="gamma").email,
-                    security_manager.find_user(username="admin").email,
-                ],
-                call_args[1],
-            )
-            self.assertEqual(
-                "[Superset] Access to the datasource {} was granted".format(
-                    self.get_table_by_id(ds_2_id).full_name
-                ),
-                call_args[2]["Subject"],
-            )
-            self.assertIn(TEST_ROLE_NAME, call_args[2].as_string())
-            self.assertIn("energy_usage", call_args[2].as_string())
-
-            # request was removed
-            self.assertFalse(access_requests)
-            # table_role was extended to grant access to the energy_usage table/
-            perm_view = security_manager.find_permission_view_menu(
-                "datasource_access", energy_usage_perm
-            )
-            TEST_ROLE = security_manager.find_role(TEST_ROLE_NAME)
-            self.assertIn(perm_view, TEST_ROLE.permissions)
-
-    def test_request_access(self):
-        if app.config["ENABLE_ACCESS_REQUEST"]:
-            session = db.session
-            self.logout()
-            self.login(username="gamma")
-            gamma_user = security_manager.find_user(username="gamma")
-            security_manager.add_role("dummy_role")
-            gamma_user.roles.append(security_manager.find_role("dummy_role"))
-            session.commit()
-
-            ACCESS_REQUEST = (
-                "/superset/request_access?"
-                "datasource_type={}&"
-                "datasource_id={}&"
-                "action={}&"
-            )
-            ROLE_GRANT_LINK = (
-                '<a href="/superset/approve?datasource_type={}&datasource_id={}&'
-                'created_by={}&role_to_grant={}">Grant {} Role</a>'
-            )
-
-            # Request table access, there are no roles have this table.
-
-            table1 = (
-                session.query(SqlaTable)
-                .filter_by(table_name="random_time_series")
-                .first()
-            )
-            table_1_id = table1.id
-
-            # request access to the table
-            resp = self.get_resp(ACCESS_REQUEST.format("table", table_1_id, "go"))
-            assert "Access was requested" in resp
-            access_request1 = self.get_access_requests("gamma", "table", table_1_id)
-            assert access_request1 is not None
-
-            # Request access, roles exist that contains the table.
-            # add table to the existing roles
-            table3 = (
-                session.query(SqlaTable).filter_by(table_name="energy_usage").first()
-            )
-            table_3_id = table3.id
-            table3_perm = table3.perm
-
-            security_manager.add_role("energy_usage_role")
-            alpha_role = security_manager.find_role("Alpha")
-            security_manager.add_permission_role(
-                alpha_role,
-                security_manager.find_permission_view_menu(
-                    "datasource_access", table3_perm
-                ),
-            )
-            security_manager.add_permission_role(
-                security_manager.find_role("energy_usage_role"),
-                security_manager.find_permission_view_menu(
-                    "datasource_access", table3_perm
-                ),
-            )
-            session.commit()
-
-            self.get_resp(ACCESS_REQUEST.format("table", table_3_id, "go"))
-            access_request3 = self.get_access_requests("gamma", "table", table_3_id)
-            approve_link_3 = ROLE_GRANT_LINK.format(
-                "table", table_3_id, "gamma", "energy_usage_role", "energy_usage_role"
-            )
-            self.assertEqual(
-                access_request3.roles_with_datasource,
-                f"<ul><li>{approve_link_3}</li></ul>",
-            )
-
-            # cleanup
-            gamma_user = security_manager.find_user(username="gamma")
-            gamma_user.roles.remove(security_manager.find_role("dummy_role"))
-            session.commit()
-
     @pytest.mark.parametrize(
         "username,user_id",
diff --git a/tests/integration_tests/base_tests.py b/tests/integration_tests/base_tests.py
index fec66f88d2da6..7f7c543d8b04a 100644
--- a/tests/integration_tests/base_tests.py
+++ b/tests/integration_tests/base_tests.py
@@ -42,7 +42,6 @@
 from superset.models.slice import Slice
 from superset.models.core import Database
 from superset.models.dashboard import Dashboard
-from superset.models.datasource_access_request import DatasourceAccessRequest
 from superset.utils.core import get_example_default_schema
 from superset.utils.database import get_example_database
 from superset.views.base_api import BaseSupersetModelRestApi
@@ -268,18 +267,6 @@ def get_json_resp(
         resp = self.get_resp(url, data, follow_redirects, raise_on_error, json_)
         return json.loads(resp)
 
-    def get_access_requests(self, username, ds_type, ds_id):
-        DAR = DatasourceAccessRequest
-        return (
-            db.session.query(DAR)
-            .filter(
-                DAR.created_by == security_manager.find_user(username=username),
-                DAR.datasource_type == ds_type,
-                DAR.datasource_id == ds_id,
-            )
-            .first()
-        )
-
     def logout(self):
         self.client.get("/logout/", follow_redirects=True)
@@ -411,33 +398,6 @@ def delete_fake_db_for_macros():
         db.session.delete(database)
         db.session.commit()
 
-    def validate_sql(
-        self,
-        sql,
-        client_id=None,
-        username=None,
-        raise_on_error=False,
-        database_name="examples",
-        template_params=None,
-    ):
-        if username:
-            self.logout()
-            self.login(username=username)
-        dbid = SupersetTestCase.get_database_by_name(database_name).id
-        resp = self.get_json_resp(
-            "/superset/validate_sql_json/",
-            raise_on_error=False,
-            data=dict(
-                database_id=dbid,
-                sql=sql,
-                client_id=client_id,
-                templateParams=template_params,
-            ),
-        )
-        if raise_on_error and "error" in resp:
-            raise Exception("validate_sql failed")
-        return resp
-
     def get_dash_by_slug(self, dash_slug):
         sesh = db.session()
         return sesh.query(Dashboard).filter_by(slug=dash_slug).first()
@@ -517,6 +477,48 @@ def put_assert_metric(
     def get_dttm(cls):
         return datetime.strptime("2019-01-02 03:04:05.678900", "%Y-%m-%d %H:%M:%S.%f")
 
+    def insert_dashboard(
+        self,
+        dashboard_title: str,
+        slug: Optional[str],
+        owners: list[int],
+        roles: list[int] = [],
+        created_by=None,
+        slices: Optional[list[Slice]] = None,
+        position_json: str = "",
+        css: str = "",
+        json_metadata: str = "",
+        published: bool = False,
+        certified_by: Optional[str] = None,
+        certification_details: Optional[str] = None,
+    ) -> Dashboard:
+        obj_owners = list()
+        obj_roles = list()
+        slices = slices or []
+        for owner in owners:
+            user = db.session.query(security_manager.user_model).get(owner)
+            obj_owners.append(user)
+        for role in roles:
+            role_obj = db.session.query(security_manager.role_model).get(role)
+            obj_roles.append(role_obj)
+        dashboard = Dashboard(
+            dashboard_title=dashboard_title,
+            slug=slug,
+            owners=obj_owners,
+            roles=obj_roles,
+            position_json=position_json,
+            css=css,
+            json_metadata=json_metadata,
+            slices=slices,
+            published=published,
+            created_by=created_by,
+            certified_by=certified_by,
+            certification_details=certification_details,
+        )
+        db.session.add(dashboard)
+        db.session.commit()
+        return dashboard
+
 
 @contextmanager
 def db_insert_temp_object(obj: DeclarativeMeta):
diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py
index 53faa4b3a048e..69e99978e5a29 100644
--- a/tests/integration_tests/charts/api_tests.py
+++ b/tests/integration_tests/charts/api_tests.py
@@ -35,6 +35,7 @@
 from superset.utils.core import get_example_default_schema
 from superset.utils.database import get_example_database
 
+from tests.integration_tests.conftest import with_feature_flags
 from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin
 from tests.integration_tests.base_tests import SupersetTestCase
 from tests.integration_tests.fixtures.birth_names_dashboard import (
@@ -610,58 +611,6 @@ def test_update_chart(self):
         db.session.delete(model)
         db.session.commit()
 
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_chart_activity_access_disabled(self):
-        """
-        Chart API: Test ENABLE_BROAD_ACTIVITY_ACCESS = False
-        """
-        access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"]
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False
-        admin = self.get_user("admin")
-        birth_names_table_id = SupersetTestCase.get_table(name="birth_names").id
-        chart_id = self.insert_chart("title", [admin.id], birth_names_table_id).id
-        chart_data = {
-            "slice_name": (new_name := "title1_changed"),
-        }
-        self.login(username="admin")
-        uri = f"api/v1/chart/{chart_id}"
-        rv = self.put_assert_metric(uri, chart_data, "put")
-        self.assertEqual(rv.status_code, 200)
-        model = db.session.query(Slice).get(chart_id)
-
-        self.assertEqual(model.slice_name, new_name)
-        self.assertEqual(model.changed_by_url, "")
-
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag
-        db.session.delete(model)
-        db.session.commit()
-
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_chart_activity_access_enabled(self):
-        """
-        Chart API: Test ENABLE_BROAD_ACTIVITY_ACCESS = True
-        """
-        access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"]
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = True
-        admin = self.get_user("admin")
-        birth_names_table_id = SupersetTestCase.get_table(name="birth_names").id
-        chart_id = self.insert_chart("title", [admin.id], birth_names_table_id).id
-        chart_data = {
-            "slice_name": (new_name := "title1_changed"),
-        }
-        self.login(username="admin")
-        uri = f"api/v1/chart/{chart_id}"
-        rv = self.put_assert_metric(uri, chart_data, "put")
-        self.assertEqual(rv.status_code, 200)
-        model = db.session.query(Slice).get(chart_id)
-
-        self.assertEqual(model.slice_name, new_name)
-        self.assertEqual(model.changed_by_url, "/superset/profile/admin")
-
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag
-        db.session.delete(model)
-        db.session.commit()
-
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_chart_get_list_no_username(self):
         """
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index f0c72b068036b..c96555503b598 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -16,17 +16,17 @@
 # under the License.
 # isort:skip_file
 """Unit tests for Superset"""
-import csv
 import datetime
 import doctest
 import html
-import io
 import json
 import logging
 from urllib.parse import quote
 
+import prison
 import superset.utils.database
 from superset.utils.core import backend
+from tests.integration_tests.fixtures.public_role import public_role_like_gamma
 from tests.integration_tests.fixtures.birth_names_dashboard import (
     load_birth_names_dashboard_with_slices,
     load_birth_names_data,
@@ -49,6 +49,7 @@
     load_energy_table_with_slice,
     load_energy_table_data,
 )
+from tests.integration_tests.insert_chart_mixin import InsertChartMixin
 from tests.integration_tests.test_app import app
 import superset.views.utils
 from superset import (
@@ -66,7 +67,6 @@
 from superset.models import core as models
 from superset.models.annotations import Annotation, AnnotationLayer
 from superset.models.dashboard import Dashboard
-from superset.models.datasource_access_request import DatasourceAccessRequest
 from superset.models.slice import Slice
 from superset.models.sql_lab import Query
 from superset.result_set import SupersetResultSet
@@ -87,13 +87,12 @@
 
 @pytest.fixture(scope="module")
 def cleanup():
     db.session.query(Query).delete()
-    db.session.query(DatasourceAccessRequest).delete()
     db.session.query(models.Log).delete()
     db.session.commit()
     yield
 
 
-class TestCore(SupersetTestCase):
+class TestCore(SupersetTestCase, InsertChartMixin):
     def setUp(self):
         self.table_ids = {
             tbl.table_name: tbl.id for tbl in (db.session.query(SqlaTable).all())
@@ -104,6 +103,50 @@ def tearDown(self):
         db.session.query(Query).delete()
         app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = self.original_unsafe_db_setting
 
+    def insert_dashboard_created_by(self, username: str) -> Dashboard:
+        user = self.get_user(username)
+        dashboard = self.insert_dashboard(
+            f"create_title_test",
+            f"create_slug_test",
+            [user.id],
+            created_by=user,
+        )
+        return dashboard
+
+    def insert_chart_created_by(self, username: str) -> Slice:
+        user = self.get_user(username)
+        dataset = db.session.query(SqlaTable).first()
+        chart = self.insert_chart(
+            f"create_title_test",
+            [user.id],
+            dataset.id,
+            created_by=user,
+        )
+        return chart
+
+    @pytest.fixture()
+    def insert_dashboard_created_by_admin(self):
+        with self.create_app().app_context():
+            dashboard = self.insert_dashboard_created_by("admin")
+            yield dashboard
+            db.session.delete(dashboard)
+            db.session.commit()
+
+    @pytest.fixture()
+    def insert_dashboard_created_by_gamma(self):
+        dashboard = self.insert_dashboard_created_by("gamma")
+        yield dashboard
+        db.session.delete(dashboard)
+        db.session.commit()
+
+    @pytest.fixture()
+    def insert_chart_created_by_admin(self):
+        with self.create_app().app_context():
+            chart = self.insert_chart_created_by("admin")
+            yield chart
+            db.session.delete(chart)
+            db.session.commit()
+
     def test_login(self):
         resp = self.get_resp("/login/", data=dict(username="admin", password="general"))
         self.assertNotIn("User confirmation needed", resp)
@@ -153,95 +196,6 @@ def test_viz_cache_key(self):
 
         self.assertEqual(cache_key_with_groupby, viz.cache_key(qobj))
 
-    def test_get_superset_tables_not_allowed(self):
-        example_db = superset.utils.database.get_example_database()
-        schema_name = self.default_schema_backend_map[example_db.backend]
-        self.login(username="gamma")
-        uri = f"superset/tables/{example_db.id}/{schema_name}/"
-        rv = self.client.get(uri)
-        self.assertEqual(rv.status_code, 404)
-
-    @pytest.mark.usefixtures("load_energy_table_with_slice")
-    def test_get_superset_tables_allowed(self):
-        session = db.session
-        table_name = "energy_usage"
-        role_name = "dummy_role"
-        self.logout()
-        self.login(username="gamma")
-        gamma_user = security_manager.find_user(username="gamma")
-        security_manager.add_role(role_name)
-        dummy_role = security_manager.find_role(role_name)
-        gamma_user.roles.append(dummy_role)
-
-        tbl_id = self.table_ids.get(table_name)
-        table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id).first()
-        table_perm = table.perm
-
-        security_manager.add_permission_role(
-            dummy_role,
-            security_manager.find_permission_view_menu("datasource_access", table_perm),
-        )
-
-        session.commit()
-
-        example_db = utils.get_example_database()
-        schema_name = self.default_schema_backend_map[example_db.backend]
-        uri = f"superset/tables/{example_db.id}/{schema_name}/"
-        rv = self.client.get(uri)
-        self.assertEqual(rv.status_code, 200)
-
-        # cleanup
-        gamma_user = security_manager.find_user(username="gamma")
-        gamma_user.roles.remove(security_manager.find_role(role_name))
-        session.commit()
-
-    @pytest.mark.usefixtures("load_energy_table_with_slice")
-    def test_get_superset_tables_not_allowed_with_out_permissions(self):
-        session = db.session
-        role_name = "dummy_role_no_table_access"
-        self.logout()
-        self.login(username="gamma")
-        gamma_user = security_manager.find_user(username="gamma")
-        security_manager.add_role(role_name)
-        dummy_role = security_manager.find_role(role_name)
-        gamma_user.roles.append(dummy_role)
-
-        session.commit()
-
-        example_db = utils.get_example_database()
-        schema_name = self.default_schema_backend_map[example_db.backend]
-        uri = f"superset/tables/{example_db.id}/{schema_name}/"
-        rv = self.client.get(uri)
-        self.assertEqual(rv.status_code, 404)
-
-        # cleanup
-        gamma_user = security_manager.find_user(username="gamma")
-        gamma_user.roles.remove(security_manager.find_role(role_name))
-        session.commit()
-
-    def test_get_superset_tables_database_not_found(self):
-        self.login(username="admin")
-        uri = f"superset/tables/invalid/public/"
-        rv = self.client.get(uri)
-        self.assertEqual(rv.status_code, 404)
-
-    def test_get_superset_tables_schema_undefined(self):
-        example_db = superset.utils.database.get_example_database()
-        self.login(username="gamma")
-        uri = f"superset/tables/{example_db.id}/undefined/"
-        rv = self.client.get(uri)
-        self.assertEqual(rv.status_code, 422)
-
-    def test_admin_only_permissions(self):
-        def assert_admin_permission_in(role_name, assert_func):
-            role = security_manager.find_role(role_name)
-            permissions = [p.permission.name for p in role.permissions]
-            assert_func("can_approve", permissions)
-
-        assert_admin_permission_in("Admin", self.assertIn)
-        assert_admin_permission_in("Alpha", self.assertNotIn)
-        assert_admin_permission_in("Gamma", self.assertNotIn)
-
     def test_admin_only_menu_views(self):
         def assert_admin_view_menus_in(role_name, assert_func):
             role = security_manager.find_role(role_name)
@@ -320,19 +274,6 @@ def test_save_slice(self):
         db.session.delete(slc)
         db.session.commit()
 
-    @pytest.mark.usefixtures("load_energy_table_with_slice")
-    def test_filter_endpoint(self):
-        self.login(username="admin")
-        tbl_id = self.table_ids.get("energy_usage")
-        table = db.session.query(SqlaTable).filter(SqlaTable.id == tbl_id)
-        table.filter_select_enabled = True
-        url = "/superset/filter/table/{}/target/"
-
-        # Changing name
-        resp = self.get_resp(url.format(tbl_id))
-        assert len(resp) > 0
-        assert "energy_target0" in resp
-
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_slice_data(self):
         # slice data should have some required attributes
@@ -368,43 +309,6 @@ def test_add_slice(self):
         resp = self.client.get(url)
         self.assertEqual(resp.status_code, 200)
 
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_get_user_slices_for_owners(self):
-        self.login(username="alpha")
-        user = security_manager.find_user("alpha")
-        slice_name = "Girls"
-
-        # ensure user is not owner of any slices
-        url = f"/superset/user_slices/{user.id}/"
-        resp = self.client.get(url)
-        data = json.loads(resp.data)
-        self.assertEqual(data, [])
-
-        # make user owner of slice and verify that endpoint returns said slice
-        slc = self.get_slice(
-            slice_name=slice_name, session=db.session, expunge_from_session=False
-        )
-        slc.owners = [user]
-        db.session.merge(slc)
-        db.session.commit()
-        url = f"/superset/user_slices/{user.id}/"
-        resp = self.client.get(url)
-        data = json.loads(resp.data)
-        self.assertEqual(len(data), 1)
-        self.assertEqual(data[0]["title"], slice_name)
-
-        # remove ownership and ensure user no longer gets slice
-        slc = self.get_slice(
-            slice_name=slice_name, session=db.session, expunge_from_session=False
-        )
-        slc.owners = []
-        db.session.merge(slc)
-        db.session.commit()
-        url = f"/superset/user_slices/{user.id}/"
-        resp = self.client.get(url)
-        data = json.loads(resp.data)
-        self.assertEqual(data, [])
-
     def test_get_user_slices(self):
         self.login(username="admin")
         userid = security_manager.find_user("admin").id
@@ -448,98 +352,6 @@ def test_misc(self):
         assert self.get_resp("/healthcheck") == "OK"
         assert self.get_resp("/ping") == "OK"
 
-    def test_testconn(self, username="admin"):
-        # need to temporarily allow sqlite dbs, teardown will undo this
-        app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = False
-        self.login(username=username)
-        database = superset.utils.database.get_example_database()
-        # validate that the endpoint works with the password-masked sqlalchemy uri
-        data = json.dumps(
-            {
-                "uri": database.safe_sqlalchemy_uri(),
-                "name": "examples",
-                "impersonate_user": False,
-            }
-        )
-        response = self.client.post(
-            "/superset/testconn", data=data, content_type="application/json"
-        )
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "application/json"
-
-        # validate that the endpoint works with the decrypted sqlalchemy uri
-        data = json.dumps(
-            {
-                "uri": database.sqlalchemy_uri_decrypted,
-                "name": "examples",
-                "impersonate_user": False,
-            }
-        )
-        response = self.client.post(
-            "/superset/testconn", data=data, content_type="application/json"
-        )
-        assert response.status_code == 200
-        assert response.headers["Content-Type"] == "application/json"
-
-    def test_testconn_failed_conn(self, username="admin"):
-        self.login(username=username)
-
-        data = json.dumps(
-            {"uri": "broken://url", "name": "examples", "impersonate_user": False}
-        )
-        response = self.client.post(
-            "/superset/testconn", data=data, content_type="application/json"
-        )
-        assert response.status_code == 400
-        assert response.headers["Content-Type"] == "application/json"
-        response_body = json.loads(response.data.decode("utf-8"))
-        expected_body = {"error": "Could not load database driver: broken"}
-        assert response_body == expected_body, "{} != {}".format(
-            response_body,
-            expected_body,
-        )
-
-        data = json.dumps(
-            {
-                "uri": "mssql+pymssql://url",
-                "name": "examples",
-                "impersonate_user": False,
-            }
-        )
-        response = self.client.post(
-            "/superset/testconn", data=data, content_type="application/json"
-        )
-        assert response.status_code == 400
-        assert response.headers["Content-Type"] == "application/json"
"application/json" - response_body = json.loads(response.data.decode("utf-8")) - expected_body = {"error": "Could not load database driver: mssql+pymssql"} - assert response_body == expected_body, "{} != {}".format( - response_body, - expected_body, - ) - - def test_testconn_unsafe_uri(self, username="admin"): - self.login(username=username) - app.config["PREVENT_UNSAFE_DB_CONNECTIONS"] = True - - response = self.client.post( - "/superset/testconn", - data=json.dumps( - { - "uri": "sqlite:///home/superset/unsafe.db", - "name": "unsafe", - "impersonate_user": False, - } - ), - content_type="application/json", - ) - self.assertEqual(400, response.status_code) - response_body = json.loads(response.data.decode("utf-8")) - expected_body = { - "error": "SQLiteDialect_pysqlite cannot be used as a data source for security reasons." - } - self.assertEqual(expected_body, response_body) - def test_custom_password_store(self): database = superset.utils.database.get_example_database() conn_pre = sqla.engine.url.make_url(database.sqlalchemy_uri_decrypted) @@ -657,95 +469,6 @@ def test_gamma(self): assert "Charts" in self.get_resp("/chart/list/") assert "Dashboards" in self.get_resp("/dashboard/list/") - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_csv_endpoint(self): - self.login() - client_id = f"{random.getrandbits(64)}"[:10] - get_name_sql = """ - SELECT name - FROM birth_names - LIMIT 1 - """ - resp = self.run_sql(get_name_sql, client_id, raise_on_error=True) - name = resp["data"][0]["name"] - sql = f""" - SELECT name - FROM birth_names - WHERE name = '{name}' - LIMIT 1 - """ - client_id = f"{random.getrandbits(64)}"[:10] - self.run_sql(sql, client_id, raise_on_error=True) - - resp = self.get_resp(f"/superset/csv/{client_id}") - data = csv.reader(io.StringIO(resp)) - expected_data = csv.reader(io.StringIO(f"name\n{name}\n")) - - client_id = f"{random.getrandbits(64)}"[:10] - self.run_sql(sql, client_id, raise_on_error=True) - - resp = self.get_resp(f"/superset/csv/{client_id}") - data = csv.reader(io.StringIO(resp)) - expected_data = csv.reader(io.StringIO(f"name\n{name}\n")) - - self.assertEqual(list(expected_data), list(data)) - self.logout() - - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_extra_table_metadata(self): - self.login() - example_db = superset.utils.database.get_example_database() - schema = "default" if example_db.backend in {"presto", "hive"} else "superset" - self.get_json_resp( - f"/superset/extra_table_metadata/{example_db.id}/birth_names/{schema}/" - ) - - def test_required_params_in_sql_json(self): - self.login() - client_id = f"{random.getrandbits(64)}"[:10] - - data = {"client_id": client_id} - rv = self.client.post( - "/superset/sql_json/", - json=data, - ) - failed_resp = { - "sql": ["Missing data for required field."], - "database_id": ["Missing data for required field."], - } - resp_data = json.loads(rv.data.decode("utf-8")) - self.assertDictEqual(resp_data, failed_resp) - self.assertEqual(rv.status_code, 400) - - data = {"sql": "SELECT 1", "client_id": client_id} - rv = self.client.post( - "/superset/sql_json/", - json=data, - ) - failed_resp = {"database_id": ["Missing data for required field."]} - resp_data = json.loads(rv.data.decode("utf-8")) - self.assertDictEqual(resp_data, failed_resp) - self.assertEqual(rv.status_code, 400) - - data = {"database_id": 1, "client_id": client_id} - rv = self.client.post( - "/superset/sql_json/", - json=data, - ) - failed_resp = {"sql": ["Missing data for required 
field."]} - resp_data = json.loads(rv.data.decode("utf-8")) - self.assertDictEqual(resp_data, failed_resp) - self.assertEqual(rv.status_code, 400) - - data = {"sql": "SELECT 1", "database_id": 1, "client_id": client_id} - rv = self.client.post( - "/superset/sql_json/", - json=data, - ) - resp_data = json.loads(rv.data.decode("utf-8")) - self.assertEqual(resp_data.get("status"), "success") - self.assertEqual(rv.status_code, 200) - def test_templated_sql_json(self): if superset.utils.database.get_example_database().backend == "presto": # TODO: make it work for presto @@ -755,32 +478,6 @@ def test_templated_sql_json(self): data = self.run_sql(sql, "fdaklj3ws") self.assertEqual(data["data"][0]["test"], "2") - @mock.patch( - "tests.integration_tests.superset_test_custom_template_processors.datetime" - ) - @mock.patch("superset.views.core.get_sql_results") - def test_custom_templated_sql_json(self, sql_lab_mock, mock_dt) -> None: - """Test sqllab receives macros expanded query.""" - mock_dt.utcnow = mock.Mock(return_value=datetime.datetime(1970, 1, 1)) - self.login() - sql = "SELECT '$DATE()' as test" - resp = { - "status": QueryStatus.SUCCESS, - "query": {"rows": 1}, - "data": [{"test": "'1970-01-01'"}], - } - sql_lab_mock.return_value = resp - - dbobj = self.create_fake_db_for_macros() - json_payload = dict(database_id=dbobj.id, sql=sql) - self.get_json_resp( - "/superset/sql_json/", raise_on_error=False, json_=json_payload - ) - assert sql_lab_mock.called - self.assertEqual(sql_lab_mock.call_args[0][1], "SELECT '1970-01-01' as test") - - self.delete_fake_db_for_macros() - def test_fetch_datasource_metadata(self): self.login(username="admin") url = "/superset/fetch_datasource_metadata?" "datasourceKey=1__table" @@ -796,74 +493,99 @@ def test_fetch_datasource_metadata(self): for k in keys: self.assertIn(k, resp.keys()) - @staticmethod - def _get_user_activity_endpoints(user: str): - userid = security_manager.find_user(user).id - return ( - f"/superset/recent_activity/{userid}/", - f"/superset/created_slices/{userid}/", - f"/superset/created_dashboards/{userid}/", - f"/superset/fave_slices/{userid}/", - f"/superset/fave_dashboards/{userid}/", - f"/superset/user_slices/{userid}/", - f"/superset/fave_dashboards_by_username/{user}/", - ) - + @pytest.mark.usefixtures("insert_dashboard_created_by_admin") + @pytest.mark.usefixtures("insert_chart_created_by_admin") @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_user_profile(self, username="admin"): self.login(username=username) slc = self.get_slice("Girls", db.session) + dashboard = db.session.query(Dashboard).filter_by(slug="births").first() + # Set a favorite dashboard + self.client.post(f"/api/v1/dashboard/{dashboard.id}/favorites/", json={}) + # Set a favorite chart + self.client.post(f"/api/v1/chart/{slc.id}/favorites/", json={}) + + # Get favorite dashboards: + request_query = { + "columns": ["created_on_delta_humanized", "dashboard_title", "url"], + "filters": [{"col": "id", "opr": "dashboard_is_favorite", "value": True}], + "keys": ["none"], + "order_column": "changed_on", + "order_direction": "desc", + "page": 0, + "page_size": 100, + } + url = f"/api/v1/dashboard/?q={prison.dumps(request_query)}" + resp = self.client.get(url) + assert resp.json["count"] == 1 + assert resp.json["result"][0]["dashboard_title"] == "USA Births Names" + + # Get Favorite Charts + request_query = { + "filters": [{"col": "id", "opr": "chart_is_favorite", "value": True}], + "order_column": "slice_name", + "order_direction": "asc", + 
"page": 0, + "page_size": 25, + } + url = f"api/v1/chart/?q={prison.dumps(request_query)}" + resp = self.client.get(url) + assert resp.json["count"] == 1 + assert resp.json["result"][0]["id"] == slc.id - # Setting some faves - url = f"/superset/favstar/Slice/{slc.id}/select/" - resp = self.get_json_resp(url) - self.assertEqual(resp["count"], 1) + # Get recent activity + url = "/api/v1/log/recent_activity/?q=(page_size:50)" + resp = self.client.get(url) + # TODO data for recent activity varies for sqlite, we should be able to assert + # the returned data + assert resp.status_code == 200 + + # Get dashboards created by the user + request_query = { + "columns": ["created_on_delta_humanized", "dashboard_title", "url"], + "filters": [ + {"col": "created_by", "opr": "dashboard_created_by_me", "value": "me"} + ], + "keys": ["none"], + "order_column": "changed_on", + "order_direction": "desc", + "page": 0, + "page_size": 100, + } + url = f"/api/v1/dashboard/?q={prison.dumps(request_query)}" + resp = self.client.get(url) + assert resp.json["result"][0]["dashboard_title"] == "create_title_test" - dash = db.session.query(Dashboard).filter_by(slug="births").first() - url = f"/superset/favstar/Dashboard/{dash.id}/select/" - resp = self.get_json_resp(url) - self.assertEqual(resp["count"], 1) + # Get charts created by the user + request_query = { + "columns": ["created_on_delta_humanized", "slice_name", "url"], + "filters": [ + {"col": "created_by", "opr": "chart_created_by_me", "value": "me"} + ], + "keys": ["none"], + "order_column": "changed_on_delta_humanized", + "order_direction": "desc", + "page": 0, + "page_size": 100, + } + url = f"/api/v1/chart/?q={prison.dumps(request_query)}" + resp = self.client.get(url) + assert resp.json["count"] == 1 + assert resp.json["result"][0]["slice_name"] == "create_title_test" - resp = self.get_resp(f"/superset/profile/{username}/") + resp = self.get_resp(f"/superset/profile/") self.assertIn('"app"', resp) - for endpoint in self._get_user_activity_endpoints(username): - data = self.get_json_resp(endpoint) - self.assertNotIn("message", data) - - def test_user_profile_optional_access(self): + def test_user_profile_gamma(self): self.login(username="gamma") - resp = self.client.get(f"/superset/profile/admin/") - self.assertEqual(resp.status_code, 200) - - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False - resp = self.client.get(f"/superset/profile/admin/") - self.assertEqual(resp.status_code, 403) - - # Restore config - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = True - - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_user_activity_access(self, username="gamma"): - self.login(username=username) - - # accessing own and other users' activity is allowed by default - for user in ("admin", "gamma"): - for endpoint in self._get_user_activity_endpoints(user): - resp = self.client.get(endpoint) - assert resp.status_code == 200 - - # disabling flag will block access to other users' activity data - access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False - for user in ("admin", "gamma"): - for endpoint in self._get_user_activity_endpoints(user): - resp = self.client.get(endpoint) - expected_status_code = 200 if user == username else 403 - assert resp.status_code == expected_status_code + resp = self.get_resp(f"/superset/profile/") + self.assertIn('"app"', resp) - # restore flag - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag + @pytest.mark.usefixtures("public_role_like_gamma") + def 
+        self.logout()
+        resp = self.client.get("/superset/profile/")
+        assert resp.status_code == 404
 
     @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
     def test_slice_id_is_always_logged_correctly_on_web_request(self):
@@ -1209,78 +931,6 @@ def test_explore_json_data_invalid_cache_key(self):
         self.assertEqual(rv.status_code, 404)
         self.assertEqual(data["error"], "Cached data not found")
 
-    @mock.patch(
-        "superset.security.SupersetSecurityManager.get_schemas_accessible_by_user"
-    )
-    @mock.patch("superset.security.SupersetSecurityManager.can_access_database")
-    @mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources")
-    def test_schemas_access_for_csv_upload_endpoint(
-        self,
-        mock_can_access_all_datasources,
-        mock_can_access_database,
-        mock_schemas_accessible,
-    ):
-        self.login(username="admin")
-        dbobj = self.create_fake_db()
-        mock_can_access_all_datasources.return_value = False
-        mock_can_access_database.return_value = False
-        mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"]
-        data = self.get_json_resp(
-            url="/superset/schemas_access_for_file_upload?db_id={db_id}".format(
-                db_id=dbobj.id
-            )
-        )
-        assert data == ["this_schema_is_allowed_too"]
-        self.delete_fake_db()
-
-    @mock.patch("superset.views.core.results_backend_use_msgpack", False)
-    def test_display_limit(self):
-        from superset.views import core
-
-        core.results_backend = mock.Mock()
-        self.login()
-
-        data = [{"col_0": i} for i in range(100)]
-        payload = {
-            "status": QueryStatus.SUCCESS,
-            "query": {"rows": 100},
-            "data": data,
-        }
-        # limit results to 1
-        expected_key = {"status": "success", "query": {"rows": 100}, "data": data}
-        limited_data = data[:1]
-        expected_limited = {
-            "status": "success",
-            "query": {"rows": 100},
-            "data": limited_data,
-            "displayLimitReached": True,
-        }
-
-        query_mock = mock.Mock()
-        query_mock.sql = "SELECT *"
-        query_mock.database = 1
-        query_mock.schema = "superset"
-
-        # do not apply msgpack serialization
-        use_msgpack = app.config["RESULTS_BACKEND_USE_MSGPACK"]
-        app.config["RESULTS_BACKEND_USE_MSGPACK"] = False
-        serialized_payload = sql_lab._serialize_payload(payload, False)
-        compressed = utils.zlib_compress(serialized_payload)
-        core.results_backend.get.return_value = compressed
-
-        with mock.patch("superset.views.core.db") as mock_superset_db:
-            mock_superset_db.session.query().filter_by().one_or_none.return_value = (
-                query_mock
-            )
-            # get all results
-            result_key = json.loads(self.get_resp("/superset/results/key/"))
-            result_limited = json.loads(self.get_resp("/superset/results/key/?rows=1"))
-
-        self.assertEqual(result_key, expected_key)
-        self.assertEqual(result_limited, expected_limited)
-
-        app.config["RESULTS_BACKEND_USE_MSGPACK"] = use_msgpack
-
     def test_results_default_deserialization(self):
         use_new_deserialization = False
         data = [("a", 4, 4.0, "2019-08-18T16:39:16.660000")]
@@ -1413,7 +1063,7 @@ def test_feature_flag_serialization(self):
             "/superset/sqllab",
             "/superset/welcome",
             f"/superset/dashboard/{dash_id}/",
-            "/superset/profile/admin/",
+            "/superset/profile/",
             f"/explore/?datasource_type=table&datasource_id={tbl_id}",
         ]
         for url in urls:
@@ -1631,29 +1281,6 @@ def test_dashboard_injected_exceptions(self, mock_db_connection_mutator):
         data = self.get_resp(url)
         self.assertIn("Error message", data)
 
-    @mock.patch("superset.sql_lab.cancel_query")
-    @mock.patch("superset.views.core.db.session")
-    def test_stop_query_not_implemented(
-        self, mock_superset_db_session, mock_sql_lab_cancel_query
-    ):
-        """
-        Handles stop query when the DB engine spec does not
-        have a cancel query method.
-        """
-        form_data = {"client_id": "foo"}
-        query_mock = mock.Mock()
-        query_mock.client_id = "foo"
-        query_mock.status = QueryStatus.RUNNING
-        self.login(username="admin")
-        mock_superset_db_session.query().filter_by().one().return_value = query_mock
-        mock_sql_lab_cancel_query.return_value = False
-        rv = self.client.post(
-            "/superset/stop_query/",
-            data={"form_data": json.dumps(form_data)},
-        )
-
-        assert rv.status_code == 422
-
     @pytest.mark.usefixtures("load_energy_table_with_slice")
     @mock.patch("superset.explore.form_data.commands.create.CreateFormDataCommand.run")
     def test_explore_redirect(self, mock_command: mock.Mock):
diff --git a/tests/integration_tests/dashboard_tests.py b/tests/integration_tests/dashboard_tests.py
index 669bc936934e5..6558ccc28e21f 100644
--- a/tests/integration_tests/dashboard_tests.py
+++ b/tests/integration_tests/dashboard_tests.py
@@ -16,8 +16,6 @@
 # under the License.
 # isort:skip_file
 """Unit tests for Superset"""
-from datetime import datetime
-import json
 import re
 import unittest
 from random import random
@@ -54,28 +52,6 @@
 
 
 class TestDashboard(SupersetTestCase):
-    @pytest.fixture
-    def cleanup_copied_dash(self):
-        with app.app_context():
-            original_dashboard = (
-                db.session.query(Dashboard).filter_by(slug="births").first()
-            )
-            original_dashboard_id = original_dashboard.id
-            yield
-            copied_dashboard = (
-                db.session.query(Dashboard)
-                .filter(
-                    Dashboard.dashboard_title == "Copy Of Births",
-                    Dashboard.id != original_dashboard_id,
-                )
-                .first()
-            )
-
-            db.session.merge(original_dashboard)
-            if copied_dashboard:
-                db.session.delete(copied_dashboard)
-            db.session.commit()
-
     @pytest.fixture
     def load_dashboard(self):
         with app.app_context():
@@ -154,235 +130,6 @@ def test_new_dashboard(self):
         db.session.delete(created_dashboard)
         db.session.commit()
 
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_save_dash(self, username="admin"):
-        self.login(username=username)
-        dash = db.session.query(Dashboard).filter_by(slug="births").first()
-        positions = self.get_mock_positions(dash)
-        data = {
-            "css": "",
-            "expanded_slices": {},
-            "positions": positions,
-            "dashboard_title": dash.dashboard_title,
-            # set a further modified_time for unit test
-            "last_modified_time": datetime.now().timestamp() + 1000,
-        }
-        url = f"/superset/save_dash/{dash.id}/"
-        resp = self.get_resp(url, data=dict(data=json.dumps(data)))
-        self.assertIn("SUCCESS", resp)
-
-    @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
-    def test_save_dash_with_filter(self, username="admin"):
-        self.login(username=username)
-        dash = db.session.query(Dashboard).filter_by(slug="world_health").first()
-
-        positions = self.get_mock_positions(dash)
-        filters = {str(dash.slices[0].id): {"region": ["North America"]}}
-        default_filters = json.dumps(filters)
-        data = {
-            "css": "",
-            "expanded_slices": {},
-            "positions": positions,
-            "dashboard_title": dash.dashboard_title,
-            "default_filters": default_filters,
-            # set a further modified_time for unit test
-            "last_modified_time": datetime.now().timestamp() + 1000,
-        }
-
-        url = f"/superset/save_dash/{dash.id}/"
-        resp = self.get_resp(url, data=dict(data=json.dumps(data)))
-        self.assertIn("SUCCESS", resp)
-
-        updatedDash = db.session.query(Dashboard).filter_by(slug="world_health").first()
-        new_url = updatedDash.url
-        self.assertIn("world_health", new_url)
-        self.assertNotIn("preselect_filters", new_url)
-
-    @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
-    def test_save_dash_with_invalid_filters(self, username="admin"):
-        self.login(username=username)
-        dash = db.session.query(Dashboard).filter_by(slug="world_health").first()
-
-        # add an invalid filter slice
-        positions = self.get_mock_positions(dash)
-        filters = {str(99999): {"region": ["North America"]}}
-        default_filters = json.dumps(filters)
-        data = {
-            "css": "",
-            "expanded_slices": {},
-            "positions": positions,
-            "dashboard_title": dash.dashboard_title,
-            "default_filters": default_filters,
-            # set a further modified_time for unit test
-            "last_modified_time": datetime.now().timestamp() + 1000,
-        }
-
-        url = f"/superset/save_dash/{dash.id}/"
-        resp = self.get_resp(url, data=dict(data=json.dumps(data)))
-        self.assertIn("SUCCESS", resp)
-
-        updatedDash = db.session.query(Dashboard).filter_by(slug="world_health").first()
-        new_url = updatedDash.url
-        self.assertNotIn("region", new_url)
-
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_save_dash_with_dashboard_title(self, username="admin"):
-        self.login(username=username)
-        dash = db.session.query(Dashboard).filter_by(slug="births").first()
-        origin_title = dash.dashboard_title
-        positions = self.get_mock_positions(dash)
-        data = {
-            "css": "",
-            "expanded_slices": {},
-            "positions": positions,
-            "dashboard_title": "new title",
-            # set a further modified_time for unit test
-            "last_modified_time": datetime.now().timestamp() + 1000,
-        }
-        url = f"/superset/save_dash/{dash.id}/"
-        self.get_resp(url, data=dict(data=json.dumps(data)))
-        updatedDash = db.session.query(Dashboard).filter_by(slug="births").first()
-        self.assertEqual(updatedDash.dashboard_title, "new title")
-        # bring back dashboard original title
-        data["dashboard_title"] = origin_title
-        self.get_resp(url, data=dict(data=json.dumps(data)))
-
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_save_dash_with_colors(self, username="admin"):
-        self.login(username=username)
-        dash = db.session.query(Dashboard).filter_by(slug="births").first()
-        positions = self.get_mock_positions(dash)
-        new_label_colors = {"data value": "random color"}
-        data = {
-            "css": "",
-            "expanded_slices": {},
-            "positions": positions,
-            "dashboard_title": dash.dashboard_title,
-            "color_namespace": "Color Namespace Test",
-            "color_scheme": "Color Scheme Test",
-            "label_colors": new_label_colors,
-            # set a further modified_time for unit test
-            "last_modified_time": datetime.now().timestamp() + 1000,
-        }
-        url = f"/superset/save_dash/{dash.id}/"
-        self.get_resp(url, data=dict(data=json.dumps(data)))
-        updatedDash = db.session.query(Dashboard).filter_by(slug="births").first()
-        self.assertIn("color_namespace", updatedDash.json_metadata)
-        self.assertIn("color_scheme", updatedDash.json_metadata)
-        self.assertIn("label_colors", updatedDash.json_metadata)
-        # bring back original dashboard
-        del data["color_namespace"]
-        del data["color_scheme"]
-        del data["label_colors"]
-        self.get_resp(url, data=dict(data=json.dumps(data)))
-
-    @pytest.mark.usefixtures(
-        "load_birth_names_dashboard_with_slices",
-        "cleanup_copied_dash",
-        "load_unicode_dashboard_with_position",
-    )
-    def test_copy_dash(self, username="admin"):
-        self.login(username=username)
-        dash = db.session.query(Dashboard).filter_by(slug="births").first()
-        positions = self.get_mock_positions(dash)
-        new_label_colors = {"data value": "random color"}
-        data = {
-            "css": "",
-            "duplicate_slices": False,
"expanded_slices": {}, - "positions": positions, - "dashboard_title": "Copy Of Births", - "color_namespace": "Color Namespace Test", - "color_scheme": "Color Scheme Test", - "label_colors": new_label_colors, - # set a further modified_time for unit test - "last_modified_time": datetime.now().timestamp() + 1000, - } - - # Save changes to Births dashboard and retrieve updated dash - dash_id = dash.id - url = f"/superset/save_dash/{dash_id}/" - self.client.post(url, data=dict(data=json.dumps(data))) - dash = db.session.query(Dashboard).filter_by(id=dash_id).first() - orig_json_data = dash.data - - # Verify that copy matches original - url = f"/superset/copy_dash/{dash_id}/" - resp = self.get_json_resp(url, data=dict(data=json.dumps(data))) - self.assertEqual(resp["dashboard_title"], "Copy Of Births") - self.assertEqual(resp["position_json"], orig_json_data["position_json"]) - self.assertEqual(resp["metadata"], orig_json_data["metadata"]) - # check every attribute in each dashboard's slices list, - # exclude modified and changed_on attribute - for index, slc in enumerate(orig_json_data["slices"]): - for key in slc: - if key not in ["modified", "changed_on", "changed_on_humanized"]: - self.assertEqual(slc[key], resp["slices"][index][key]) - - @pytest.mark.usefixtures( - "load_energy_table_with_slice", "load_birth_names_dashboard_with_slices" - ) - def test_add_slices(self, username="admin"): - self.login(username=username) - dash = db.session.query(Dashboard).filter_by(slug="births").first() - new_slice = ( - db.session.query(Slice).filter_by(slice_name="Energy Force Layout").first() - ) - existing_slice = ( - db.session.query(Slice).filter_by(slice_name="Girl Name Cloud").first() - ) - data = { - "slice_ids": [new_slice.data["slice_id"], existing_slice.data["slice_id"]] - } - url = f"/superset/add_slices/{dash.id}/" - resp = self.client.post(url, data=dict(data=json.dumps(data))) - assert "SLICES ADDED" in resp.data.decode("utf-8") - - dash = db.session.query(Dashboard).filter_by(slug="births").first() - new_slice = ( - db.session.query(Slice).filter_by(slice_name="Energy Force Layout").first() - ) - assert new_slice in dash.slices - assert len(set(dash.slices)) == len(dash.slices) - - # cleaning up - dash = db.session.query(Dashboard).filter_by(slug="births").first() - dash.slices = [o for o in dash.slices if o.slice_name != "Energy Force Layout"] - db.session.commit() - - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_remove_slices(self, username="admin"): - self.login(username=username) - dash = db.session.query(Dashboard).filter_by(slug="births").first() - origin_slices_length = len(dash.slices) - - positions = self.get_mock_positions(dash) - # remove one chart - chart_keys = [] - for key in positions.keys(): - if key.startswith("DASHBOARD_CHART_TYPE"): - chart_keys.append(key) - positions.pop(chart_keys[0]) - - data = { - "css": "", - "expanded_slices": {}, - "positions": positions, - "dashboard_title": dash.dashboard_title, - # set a further modified_time for unit test - "last_modified_time": datetime.now().timestamp() + 1000, - } - - # save dash - dash_id = dash.id - url = f"/superset/save_dash/{dash_id}/" - self.client.post(url, data=dict(data=json.dumps(data))) - dash = db.session.query(Dashboard).filter_by(id=dash_id).first() - - # verify slices data - data = dash.data - self.assertEqual(len(data["slices"]), origin_slices_length - 1) - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") @pytest.mark.usefixtures("public_role_like_gamma") 
     def test_public_user_dashboard_access(self):
@@ -443,25 +190,6 @@ def test_dashboard_with_created_by_can_be_accessed_by_public_users(self):
         # Cleanup
         self.revoke_public_access_to_table(table)
 
-    @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
-    def test_only_owners_can_save(self):
-        dash = db.session.query(Dashboard).filter_by(slug="births").first()
-        dash.owners = []
-        db.session.merge(dash)
-        db.session.commit()
-        self.test_save_dash("admin")
-
-        self.logout()
-        self.assertRaises(Exception, self.test_save_dash, "alpha")
-
-        alpha = security_manager.find_user("alpha")
-
-        dash = db.session.query(Dashboard).filter_by(slug="births").first()
-        dash.owners = [alpha]
-        db.session.merge(dash)
-        db.session.commit()
-        self.test_save_dash("alpha")
-
     @pytest.mark.usefixtures("load_energy_table_with_slice", "load_dashboard")
     def test_users_can_list_published_dashboard(self):
         self.login("alpha")
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index 49a6bbecbc85f..f676e873b700f 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -19,7 +19,6 @@
 import json
 from io import BytesIO
 from time import sleep
-from typing import Optional
 from unittest.mock import ANY, patch
 from zipfile import is_zipfile, ZipFile
 
@@ -39,6 +38,7 @@
 from superset.utils.core import backend, override_user
 from superset.views.base import generate_download_headers
 
+from tests.integration_tests.conftest import with_feature_flags
 from tests.integration_tests.base_api_tests import ApiOwnersTestCaseMixin
 from tests.integration_tests.base_tests import SupersetTestCase
 from tests.integration_tests.fixtures.importexport import (
@@ -76,48 +76,6 @@ class TestDashboardApi(SupersetTestCase, ApiOwnersTestCaseMixin, InsertChartMixi
         "published": False,
     }
 
-    def insert_dashboard(
-        self,
-        dashboard_title: str,
-        slug: Optional[str],
-        owners: list[int],
-        roles: list[int] = [],
-        created_by=None,
-        slices: Optional[list[Slice]] = None,
-        position_json: str = "",
-        css: str = "",
-        json_metadata: str = "",
-        published: bool = False,
-        certified_by: Optional[str] = None,
-        certification_details: Optional[str] = None,
-    ) -> Dashboard:
-        obj_owners = list()
-        obj_roles = list()
-        slices = slices or []
-        for owner in owners:
-            user = db.session.query(security_manager.user_model).get(owner)
-            obj_owners.append(user)
-        for role in roles:
-            role_obj = db.session.query(security_manager.role_model).get(role)
-            obj_roles.append(role_obj)
-        dashboard = Dashboard(
-            dashboard_title=dashboard_title,
-            slug=slug,
-            owners=obj_owners,
-            roles=obj_roles,
-            position_json=position_json,
-            css=css,
-            json_metadata=json_metadata,
-            slices=slices,
-            published=published,
-            created_by=created_by,
-            certified_by=certified_by,
-            certification_details=certification_details,
-        )
-        db.session.add(dashboard)
-        db.session.commit()
-        return dashboard
-
     @pytest.fixture()
     def create_dashboards(self):
         with self.create_app().app_context():
@@ -410,7 +368,6 @@ def test_get_dashboard(self):
             "certification_details": None,
             "changed_by": None,
             "changed_by_name": "",
-            "changed_by_url": "",
             "charts": [],
             "created_by": {
                 "id": 1,
@@ -506,43 +463,6 @@ def test_get_dashboard_no_data_access(self):
         db.session.delete(dashboard)
         db.session.commit()
 
-    def test_get_draft_dashboard_without_roles_by_uuid(self):
-        """
-        Dashboard API: Test get draft dashboard without roles by uuid
-        """
-        admin = self.get_user("admin")
-        dashboard = self.insert_dashboard("title", "slug1", [admin.id])
-        assert not dashboard.published
-        assert dashboard.roles == []
-
-        self.login(username="gamma")
-        uri = f"api/v1/dashboard/{dashboard.uuid}"
-        rv = self.client.get(uri)
-        assert rv.status_code == 200
-        # rollback changes
-        db.session.delete(dashboard)
-        db.session.commit()
-
-    def test_cannot_get_draft_dashboard_with_roles_by_uuid(self):
-        """
-        Dashboard API: Test get dashboard by uuid
-        """
-        admin = self.get_user("admin")
-        admin_role = self.get_role("Admin")
-        dashboard = self.insert_dashboard(
-            "title", "slug1", [admin.id], roles=[admin_role.id]
-        )
-        assert not dashboard.published
-        assert dashboard.roles == [admin_role]
-
-        self.login(username="gamma")
-        uri = f"api/v1/dashboard/{dashboard.uuid}"
-        rv = self.client.get(uri)
-        assert rv.status_code == 403
-        # rollback changes
-        db.session.delete(dashboard)
-        db.session.commit()
-
     def test_get_dashboards_changed_on(self):
         """
         Dashboard API: Test get dashboards changed on
@@ -1405,56 +1325,6 @@ def test_update_dashboard(self):
         db.session.delete(model)
         db.session.commit()
 
-    def test_dashboard_activity_access_disabled(self):
-        """
-        Dashboard API: Test ENABLE_BROAD_ACTIVITY_ACCESS = False
-        """
-        access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"]
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False
-        admin = self.get_user("admin")
-        admin_role = self.get_role("Admin")
-        dashboard_id = self.insert_dashboard(
-            "title1", "slug1", [admin.id], roles=[admin_role.id]
-        ).id
-        self.login(username="admin")
-        uri = f"api/v1/dashboard/{dashboard_id}"
-        dashboard_data = {"dashboard_title": "title2"}
-        rv = self.client.put(uri, json=dashboard_data)
-        self.assertEqual(rv.status_code, 200)
-        model = db.session.query(Dashboard).get(dashboard_id)
-
-        self.assertEqual(model.dashboard_title, "title2")
-        self.assertEqual(model.changed_by_url, "")
-
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag
-        db.session.delete(model)
-        db.session.commit()
-
-    def test_dashboard_activity_access_enabled(self):
-        """
-        Dashboard API: Test ENABLE_BROAD_ACTIVITY_ACCESS = True
-        """
-        access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"]
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = True
-        admin = self.get_user("admin")
-        admin_role = self.get_role("Admin")
-        dashboard_id = self.insert_dashboard(
-            "title1", "slug1", [admin.id], roles=[admin_role.id]
-        ).id
-        self.login(username="admin")
-        uri = f"api/v1/dashboard/{dashboard_id}"
-        dashboard_data = {"dashboard_title": "title2"}
-        rv = self.client.put(uri, json=dashboard_data)
-        self.assertEqual(rv.status_code, 200)
-        model = db.session.query(Dashboard).get(dashboard_id)
-
-        self.assertEqual(model.dashboard_title, "title2")
-        self.assertEqual(model.changed_by_url, "/superset/profile/admin")
-
-        app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag
-        db.session.delete(model)
-        db.session.commit()
-
     def test_dashboard_get_list_no_username(self):
         """
         Dashboard API: Tests that no username is returned
diff --git a/tests/integration_tests/dashboards/base_case.py b/tests/integration_tests/dashboards/base_case.py
index db85cd6409a12..6aa6f4576ac00 100644
--- a/tests/integration_tests/dashboards/base_case.py
+++ b/tests/integration_tests/dashboards/base_case.py
@@ -14,8 +14,6 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-import json
-from typing import Any, Union
 
 import prison
 from flask import Response
@@ -48,17 +46,6 @@ def get_dashboards_list_response(self) -> Response:
     def get_dashboards_api_response(self) -> Response:
         return self.client.get(DASHBOARDS_API_URL)
 
-    def save_dashboard_via_view(
-        self, dashboard_id: Union[str, int], dashboard_data: dict[str, Any]
-    ) -> Response:
-        save_dash_url = SAVE_DASHBOARD_URL_FORMAT.format(dashboard_id)
-        return self.get_resp(save_dash_url, data=dict(data=json.dumps(dashboard_data)))
-
-    def save_dashboard(
-        self, dashboard_id: Union[str, int], dashboard_data: dict[str, Any]
-    ) -> Response:
-        return self.save_dashboard_via_view(dashboard_id, dashboard_data)
-
     def delete_dashboard_via_view(self, dashboard_id: int) -> Response:
         delete_dashboard_url = DELETE_DASHBOARD_VIEW_URL_FORMAT.format(dashboard_id)
         return self.get_resp(delete_dashboard_url, {})
@@ -90,26 +77,6 @@ def assert_permissions_were_deleted(self, deleted_dashboard):
         view_menu = security_manager.find_view_menu(deleted_dashboard.view_name)
         self.assertIsNone(view_menu)
 
-    def save_dash_basic_case(self, username=ADMIN_USERNAME):
-        # arrange
-        self.login(username=username)
-        (
-            dashboard_to_save,
-            data_before_change,
-            data_after_change,
-        ) = build_save_dash_parts()
-
-        # act
-        save_dash_response = self.save_dashboard_via_view(
-            dashboard_to_save.id, data_after_change
-        )
-
-        # assert
-        self.assertIn("SUCCESS", save_dash_response)
-
-        # post test
-        self.save_dashboard(dashboard_to_save.id, data_before_change)
-
     def clean_created_objects(self):
         with app.test_request_context():
             self.logout()
diff --git a/tests/integration_tests/dashboards/consts.py b/tests/integration_tests/dashboards/consts.py
index 59d1c01fa9987..aa41b21399dc8 100644
--- a/tests/integration_tests/dashboards/consts.py
+++ b/tests/integration_tests/dashboards/consts.py
@@ -24,8 +24,6 @@
 EXPORT_DASHBOARDS_API_URL_WITH_QUERY_FORMAT = EXPORT_DASHBOARDS_API_URL + QUERY_FORMAT
 
 GET_DASHBOARD_VIEW_URL_FORMAT = "/superset/dashboard/{}/"
-SAVE_DASHBOARD_URL_FORMAT = "/superset/save_dash/{}/"
-ADD_SLICES_URL_FORMAT = "/superset/add_slices/{}/"
 DELETE_DASHBOARD_VIEW_URL_FORMAT = "/dashboard/delete/{}"
 
 GET_DASHBOARDS_LIST_VIEW = "/dashboard/list/"
diff --git a/tests/integration_tests/dashboards/dao_tests.py b/tests/integration_tests/dashboards/dao_tests.py
index e62b28a3d72f5..91e27af3b65ea 100644
--- a/tests/integration_tests/dashboards/dao_tests.py
+++ b/tests/integration_tests/dashboards/dao_tests.py
@@ -23,7 +23,7 @@
 import tests.integration_tests.test_app  # pylint: disable=unused-import
 from superset import db, security_manager
-from superset.dashboards.dao import DashboardDAO
+from superset.daos.dashboard import DashboardDAO
 from superset.models.dashboard import Dashboard
 from tests.integration_tests.base_tests import SupersetTestCase
 from tests.integration_tests.fixtures.world_bank_dashboard import (
@@ -129,7 +129,7 @@ def test_get_dashboard_changed_on(self, mock_sm_g, mock_g):
         db.session.commit()
 
     @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
-    @patch("superset.dashboards.dao.g")
+    @patch("superset.daos.dashboard.g")
     def test_copy_dashboard(self, mock_g):
         mock_g.user = security_manager.find_user("admin")
         original_dash = (
@@ -155,7 +155,7 @@ def test_copy_dashboard(self):
         db.session.commit()
 
     @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
-    @patch("superset.dashboards.dao.g")
+    @patch("superset.daos.dashboard.g")
     def test_copy_dashboard_copies_native_filters(self, mock_g):
         mock_g.user = security_manager.find_user("admin")
         original_dash = (
@@ -183,7 +183,7 @@ def test_copy_dashboard_copies_native_filters(self, mock_g):
         db.session.commit()
 
     @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
-    @patch("superset.dashboards.dao.g")
+    @patch("superset.daos.dashboard.g")
     def test_copy_dashboard_duplicate_slices(self, mock_g):
         mock_g.user = security_manager.find_user("admin")
         original_dash = (
diff --git a/tests/integration_tests/dashboards/security/security_dataset_tests.py b/tests/integration_tests/dashboards/security/security_dataset_tests.py
index 2eafc4b53e0cd..dffab61a7abef 100644
--- a/tests/integration_tests/dashboards/security/security_dataset_tests.py
+++ b/tests/integration_tests/dashboards/security/security_dataset_tests.py
@@ -22,6 +22,7 @@
 from flask import escape
 
 from superset import app
+from superset.daos.dashboard import DashboardDAO
 from superset.models import core as models
 from tests.integration_tests.dashboards.base_case import DashboardTestCase
 from tests.integration_tests.dashboards.consts import *
@@ -223,7 +224,7 @@ def test_get_dashboards_api_no_data_access(self):
         """
         admin = self.get_user("admin")
         title = f"title{random_str()}"
-        create_dashboard_to_db(title, "slug1", owners=[admin])
+        dashboard = create_dashboard_to_db(title, "slug1", owners=[admin])
 
         self.login(username="gamma")
         arguments = {
@@ -234,3 +235,4 @@ def test_get_dashboards_api_no_data_access(self):
         self.assert200(rv)
         data = json.loads(rv.data.decode("utf-8"))
         self.assertEqual(0, data["count"])
+        DashboardDAO.delete(dashboard)
diff --git a/tests/integration_tests/dashboards/security/security_rbac_tests.py b/tests/integration_tests/dashboards/security/security_rbac_tests.py
index a49e533f1ba41..03b5da7ce34e9 100644
--- a/tests/integration_tests/dashboards/security/security_rbac_tests.py
+++ b/tests/integration_tests/dashboards/security/security_rbac_tests.py
@@ -395,3 +395,40 @@ def test_get_dashboards_api__public_user_get_only_published_permitted_dashboards
         # post
         for dash in published_dashboards + draft_dashboards:
             revoke_access_to_dashboard(dash, "Public")
+
+    def test_get_draft_dashboard_without_roles_by_uuid(self):
+        """
+        Dashboard API: Test get draft dashboard without roles by uuid
+        """
+        admin = self.get_user("admin")
+        dashboard = self.insert_dashboard("title", "slug1", [admin.id])
+        assert not dashboard.published
+        assert dashboard.roles == []
+
+        self.login(username="gamma")
+        uri = f"api/v1/dashboard/{dashboard.uuid}"
+        rv = self.client.get(uri)
+        assert rv.status_code == 200
+        # rollback changes
+        db.session.delete(dashboard)
+        db.session.commit()
+
+    def test_cannot_get_draft_dashboard_with_roles_by_uuid(self):
+        """
+        Dashboard API: Test get dashboard by uuid
+        """
+        admin = self.get_user("admin")
+        admin_role = self.get_role("Admin")
+        dashboard = self.insert_dashboard(
+            "title", "slug1", [admin.id], roles=[admin_role.id]
+        )
+        assert not dashboard.published
+        assert dashboard.roles == [admin_role]
+
+        self.login(username="gamma")
+        uri = f"api/v1/dashboard/{dashboard.uuid}"
+        rv = self.client.get(uri)
+        assert rv.status_code == 403
+        # rollback changes
+        db.session.delete(dashboard)
+        db.session.commit()
diff --git a/tests/integration_tests/databases/api_tests.py b/tests/integration_tests/databases/api_tests.py
index 6fa1288067e87..e7f33e4a6450a 100644
--- a/tests/integration_tests/databases/api_tests.py
+++ b/tests/integration_tests/databases/api_tests.py
@@ -3617,44 +3617,3 @@ def test_validate_sql_endpoint_failure(self, get_validator_by_name):
             return
self.assertEqual(rv.status_code, 422) self.assertIn("Kaboom!", response["errors"][0]["message"]) - - @mock.patch( - "superset.security.SupersetSecurityManager.get_schemas_accessible_by_user" - ) - @mock.patch("superset.security.SupersetSecurityManager.can_access_database") - @mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources") - def test_schemas_access_for_csv_upload_not_found_endpoint( - self, - mock_can_access_all_datasources, - mock_can_access_database, - mock_schemas_accessible, - ): - self.login(username="gamma") - self.create_fake_db() - mock_can_access_database.return_value = False - mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"] - rv = self.client.get(f"/api/v1/database/120ff/schemas_access_for_file_upload") - self.assertEqual(rv.status_code, 404) - self.delete_fake_db() - - @mock.patch( - "superset.security.SupersetSecurityManager.get_schemas_accessible_by_user" - ) - @mock.patch("superset.security.SupersetSecurityManager.can_access_database") - @mock.patch("superset.security.SupersetSecurityManager.can_access_all_datasources") - def test_schemas_access_for_csv_upload_endpoint( - self, - mock_can_access_all_datasources, - mock_can_access_database, - mock_schemas_accessible, - ): - self.login(username="admin") - dbobj = self.create_fake_db() - mock_can_access_all_datasources.return_value = False - mock_can_access_database.return_value = False - mock_schemas_accessible.return_value = ["this_schema_is_allowed_too"] - data = self.get_json_resp( - url=f"/api/v1/database/{dbobj.id}/schemas_access_for_file_upload" - ) - assert data == {"schemas": ["this_schema_is_allowed_too"]} - self.delete_fake_db() diff --git a/tests/integration_tests/databases/commands_tests.py b/tests/integration_tests/databases/commands_tests.py index b47d3d89fe108..8ffb31b78215f 100644 --- a/tests/integration_tests/databases/commands_tests.py +++ b/tests/integration_tests/databases/commands_tests.py @@ -859,7 +859,7 @@ def test_import_v1_rollback(self, mock_import_dataset): class TestTestConnectionDatabaseCommand(SupersetTestCase): - @patch("superset.databases.dao.Database._get_sqla_engine") + @patch("superset.daos.database.Database._get_sqla_engine") @patch("superset.databases.commands.test_connection.event_logger.log_with_context") @patch("superset.utils.core.g") def test_connection_db_exception( @@ -880,7 +880,7 @@ def test_connection_db_exception( ) mock_event_logger.assert_called() - @patch("superset.databases.dao.Database._get_sqla_engine") + @patch("superset.daos.database.Database._get_sqla_engine") @patch("superset.databases.commands.test_connection.event_logger.log_with_context") @patch("superset.utils.core.g") def test_connection_do_ping_exception( @@ -925,7 +925,7 @@ def test_connection_do_ping_timeout( == SupersetErrorType.CONNECTION_DATABASE_TIMEOUT ) - @patch("superset.databases.dao.Database._get_sqla_engine") + @patch("superset.daos.database.Database._get_sqla_engine") @patch("superset.databases.commands.test_connection.event_logger.log_with_context") @patch("superset.utils.core.g") def test_connection_superset_security_connection( @@ -948,7 +948,7 @@ def test_connection_superset_security_connection( mock_event_logger.assert_called() - @patch("superset.databases.dao.Database._get_sqla_engine") + @patch("superset.daos.database.Database._get_sqla_engine") @patch("superset.databases.commands.test_connection.event_logger.log_with_context") @patch("superset.utils.core.g") def test_connection_db_api_exc( @@ -1093,7 +1093,7 @@ def 
test_validate_partial_invalid_hostname(is_hostname_valid, app_context): class TestTablesDatabaseCommand(SupersetTestCase): - @patch("superset.databases.dao.DatabaseDAO.find_by_id") + @patch("superset.daos.database.DatabaseDAO.find_by_id") def test_database_tables_list_with_unknown_database(self, mock_find_by_id): mock_find_by_id.return_value = None command = TablesDatabaseCommand(1, "test", False) @@ -1102,7 +1102,7 @@ def test_database_tables_list_with_unknown_database(self, mock_find_by_id): command.run() assert str(excinfo.value) == ("Database not found.") - @patch("superset.databases.dao.DatabaseDAO.find_by_id") + @patch("superset.daos.database.DatabaseDAO.find_by_id") @patch("superset.security.manager.SupersetSecurityManager.can_access_database") @patch("superset.utils.core.g") def test_database_tables_superset_exception( @@ -1121,7 +1121,7 @@ def test_database_tables_superset_exception( command.run() assert str(excinfo.value) == "Test Error" - @patch("superset.databases.dao.DatabaseDAO.find_by_id") + @patch("superset.daos.database.DatabaseDAO.find_by_id") @patch("superset.security.manager.SupersetSecurityManager.can_access_database") @patch("superset.utils.core.g") def test_database_tables_exception( @@ -1140,7 +1140,7 @@ def test_database_tables_exception( == "Unexpected error occurred, please check your logs for details" ) - @patch("superset.databases.dao.DatabaseDAO.find_by_id") + @patch("superset.daos.database.DatabaseDAO.find_by_id") @patch("superset.security.manager.SupersetSecurityManager.can_access_database") @patch("superset.utils.core.g") def test_database_tables_list_tables( diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py index 508dd29788b72..2f55a1e97815e 100644 --- a/tests/integration_tests/datasets/api_tests.py +++ b/tests/integration_tests/datasets/api_tests.py @@ -30,7 +30,7 @@ from superset import app from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn -from superset.dao.exceptions import ( +from superset.daos.exceptions import ( DAOCreateFailedError, DAODeleteFailedError, DAOUpdateFailedError, @@ -44,7 +44,7 @@ from superset.utils.database import get_example_database, get_main_database from superset.utils.dict_import_export import export_to_dict from tests.integration_tests.base_tests import SupersetTestCase -from tests.integration_tests.conftest import CTAS_SCHEMA_NAME +from tests.integration_tests.conftest import CTAS_SCHEMA_NAME, with_feature_flags from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, load_birth_names_data, @@ -208,7 +208,6 @@ def test_get_dataset_list(self): expected_columns = [ "changed_by", "changed_by_name", - "changed_by_url", "changed_on_delta_humanized", "changed_on_utc", "database", @@ -349,7 +348,7 @@ def test_get_dataset_item(self): "sql": None, "table_name": "energy_usage", "template_params": None, - "uid": "2__table", + "uid": ANY, "datasource_name": "energy_usage", "name": f"{get_example_default_schema()}.energy_usage", "column_formats": {}, @@ -830,7 +829,7 @@ def test_create_dataset_validate_view_exists( rv = self.client.delete(uri) assert rv.status_code == 200 - @patch("superset.datasets.dao.DatasetDAO.create") + @patch("superset.daos.dataset.DatasetDAO.create") def test_create_dataset_sqlalchemy_error(self, mock_dao_create): """ Dataset API: Test create dataset sqlalchemy error @@ -1360,60 +1359,6 @@ def test_dataset_get_no_username(self): db.session.delete(dataset) db.session.commit() - def 
test_dataset_activity_access_enabled(self): - """ - Dataset API: Test ENABLE_BROAD_ACTIVITY_ACCESS = True - """ - if backend() == "sqlite": - return - - access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = True - dataset = self.insert_default_dataset() - self.login(username="admin") - table_data = {"description": "changed_description"} - uri = f"api/v1/dataset/{dataset.id}" - rv = self.client.put(uri, json=table_data) - self.assertEqual(rv.status_code, 200) - - response = self.get_assert_metric("api/v1/dataset/", "get_list") - res = json.loads(response.data.decode("utf-8"))["result"] - - current_dataset = [d for d in res if d["id"] == dataset.id][0] - self.assertEqual(current_dataset["description"], "changed_description") - self.assertEqual(current_dataset["changed_by_url"], "/superset/profile/admin") - - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag - db.session.delete(dataset) - db.session.commit() - - def test_dataset_activity_access_disabled(self): - """ - Dataset API: Test ENABLE_BROAD_ACTIVITY_ACCESS = Fase - """ - if backend() == "sqlite": - return - - access_flag = app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False - dataset = self.insert_default_dataset() - self.login(username="admin") - table_data = {"description": "changed_description"} - uri = f"api/v1/dataset/{dataset.id}" - rv = self.put_assert_metric(uri, table_data, "put") - self.assertEqual(rv.status_code, 200) - - response = self.get_assert_metric("api/v1/dataset/", "get_list") - res = json.loads(response.data.decode("utf-8"))["result"] - - current_dataset = [d for d in res if d["id"] == dataset.id][0] - self.assertEqual(current_dataset["description"], "changed_description") - self.assertEqual(current_dataset["changed_by_url"], "") - - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = access_flag - db.session.delete(dataset) - db.session.commit() - def test_update_dataset_item_not_owned(self): """ Dataset API: Test update dataset item not owned @@ -1497,7 +1442,7 @@ def test_update_dataset_unsafe_default_endpoint(self): db.session.delete(dataset) db.session.commit() - @patch("superset.datasets.dao.DatasetDAO.update") + @patch("superset.daos.dataset.DatasetDAO.update") def test_update_dataset_sqlalchemy_error(self, mock_dao_update): """ Dataset API: Test update dataset sqlalchemy error @@ -1569,7 +1514,7 @@ def test_delete_dataset_item_not_authorized(self): db.session.delete(dataset) db.session.commit() - @patch("superset.datasets.dao.DatasetDAO.delete") + @patch("superset.daos.dataset.DatasetDAO.delete") def test_delete_dataset_sqlalchemy_error(self, mock_dao_delete): """ Dataset API: Test delete dataset sqlalchemy error @@ -1646,7 +1591,7 @@ def test_delete_dataset_column_not_owned(self): assert rv.status_code == 403 @pytest.mark.usefixtures("create_datasets") - @patch("superset.datasets.dao.DatasetDAO.delete") + @patch("superset.daos.dataset.DatasetDAO.delete") def test_delete_dataset_column_fail(self, mock_dao_delete): """ Dataset API: Test delete dataset column @@ -1726,7 +1671,7 @@ def test_delete_dataset_metric_not_owned(self): assert rv.status_code == 403 @pytest.mark.usefixtures("create_datasets") - @patch("superset.datasets.dao.DatasetDAO.delete") + @patch("superset.daos.dataset.DatasetDAO.delete") def test_delete_dataset_metric_fail(self, mock_dao_delete): """ Dataset API: Test delete dataset metric diff --git a/tests/integration_tests/datasource/api_tests.py b/tests/integration_tests/datasource/api_tests.py 
index 522aa33383e62..b6a6af105d7ac 100644 --- a/tests/integration_tests/datasource/api_tests.py +++ b/tests/integration_tests/datasource/api_tests.py @@ -21,7 +21,7 @@ from superset import db, security_manager from superset.connectors.sqla.models import SqlaTable -from superset.dao.exceptions import DatasourceTypeNotSupportedError +from superset.daos.exceptions import DatasourceTypeNotSupportedError from tests.integration_tests.base_tests import SupersetTestCase diff --git a/tests/integration_tests/datasource_tests.py b/tests/integration_tests/datasource_tests.py index 2e42c32c8b19e..b73acc2681cfc 100644 --- a/tests/integration_tests/datasource_tests.py +++ b/tests/integration_tests/datasource_tests.py @@ -26,7 +26,7 @@ from superset.common.utils.query_cache_manager import QueryCacheManager from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn from superset.constants import CacheRegion -from superset.dao.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError +from superset.daos.exceptions import DatasourceNotFound, DatasourceTypeNotSupportedError from superset.datasets.commands.exceptions import DatasetNotFoundError from superset.exceptions import SupersetGenericDBErrorException from superset.models.core import Database @@ -433,7 +433,7 @@ def my_check(datasource): app.config["DATASET_HEALTH_CHECK"] = None def test_get_datasource_failed(self): - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO pytest.raises( DatasourceNotFound, @@ -445,7 +445,7 @@ def test_get_datasource_failed(self): self.assertEqual(resp.get("error"), "Datasource does not exist") def test_get_datasource_invalid_datasource_failed(self): - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO pytest.raises( DatasourceTypeNotSupportedError, diff --git a/tests/integration_tests/embedded/api_tests.py b/tests/integration_tests/embedded/api_tests.py index 8f3950fcf5462..113d38166e231 100644 --- a/tests/integration_tests/embedded/api_tests.py +++ b/tests/integration_tests/embedded/api_tests.py @@ -21,7 +21,7 @@ import pytest from superset import db -from superset.embedded.dao import EmbeddedDAO +from superset.daos.dashboard import EmbeddedDashboardDAO from superset.models.dashboard import Dashboard from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.birth_names_dashboard import ( @@ -41,7 +41,7 @@ class TestEmbeddedDashboardApi(SupersetTestCase): def test_get_embedded_dashboard(self): self.login("admin") self.dash = db.session.query(Dashboard).filter_by(slug="births").first() - self.embedded = EmbeddedDAO.upsert(self.dash, []) + self.embedded = EmbeddedDashboardDAO.upsert(self.dash, []) uri = f"api/v1/{self.resource_name}/{self.embedded.uuid}" response = self.client.get(uri) self.assert200(response) diff --git a/tests/integration_tests/embedded/dao_tests.py b/tests/integration_tests/embedded/dao_tests.py index 8160144a25cbc..8d62fc0f6dae5 100644 --- a/tests/integration_tests/embedded/dao_tests.py +++ b/tests/integration_tests/embedded/dao_tests.py @@ -19,7 +19,7 @@ import tests.integration_tests.test_app # pylint: disable=unused-import from superset import db -from superset.embedded.dao import EmbeddedDAO +from superset.daos.dashboard import EmbeddedDashboardDAO from superset.models.dashboard import Dashboard from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.world_bank_dashboard import ( 
@@ -28,24 +28,24 @@ ) -class TestEmbeddedDAO(SupersetTestCase): +class TestEmbeddedDashboardDAO(SupersetTestCase): @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") def test_upsert(self): dash = db.session.query(Dashboard).filter_by(slug="world_health").first() assert not dash.embedded - EmbeddedDAO.upsert(dash, ["test.example.com"]) + EmbeddedDashboardDAO.upsert(dash, ["test.example.com"]) assert dash.embedded self.assertEqual(dash.embedded[0].allowed_domains, ["test.example.com"]) original_uuid = dash.embedded[0].uuid self.assertIsNotNone(original_uuid) - EmbeddedDAO.upsert(dash, []) + EmbeddedDashboardDAO.upsert(dash, []) self.assertEqual(dash.embedded[0].allowed_domains, []) self.assertEqual(dash.embedded[0].uuid, original_uuid) @pytest.mark.usefixtures("load_world_bank_dashboard_with_slices") def test_get_by_uuid(self): dash = db.session.query(Dashboard).filter_by(slug="world_health").first() - uuid = str(EmbeddedDAO.upsert(dash, ["test.example.com"]).uuid) + uuid = str(EmbeddedDashboardDAO.upsert(dash, ["test.example.com"]).uuid) db.session.expire_all() - embedded = EmbeddedDAO.find_by_id(uuid) + embedded = EmbeddedDashboardDAO.find_by_id(uuid) self.assertIsNotNone(embedded) diff --git a/tests/integration_tests/embedded/test_view.py b/tests/integration_tests/embedded/test_view.py index 9f524e9c09e2b..1b3248883f9a6 100644 --- a/tests/integration_tests/embedded/test_view.py +++ b/tests/integration_tests/embedded/test_view.py @@ -22,7 +22,7 @@ import pytest from superset import db -from superset.embedded.dao import EmbeddedDAO +from superset.daos.dashboard import EmbeddedDashboardDAO from superset.models.dashboard import Dashboard from tests.integration_tests.fixtures.birth_names_dashboard import ( load_birth_names_dashboard_with_slices, @@ -43,7 +43,7 @@ ) def test_get_embedded_dashboard(client: FlaskClient[Any]): dash = db.session.query(Dashboard).filter_by(slug="births").first() - embedded = EmbeddedDAO.upsert(dash, []) + embedded = EmbeddedDashboardDAO.upsert(dash, []) uri = f"embedded/{embedded.uuid}" response = client.get(uri) assert response.status_code == 200 @@ -56,7 +56,7 @@ def test_get_embedded_dashboard(client: FlaskClient[Any]): ) def test_get_embedded_dashboard_referrer_not_allowed(client: FlaskClient[Any]): dash = db.session.query(Dashboard).filter_by(slug="births").first() - embedded = EmbeddedDAO.upsert(dash, ["test.example.com"]) + embedded = EmbeddedDashboardDAO.upsert(dash, ["test.example.com"]) uri = f"embedded/{embedded.uuid}" response = client.get(uri) assert response.status_code == 403 diff --git a/tests/integration_tests/explore/api_tests.py b/tests/integration_tests/explore/api_tests.py index af5bd8813753d..50606257c267f 100644 --- a/tests/integration_tests/explore/api_tests.py +++ b/tests/integration_tests/explore/api_tests.py @@ -216,7 +216,7 @@ def test_get_dataset_access_denied( assert data["message"] == message -@patch("superset.datasource.dao.DatasourceDAO.get_datasource") +@patch("superset.daos.datasource.DatasourceDAO.get_datasource") def test_wrong_endpoint(mock_get_datasource, test_client, login_as_admin, dataset): dataset.default_endpoint = "another_endpoint" mock_get_datasource.return_value = dataset diff --git a/tests/integration_tests/log_api_tests.py b/tests/integration_tests/log_api_tests.py index 83a7f5fd84b31..6a18ea926ffd3 100644 --- a/tests/integration_tests/log_api_tests.py +++ b/tests/integration_tests/log_api_tests.py @@ -28,9 +28,9 @@ from superset import db from superset.models.core import Log from 
superset.views.log.api import LogRestApi +from tests.integration_tests.conftest import with_feature_flags from tests.integration_tests.dashboard_utils import create_dashboard from tests.integration_tests.test_app import app - from .base_tests import SupersetTestCase @@ -159,20 +159,6 @@ def test_update_log(self): db.session.delete(log) db.session.commit() - def test_get_recent_activity_no_broad_access(self): - """ - Log API: Test recent activity not visible for other users without - ENABLE_BROAD_ACTIVITY_ACCESS flag on - """ - admin_user = self.get_user("admin") - self.login(username="admin") - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = False - - uri = f"api/v1/log/recent_activity/{admin_user.id + 1}/" - rv = self.client.get(uri) - self.assertEqual(rv.status_code, 403) - app.config["ENABLE_BROAD_ACTIVITY_ACCESS"] = True - def test_get_recent_activity(self): """ Log API: Test recent activity endpoint @@ -183,7 +169,7 @@ def test_get_recent_activity(self): log1 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) - uri = f"api/v1/log/recent_activity/{admin_user.id}/" + uri = f"api/v1/log/recent_activity/" rv = self.client.get(uri) self.assertEqual(rv.status_code, 200) response = json.loads(rv.data.decode("utf-8")) @@ -220,7 +206,7 @@ def test_get_recent_activity_actions_filter(self): log2 = self.insert_log("explore", admin_user, dashboard_id=dash.id) arguments = {"actions": ["dashboard"]} - uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + uri = f"api/v1/log/recent_activity/?q={prison.dumps(arguments)}" rv = self.client.get(uri) db.session.delete(log) @@ -245,7 +231,7 @@ def test_get_recent_activity_distinct_false(self): log2 = self.insert_log("dashboard", admin_user, dashboard_id=dash.id) arguments = {"distinct": False} - uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + uri = f"api/v1/log/recent_activity/?q={prison.dumps(arguments)}" rv = self.client.get(uri) db.session.delete(log) @@ -275,7 +261,7 @@ def test_get_recent_activity_pagination(self): log.dttm = now - timedelta(days=2) arguments = {"page": 0, "page_size": 2} - uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + uri = f"api/v1/log/recent_activity/?q={prison.dumps(arguments)}" rv = self.client.get(uri) self.assertEqual(rv.status_code, 200) @@ -305,7 +291,7 @@ def test_get_recent_activity_pagination(self): ) arguments = {"page": 1, "page_size": 2} - uri = f"api/v1/log/recent_activity/{admin_user.id}/?q={prison.dumps(arguments)}" + uri = f"api/v1/log/recent_activity/?q={prison.dumps(arguments)}" rv = self.client.get(uri) db.session.delete(log) diff --git a/tests/integration_tests/model_tests.py b/tests/integration_tests/model_tests.py index c4bc7aa89bd9f..3a5f7c0a77a1c 100644 --- a/tests/integration_tests/model_tests.py +++ b/tests/integration_tests/model_tests.py @@ -671,3 +671,8 @@ def test_data_for_slices_with_adhoc_column(self): # clean up and auto commit metadata_db.session.delete(slc) + + @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") + def test_table_column_database(self) -> None: + tbl = self.get_table(name="birth_names") + assert tbl.get_column("ds").database is tbl.database # type: ignore diff --git a/tests/integration_tests/queries/api_tests.py b/tests/integration_tests/queries/api_tests.py index b3b291cf96627..c58817c8e0ed1 100644 --- a/tests/integration_tests/queries/api_tests.py +++ 
b/tests/integration_tests/queries/api_tests.py @@ -439,7 +439,6 @@ def test_get_updated_since(self): for key, value in data["result"][0].items(): # We can't assert timestamp if key not in ( - "changedOn", "changed_on", "end_time", "start_running_time", diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py index 7a3d4e4a1e873..8c2082d1c4b12 100644 --- a/tests/integration_tests/query_context_tests.py +++ b/tests/integration_tests/query_context_tests.py @@ -30,7 +30,7 @@ from superset.common.query_context_factory import QueryContextFactory from superset.common.query_object import QueryObject from superset.connectors.sqla.models import SqlMetric -from superset.datasource.dao import DatasourceDAO +from superset.daos.datasource import DatasourceDAO from superset.extensions import cache_manager from superset.superset_typing import AdhocColumn from superset.utils.core import ( diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py index db80079d77079..120559f8fd686 100644 --- a/tests/integration_tests/reports/commands_tests.py +++ b/tests/integration_tests/reports/commands_tests.py @@ -1955,7 +1955,7 @@ def test_grace_period_error_flap( @pytest.mark.usefixtures( "load_birth_names_dashboard_with_slices", "create_report_email_dashboard" ) -@patch("superset.reports.dao.ReportScheduleDAO.bulk_delete_logs") +@patch("superset.daos.report.ReportScheduleDAO.bulk_delete_logs") def test_prune_log_soft_time_out(bulk_delete_logs, create_report_email_dashboard): from celery.exceptions import SoftTimeLimitExceeded diff --git a/tests/integration_tests/security/api_tests.py b/tests/integration_tests/security/api_tests.py index 9a5a085c81c34..2462803f21a1d 100644 --- a/tests/integration_tests/security/api_tests.py +++ b/tests/integration_tests/security/api_tests.py @@ -23,7 +23,7 @@ from flask_wtf.csrf import generate_csrf from superset import db -from superset.embedded.dao import EmbeddedDAO +from superset.daos.dashboard import EmbeddedDashboardDAO from superset.models.dashboard import Dashboard from superset.utils.urls import get_url_host from tests.integration_tests.base_tests import SupersetTestCase @@ -89,7 +89,7 @@ def test_post_guest_token_unauthorized(self): @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_post_guest_token_authorized(self): self.dash = db.session.query(Dashboard).filter_by(slug="births").first() - self.embedded = EmbeddedDAO.upsert(self.dash, []) + self.embedded = EmbeddedDashboardDAO.upsert(self.dash, []) self.login(username="admin") user = {"username": "bob", "first_name": "Bob", "last_name": "Also Bob"} resource = {"type": "dashboard", "id": str(self.embedded.uuid)} diff --git a/tests/integration_tests/security/guest_token_security_tests.py b/tests/integration_tests/security/guest_token_security_tests.py index 78bd8bde86f51..86d02975d093a 100644 --- a/tests/integration_tests/security/guest_token_security_tests.py +++ b/tests/integration_tests/security/guest_token_security_tests.py @@ -21,8 +21,8 @@ from flask import g from superset import db, security_manager +from superset.daos.dashboard import EmbeddedDashboardDAO from superset.dashboards.commands.exceptions import DashboardAccessDeniedError -from superset.embedded.dao import EmbeddedDAO from superset.exceptions import SupersetSecurityException from superset.models.dashboard import Dashboard from superset.security.guest_token import GuestTokenResourceType @@ -100,7 +100,7 @@ def 
test_get_guest_user_roles_implicit(self): class TestGuestUserDashboardAccess(SupersetTestCase): def setUp(self) -> None: self.dash = db.session.query(Dashboard).filter_by(slug="births").first() - self.embedded = EmbeddedDAO.upsert(self.dash, []) + self.embedded = EmbeddedDashboardDAO.upsert(self.dash, []) self.authorized_guest = security_manager.get_guest_user_from_token( { "user": {}, diff --git a/tests/integration_tests/security_tests.py b/tests/integration_tests/security_tests.py index 34350a62bd1ab..fe199443875dd 100644 --- a/tests/integration_tests/security_tests.py +++ b/tests/integration_tests/security_tests.py @@ -29,7 +29,7 @@ from flask import current_app from flask_appbuilder.security.sqla.models import Role -from superset.datasource.dao import DatasourceDAO +from superset.daos.datasource import DatasourceDAO from superset.models.dashboard import Dashboard from superset import app, appbuilder, db, security_manager, viz from superset.connectors.sqla.models import SqlaTable @@ -45,7 +45,6 @@ ) from superset.utils.database import get_example_database from superset.utils.urls import get_url_host -from superset.views.access_requests import AccessRequestsModelView from .base_tests import SupersetTestCase from tests.integration_tests.fixtures.public_role import ( @@ -1351,19 +1350,12 @@ def assert_can_gamma(self, perm_set): # make sure that user can create slices and dashboards self.assert_can_all("Dashboard", perm_set) self.assert_can_all("Chart", perm_set) - self.assertIn(("can_add_slices", "Superset"), perm_set) - self.assertIn(("can_copy_dash", "Superset"), perm_set) - self.assertIn(("can_created_dashboards", "Superset"), perm_set) - self.assertIn(("can_created_slices", "Superset"), perm_set) self.assertIn(("can_csv", "Superset"), perm_set) self.assertIn(("can_dashboard", "Superset"), perm_set) self.assertIn(("can_explore", "Superset"), perm_set) self.assertIn(("can_share_chart", "Superset"), perm_set) self.assertIn(("can_share_dashboard", "Superset"), perm_set) self.assertIn(("can_explore_json", "Superset"), perm_set) - self.assertIn(("can_fave_dashboards", "Superset"), perm_set) - self.assertIn(("can_fave_slices", "Superset"), perm_set) - self.assertIn(("can_save_dash", "Superset"), perm_set) self.assertIn(("can_explore_json", "Superset"), perm_set) self.assertIn(("can_userinfo", "UserDBModelView"), perm_set) self.assert_can_menu("Databases", perm_set) @@ -1386,9 +1378,6 @@ def assert_can_alpha(self, perm_set): self.assertIn(("all_datasource_access", "all_datasource_access"), perm_set) def assert_cannot_alpha(self, perm_set): - if app.config["ENABLE_ACCESS_REQUEST"]: - self.assert_cannot_write("AccessRequestsModelView", perm_set) - self.assert_can_all("AccessRequestsModelView", perm_set) self.assert_cannot_write("Queries", perm_set) self.assert_cannot_write("RoleModelView", perm_set) self.assert_cannot_write("UserDBModelView", perm_set) @@ -1398,12 +1387,7 @@ def assert_can_admin(self, perm_set): self.assert_can_all("Database", perm_set) self.assert_can_all("RoleModelView", perm_set) self.assert_can_all("UserDBModelView", perm_set) - self.assertIn(("all_database_access", "all_database_access"), perm_set) - self.assertIn(("can_override_role_permissions", "Superset"), perm_set) - self.assertIn(("can_override_role_permissions", "Superset"), perm_set) - self.assertIn(("can_approve", "Superset"), perm_set) - self.assert_can_menu("Security", perm_set) self.assert_can_menu("List Users", perm_set) self.assert_can_menu("List Roles", perm_set) @@ -1430,14 +1414,6 @@ def 
test_is_admin_only(self): ) ) - if app.config["ENABLE_ACCESS_REQUEST"]: - self.assertTrue( - security_manager._is_admin_only( - security_manager.find_permission_view_menu( - "can_list", "AccessRequestsModelView" - ) - ) - ) self.assertTrue( security_manager._is_admin_only( security_manager.find_permission_view_menu( @@ -1445,11 +1421,6 @@ def test_is_admin_only(self): ) ) ) - self.assertTrue( - security_manager._is_admin_only( - security_manager.find_permission_view_menu("can_approve", "Superset") - ) - ) @unittest.skipUnless( SupersetTestCase.is_module_installed("pydruid"), "pydruid not installed" @@ -1522,9 +1493,6 @@ def test_sql_lab_permissions(self): self.assertIn(("can_csv", "Superset"), sql_lab_set) self.assertIn(("can_read", "Database"), sql_lab_set) self.assertIn(("can_read", "SavedQuery"), sql_lab_set) - self.assertIn(("can_sql_json", "Superset"), sql_lab_set) - self.assertIn(("can_sqllab_viz", "Superset"), sql_lab_set) - self.assertIn(("can_sqllab_table_viz", "Superset"), sql_lab_set) self.assertIn(("can_sqllab", "Superset"), sql_lab_set) self.assertIn(("menu_access", "SQL Lab"), sql_lab_set) @@ -1534,13 +1502,6 @@ def test_sql_lab_permissions(self): self.assert_cannot_alpha(sql_lab_set) - def test_granter_permissions(self): - granter_set = get_perm_tuples("granter") - self.assertIn(("can_override_role_permissions", "Superset"), granter_set) - self.assertIn(("can_approve", "Superset"), granter_set) - - self.assert_cannot_alpha(granter_set) - def test_gamma_permissions(self): gamma_perm_set = set() for perm in security_manager.find_role("Gamma").permissions: @@ -1560,19 +1521,12 @@ def test_gamma_permissions(self): self.assert_cannot_write("UserDBModelView", gamma_perm_set) self.assert_cannot_write("RoleModelView", gamma_perm_set) - self.assertIn(("can_add_slices", "Superset"), gamma_perm_set) - self.assertIn(("can_copy_dash", "Superset"), gamma_perm_set) - self.assertIn(("can_created_dashboards", "Superset"), gamma_perm_set) - self.assertIn(("can_created_slices", "Superset"), gamma_perm_set) self.assertIn(("can_csv", "Superset"), gamma_perm_set) self.assertIn(("can_dashboard", "Superset"), gamma_perm_set) self.assertIn(("can_explore", "Superset"), gamma_perm_set) self.assertIn(("can_share_chart", "Superset"), gamma_perm_set) self.assertIn(("can_share_dashboard", "Superset"), gamma_perm_set) self.assertIn(("can_explore_json", "Superset"), gamma_perm_set) - self.assertIn(("can_fave_dashboards", "Superset"), gamma_perm_set) - self.assertIn(("can_fave_slices", "Superset"), gamma_perm_set) - self.assertIn(("can_save_dash", "Superset"), gamma_perm_set) self.assertIn(("can_userinfo", "UserDBModelView"), gamma_perm_set) def test_views_are_secured(self): @@ -1752,22 +1706,6 @@ def test_get_anonymous_roles(self, mock_g): self.assertEqual([security_manager.get_public_role()], roles) -class TestAccessRequestEndpoints(SupersetTestCase): - def test_access_request_disabled(self): - with patch.object(AccessRequestsModelView, "is_enabled", return_value=False): - self.login("admin") - uri = "/accessrequestsmodelview/list/" - rv = self.client.get(uri) - self.assertEqual(rv.status_code, 404) - - def test_access_request_enabled(self): - with patch.object(AccessRequestsModelView, "is_enabled", return_value=True): - self.login("admin") - uri = "/accessrequestsmodelview/list/" - rv = self.client.get(uri) - self.assertLess(rv.status_code, 400) - - class TestDatasources(SupersetTestCase): @patch("superset.security.manager.g") @patch("superset.security.SupersetSecurityManager.can_access_database") diff 
--git a/tests/integration_tests/sql_validator_tests.py b/tests/integration_tests/sql_validator_tests.py index d2f6e7108d42a..0c53a6e28ac5e 100644 --- a/tests/integration_tests/sql_validator_tests.py +++ b/tests/integration_tests/sql_validator_tests.py @@ -19,12 +19,8 @@ import unittest from unittest.mock import MagicMock, patch -import pytest from pyhive.exc import DatabaseError -from superset import app -from superset.sql_validators import SQLValidationAnnotation -from superset.sql_validators.base import BaseSQLValidator from superset.sql_validators.postgres import PostgreSQLValidator from superset.sql_validators.presto_db import ( PrestoDBSQLValidator, @@ -34,139 +30,6 @@ from .base_tests import SupersetTestCase -PRESTO_SQL_VALIDATORS_BY_ENGINE = { - "presto": "PrestoDBSQLValidator", - "sqlite": "PrestoDBSQLValidator", - "postgresql": "PrestoDBSQLValidator", - "mysql": "PrestoDBSQLValidator", -} - - -class TestSqlValidatorEndpoint(SupersetTestCase): - """Testing for Sql Lab querytext validation endpoint""" - - def tearDown(self): - self.logout() - - @patch.dict( - "superset.config.SQL_VALIDATORS_BY_ENGINE", - {}, - clear=True, - ) - def test_validate_sql_endpoint_noconfig(self): - """Assert that validate_sql_json errors out when no validators are - configured for any db""" - self.login("admin") - - resp = self.validate_sql( - "SELECT * FROM birth_names", client_id="1", raise_on_error=False - ) - self.assertIn("error", resp) - self.assertIn("no SQL validator is configured", resp["error"]) - - @patch("superset.views.core.get_validator_by_name") - @patch.dict( - "superset.config.SQL_VALIDATORS_BY_ENGINE", - PRESTO_SQL_VALIDATORS_BY_ENGINE, - clear=True, - ) - def test_validate_sql_endpoint_mocked(self, get_validator_by_name): - """Assert that, with a mocked validator, annotations make it back out - from the validate_sql_json endpoint as a list of json dictionaries""" - if get_example_database().backend == "hive": - pytest.skip("Hive validator is not implemented") - self.login("admin") - - validator = MagicMock() - get_validator_by_name.return_value = validator - validator.validate.return_value = [ - SQLValidationAnnotation( - message="I don't know what I expected, but it wasn't this", - line_number=4, - start_column=12, - end_column=42, - ) - ] - - resp = self.validate_sql( - "SELECT * FROM somewhere_over_the_rainbow", - client_id="1", - raise_on_error=False, - ) - - self.assertEqual(1, len(resp)) - self.assertIn("expected,", resp[0]["message"]) - - @patch("superset.views.core.get_validator_by_name") - @patch.dict( - "superset.config.SQL_VALIDATORS_BY_ENGINE", - PRESTO_SQL_VALIDATORS_BY_ENGINE, - clear=True, - ) - def test_validate_sql_endpoint_mocked_params(self, get_validator_by_name): - """Assert that, with a mocked validator, annotations make it back out - from the validate_sql_json endpoint as a list of json dictionaries""" - if get_example_database().backend == "hive": - pytest.skip("Hive validator is not implemented") - self.login("admin") - - validator = MagicMock() - get_validator_by_name.return_value = validator - validator.validate.return_value = [ - SQLValidationAnnotation( - message="This worked", - line_number=4, - start_column=12, - end_column=42, - ) - ] - - resp = self.validate_sql( - "SELECT * FROM somewhere_over_the_rainbow", - client_id="1", - raise_on_error=False, - template_params="null", - ) - - self.assertEqual(1, len(resp)) - self.assertNotIn("error,", resp[0]["message"]) - - @patch("superset.views.core.get_validator_by_name") - @patch.dict( - 
"superset.config.SQL_VALIDATORS_BY_ENGINE", - PRESTO_SQL_VALIDATORS_BY_ENGINE, - clear=True, - ) - def test_validate_sql_endpoint_failure(self, get_validator_by_name): - """Assert that validate_sql_json errors out when the selected validator - raises an unexpected exception""" - self.login("admin") - - validator = MagicMock() - get_validator_by_name.return_value = validator - validator.validate.side_effect = Exception("Kaboom!") - - resp = self.validate_sql( - "SELECT * FROM birth_names", client_id="1", raise_on_error=False - ) - # TODO(bkyryliuk): properly handle hive error - if get_example_database().backend == "hive": - assert resp["error"] == "no SQL validator is configured for hive" - else: - self.assertIn("error", resp) - self.assertIn("Kaboom!", resp["error"]) - - -class TestBaseValidator(SupersetTestCase): - """Testing for the base sql validator""" - - def setUp(self): - self.validator = BaseSQLValidator - - def test_validator_excepts(self): - with self.assertRaises(NotImplementedError): - self.validator.validate(None, None, None) - class TestPrestoValidator(SupersetTestCase): """Testing for the prestodb sql validator""" @@ -236,22 +99,6 @@ def test_validator_query_error(self, flask_g): self.assertEqual(1, len(errors)) - @patch.dict( - "superset.config.SQL_VALIDATORS_BY_ENGINE", - {}, - clear=True, - ) - def test_validate_sql_endpoint(self): - self.login("admin") - # NB this is effectively an integration test -- when there's a default - # validator for sqlite, this test will fail because the validator - # will no longer error out. - resp = self.validate_sql( - "SELECT * FROM birth_names", client_id="1", raise_on_error=False - ) - self.assertIn("error", resp) - self.assertIn("no SQL validator is configured", resp["error"]) - class TestPostgreSQLValidator(SupersetTestCase): def test_valid_syntax(self): diff --git a/tests/integration_tests/sqllab_tests.py b/tests/integration_tests/sqllab_tests.py index e9892b1d36c4b..fbab4d98d25ec 100644 --- a/tests/integration_tests/sqllab_tests.py +++ b/tests/integration_tests/sqllab_tests.py @@ -17,13 +17,11 @@ # isort:skip_file """Unit tests for Sql Lab""" import json -from datetime import datetime, timedelta -from math import ceil, floor +from datetime import datetime import pytest from celery.exceptions import SoftTimeLimitExceeded from parameterized import parameterized -from random import random from unittest import mock import prison @@ -327,132 +325,6 @@ def test_sql_json_schema_access(self): engine.execute(f"DROP TABLE IF EXISTS {CTAS_SCHEMA_NAME}.test_table") db.session.commit() - def test_queries_endpoint(self): - self.run_some_queries() - - # Not logged in, should error out - resp = self.client.get("/superset/queries/0") - # Redirects to the login page - self.assertEqual(401, resp.status_code) - - # Admin sees queries - self.login("admin") - data = self.get_json_resp("/superset/queries/0") - self.assertEqual(2, len(data)) - data = self.get_json_resp("/superset/queries/0.0") - self.assertEqual(2, len(data)) - - # Run 2 more queries - self.run_sql("SELECT * FROM birth_names LIMIT 1", client_id="client_id_4") - self.run_sql("SELECT * FROM birth_names LIMIT 2", client_id="client_id_5") - self.login("admin") - data = self.get_json_resp("/superset/queries/0") - self.assertEqual(4, len(data)) - - now = datetime.now() + timedelta(days=1) - query = ( - db.session.query(Query) - .filter_by(sql="SELECT * FROM birth_names LIMIT 1") - .first() - ) - query.changed_on = now - db.session.commit() - - data = self.get_json_resp( - 
f"/superset/queries/{float(datetime_to_epoch(now)) - 1000}" - ) - self.assertEqual(1, len(data)) - - self.logout() - resp = self.client.get("/superset/queries/0") - # Redirects to the login page - self.assertEqual(401, resp.status_code) - - def test_search_query_on_db_id(self): - self.run_some_queries() - self.login("admin") - examples_dbid = get_example_database().id - - # Test search queries on database Id - data = self.get_json_resp( - f"/superset/search_queries?database_id={examples_dbid}" - ) - self.assertEqual(3, len(data)) - db_ids = [k["dbId"] for k in data] - self.assertEqual([examples_dbid for i in range(3)], db_ids) - - resp = self.get_resp("/superset/search_queries?database_id=-1") - data = json.loads(resp) - self.assertEqual(0, len(data)) - - def test_search_query_on_user(self): - self.run_some_queries() - self.login("admin") - - # Test search queries on user Id - user_id = security_manager.find_user("admin").id - data = self.get_json_resp(f"/superset/search_queries?user_id={user_id}") - self.assertEqual(2, len(data)) - user_ids = {k["userId"] for k in data} - self.assertEqual({user_id}, user_ids) - - user_id = security_manager.find_user("gamma_sqllab").id - resp = self.get_resp(f"/superset/search_queries?user_id={user_id}") - data = json.loads(resp) - self.assertEqual(1, len(data)) - self.assertEqual(data[0]["userId"], user_id) - - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") - def test_search_query_on_status(self): - self.run_some_queries() - self.login("admin") - # Test search queries on status - resp = self.get_resp("/superset/search_queries?status=success") - data = json.loads(resp) - self.assertEqual(2, len(data)) - states = [k["state"] for k in data] - self.assertEqual(["success", "success"], states) - - resp = self.get_resp("/superset/search_queries?status=failed") - data = json.loads(resp) - self.assertEqual(1, len(data)) - self.assertEqual(data[0]["state"], "failed") - - def test_search_query_on_text(self): - self.run_some_queries() - self.login("admin") - url = "/superset/search_queries?search_text=birth" - data = self.get_json_resp(url) - self.assertEqual(2, len(data)) - self.assertIn("birth", data[0]["sql"]) - - def test_search_query_filter_by_time(self): - self.run_some_queries() - self.login("admin") - from_time = floor( - (db.session.query(Query).filter_by(sql=QUERY_1).one()).start_time - ) - to_time = ceil( - (db.session.query(Query).filter_by(sql=QUERY_2).one()).start_time - ) - url = f"/superset/search_queries?from={from_time}&to={to_time}" - assert len(self.client.get(url).json) == 2 - - def test_search_query_only_owned(self) -> None: - """ - Test a search query with a user that does not have can_access_all_queries. 
- """ - # Test search_queries for Alpha user - self.run_some_queries() - self.login("gamma_sqllab") - - user_id = security_manager.find_user("gamma_sqllab").id - data = self.get_json_resp("/superset/search_queries") - - self.assertEqual(1, len(data)) - user_ids = {k["userId"] for k in data} - self.assertEqual({user_id}, user_ids) - def test_alias_duplicate(self): self.run_sql( "SELECT name as col, gender as col FROM birth_names LIMIT 10", @@ -485,102 +357,6 @@ def test_pa_conversion_dict(self): self.assertEqual(len(data), results.size) self.assertEqual(len(cols), len(results.columns)) - def test_sqllab_viz(self): - self.login("admin") - examples_dbid = get_example_database().id - payload = { - "chartType": "dist_bar", - "datasourceName": f"test_viz_flow_table_{random()}", - "schema": "superset", - "columns": [ - { - "is_dttm": False, - "type": "STRING", - "column_name": f"viz_type_{random()}", - }, - { - "is_dttm": False, - "type": "OBJECT", - "column_name": f"ccount_{random()}", - }, - ], - "sql": """\ - SELECT * - FROM birth_names - LIMIT 10""", - "dbId": examples_dbid, - } - data = {"data": json.dumps(payload)} - resp = self.get_json_resp("/superset/sqllab_viz/", data=data) - self.assertIn("table_id", resp) - - # ensure owner is set correctly - table_id = resp["table_id"] - table = db.session.query(SqlaTable).filter_by(id=table_id).one() - self.assertEqual([owner.username for owner in table.owners], ["admin"]) - view_menu = security_manager.find_view_menu(table.get_perm()) - assert view_menu is not None - - # Cleanup - db.session.delete(table) - db.session.commit() - - def test_sqllab_viz_bad_payload(self): - self.login("admin") - payload = { - "chartType": "dist_bar", - "schema": "superset", - "columns": [ - { - "is_dttm": False, - "type": "STRING", - "column_name": f"viz_type_{random()}", - }, - { - "is_dttm": False, - "type": "OBJECT", - "column_name": f"ccount_{random()}", - }, - ], - "sql": """\ - SELECT * - FROM birth_names - LIMIT 10""", - } - data = {"data": json.dumps(payload)} - url = "/superset/sqllab_viz/" - response = self.client.post(url, data=data, follow_redirects=True) - assert response.status_code == 400 - - def test_sqllab_table_viz(self): - self.login("admin") - examples_db = get_example_database() - with examples_db.get_sqla_engine_with_context() as engine: - engine.execute("DROP TABLE IF EXISTS test_sqllab_table_viz") - engine.execute("CREATE TABLE test_sqllab_table_viz AS SELECT 2 as col") - - examples_dbid = examples_db.id - - payload = { - "datasourceName": "test_sqllab_table_viz", - "columns": [], - "dbId": examples_dbid, - } - - data = {"data": json.dumps(payload)} - resp = self.get_json_resp("/superset/get_or_create_table/", data=data) - self.assertIn("table_id", resp) - - # ensure owner is set correctly - table_id = resp["table_id"] - table = db.session.query(SqlaTable).filter_by(id=table_id).one() - self.assertEqual([owner.username for owner in table.owners], ["admin"]) - db.session.delete(table) - - with get_example_database().get_sqla_engine_with_context() as engine: - engine.execute("DROP TABLE test_sqllab_table_viz") - db.session.commit() - @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") def test_sql_limit(self): self.login("admin") diff --git a/tests/integration_tests/tags/dao_tests.py b/tests/integration_tests/tags/dao_tests.py index 49b22d260b048..8acaa353e9d31 100644 --- a/tests/integration_tests/tags/dao_tests.py +++ b/tests/integration_tests/tags/dao_tests.py @@ -18,17 +18,17 @@ from operator import and_ from unittest.mock 
import patch import pytest -from superset.dao.exceptions import DAOCreateFailedError, DAOException +from superset.daos.exceptions import DAOCreateFailedError, DAOException from superset.models.slice import Slice from superset.models.sql_lab import SavedQuery -from superset.tags.dao import TagDAO +from superset.daos.tag import TagDAO from superset.tags.exceptions import InvalidTagNameError from superset.tags.models import ObjectTypes, Tag, TaggedObject from tests.integration_tests.tags.api_tests import TAGS_FIXTURE_COUNT import tests.integration_tests.test_app # pylint: disable=unused-import from superset import db, security_manager -from superset.dashboards.dao import DashboardDAO +from superset.daos.dashboard import DashboardDAO from superset.models.dashboard import Dashboard from tests.integration_tests.base_tests import SupersetTestCase from tests.integration_tests.fixtures.world_bank_dashboard import ( diff --git a/tests/unit_tests/charts/dao/dao_tests.py b/tests/unit_tests/charts/dao/dao_tests.py index b1d5cc64881da..e8c58b5600723 100644 --- a/tests/unit_tests/charts/dao/dao_tests.py +++ b/tests/unit_tests/charts/dao/dao_tests.py @@ -45,7 +45,7 @@ def session_with_data(session: Session) -> Iterator[Session]: def test_slice_find_by_id_skip_base_filter(session_with_data: Session) -> None: - from superset.charts.dao import ChartDAO + from superset.daos.chart import ChartDAO from superset.models.slice import Slice result = ChartDAO.find_by_id(1, session=session_with_data, skip_base_filter=True) @@ -59,7 +59,7 @@ def test_slice_find_by_id_skip_base_filter(session_with_data: Session) -> None: def test_datasource_find_by_id_skip_base_filter_not_found( session_with_data: Session, ) -> None: - from superset.charts.dao import ChartDAO + from superset.daos.chart import ChartDAO result = ChartDAO.find_by_id( 125326326, session=session_with_data, skip_base_filter=True @@ -68,7 +68,7 @@ def test_datasource_find_by_id_skip_base_filter_not_found( def test_add_favorite(session_with_data: Session) -> None: - from superset.charts.dao import ChartDAO + from superset.daos.chart import ChartDAO chart = ChartDAO.find_by_id(1, session=session_with_data, skip_base_filter=True) if not chart: @@ -83,7 +83,7 @@ def test_add_favorite(session_with_data: Session) -> None: def test_remove_favorite(session_with_data: Session) -> None: - from superset.charts.dao import ChartDAO + from superset.daos.chart import ChartDAO chart = ChartDAO.find_by_id(1, session=session_with_data, skip_base_filter=True) if not chart: diff --git a/tests/unit_tests/dao/queries_test.py b/tests/unit_tests/dao/queries_test.py index d0ab3ec8a51f3..65e9bbfbfbc0a 100644 --- a/tests/unit_tests/dao/queries_test.py +++ b/tests/unit_tests/dao/queries_test.py @@ -51,7 +51,7 @@ def test_query_dao_save_metadata(session: Session) -> None: session.add(db) session.add(query_obj) - from superset.queries.dao import QueryDAO + from superset.daos.query import QueryDAO query = session.query(Query).one() QueryDAO.save_metadata(query=query, payload={"columns": []}) @@ -105,7 +105,7 @@ def test_query_dao_get_queries_changed_after(session: Session) -> None: session.add(old_query_obj) session.add(updated_query_obj) - from superset.queries.dao import QueryDAO + from superset.daos.query import QueryDAO timestamp = datetime.timestamp(now - timedelta(days=2)) * 1000 result = QueryDAO.get_queries_changed_after(timestamp) @@ -146,7 +146,7 @@ def test_query_dao_stop_query_not_found( mocker.patch("superset.sql_lab.cancel_query", return_value=False) - from 
superset.queries.dao import QueryDAO + from superset.daos.query import QueryDAO with pytest.raises(QueryNotFoundException): QueryDAO.stop_query("foo2") @@ -186,7 +186,7 @@ def test_query_dao_stop_query_not_running( session.add(db) session.add(query_obj) - from superset.queries.dao import QueryDAO + from superset.daos.query import QueryDAO QueryDAO.stop_query(query_obj.client_id) query = session.query(Query).one() @@ -226,7 +226,7 @@ def test_query_dao_stop_query_failed( mocker.patch("superset.sql_lab.cancel_query", return_value=False) - from superset.queries.dao import QueryDAO + from superset.daos.query import QueryDAO with pytest.raises(SupersetCancelQueryException): QueryDAO.stop_query(query_obj.client_id) @@ -266,7 +266,7 @@ def test_query_dao_stop_query(mocker: MockFixture, app: Any, session: Session) - mocker.patch("superset.sql_lab.cancel_query", return_value=True) - from superset.queries.dao import QueryDAO + from superset.daos.query import QueryDAO QueryDAO.stop_query(query_obj.client_id) query = session.query(Query).one() diff --git a/tests/unit_tests/dashboards/dao_tests.py b/tests/unit_tests/dashboards/dao_tests.py index c94d2ab15750b..3bf4038f1692d 100644 --- a/tests/unit_tests/dashboards/dao_tests.py +++ b/tests/unit_tests/dashboards/dao_tests.py @@ -43,7 +43,7 @@ def session_with_data(session: Session) -> Iterator[Session]: def test_add_favorite(session_with_data: Session) -> None: - from superset.dashboards.dao import DashboardDAO + from superset.daos.dashboard import DashboardDAO dashboard = DashboardDAO.find_by_id( 100, session=session_with_data, skip_base_filter=True @@ -60,7 +60,7 @@ def test_add_favorite(session_with_data: Session) -> None: def test_remove_favorite(session_with_data: Session) -> None: - from superset.dashboards.dao import DashboardDAO + from superset.daos.dashboard import DashboardDAO dashboard = DashboardDAO.find_by_id( 100, session=session_with_data, skip_base_filter=True diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py index 342920bd17dd4..24fde88369594 100644 --- a/tests/unit_tests/databases/api_test.py +++ b/tests/unit_tests/databases/api_test.py @@ -356,8 +356,8 @@ def test_delete_ssh_tunnel( Test that we can delete SSH Tunnel """ with app.app_context(): + from superset.daos.database import DatabaseDAO from superset.databases.api import DatabaseRestApi - from superset.databases.dao import DatabaseDAO from superset.databases.ssh_tunnel.models import SSHTunnel from superset.models.core import Database @@ -432,8 +432,8 @@ def test_delete_ssh_tunnel_not_found( Test that we cannot delete a tunnel that does not exist """ with app.app_context(): + from superset.daos.database import DatabaseDAO from superset.databases.api import DatabaseRestApi - from superset.databases.dao import DatabaseDAO from superset.databases.ssh_tunnel.models import SSHTunnel from superset.models.core import Database diff --git a/tests/unit_tests/databases/dao/dao_tests.py b/tests/unit_tests/databases/dao/dao_tests.py index f085cb53c7913..b792a65336a4e 100644 --- a/tests/unit_tests/databases/dao/dao_tests.py +++ b/tests/unit_tests/databases/dao/dao_tests.py @@ -51,7 +51,7 @@ def session_with_data(session: Session) -> Iterator[Session]: def test_database_get_ssh_tunnel(session_with_data: Session) -> None: - from superset.databases.dao import DatabaseDAO + from superset.daos.database import DatabaseDAO from superset.databases.ssh_tunnel.models import SSHTunnel result = DatabaseDAO.get_ssh_tunnel(1) @@ -62,7 +62,7 @@ def 
test_database_get_ssh_tunnel(session_with_data: Session) -> None: def test_database_get_ssh_tunnel_not_found(session_with_data: Session) -> None: - from superset.databases.dao import DatabaseDAO + from superset.daos.database import DatabaseDAO result = DatabaseDAO.get_ssh_tunnel(2) diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py index de0b70db9cbf3..641e34d3477a9 100644 --- a/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py +++ b/tests/unit_tests/databases/ssh_tunnel/commands/delete_test.py @@ -54,7 +54,7 @@ def session_with_data(session: Session) -> Iterator[Session]: def test_delete_ssh_tunnel_command( mocker: MockFixture, session_with_data: Session ) -> None: - from superset.databases.dao import DatabaseDAO + from superset.daos.database import DatabaseDAO from superset.databases.ssh_tunnel.commands.delete import DeleteSSHTunnelCommand from superset.databases.ssh_tunnel.models import SSHTunnel diff --git a/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py b/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py index 544cf3434a47c..d4a5faba8b145 100644 --- a/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py +++ b/tests/unit_tests/databases/ssh_tunnel/commands/update_test.py @@ -50,7 +50,7 @@ def session_with_data(session: Session) -> Iterator[Session]: def test_update_shh_tunnel_command(session_with_data: Session) -> None: - from superset.databases.dao import DatabaseDAO + from superset.daos.database import DatabaseDAO from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand from superset.databases.ssh_tunnel.models import SSHTunnel @@ -72,7 +72,7 @@ def test_update_shh_tunnel_command(session_with_data: Session) -> None: def test_update_shh_tunnel_invalid_params(session_with_data: Session) -> None: - from superset.databases.dao import DatabaseDAO + from superset.daos.database import DatabaseDAO from superset.databases.ssh_tunnel.commands.update import UpdateSSHTunnelCommand from superset.databases.ssh_tunnel.models import SSHTunnel diff --git a/tests/unit_tests/databases/ssh_tunnel/dao_tests.py b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py index 27f9c3b8ad548..af0b1ac001d2e 100644 --- a/tests/unit_tests/databases/ssh_tunnel/dao_tests.py +++ b/tests/unit_tests/databases/ssh_tunnel/dao_tests.py @@ -21,8 +21,7 @@ def test_create_ssh_tunnel(): - from superset.databases.dao import DatabaseDAO - from superset.databases.ssh_tunnel.dao import SSHTunnelDAO + from superset.daos.database import DatabaseDAO, SSHTunnelDAO from superset.databases.ssh_tunnel.models import SSHTunnel from superset.models.core import Database diff --git a/tests/unit_tests/datasets/dao/dao_tests.py b/tests/unit_tests/datasets/dao/dao_tests.py index 4eb43cd9de1bc..3302f2dc04b30 100644 --- a/tests/unit_tests/datasets/dao/dao_tests.py +++ b/tests/unit_tests/datasets/dao/dao_tests.py @@ -46,7 +46,7 @@ def session_with_data(session: Session) -> Iterator[Session]: def test_datasource_find_by_id_skip_base_filter(session_with_data: Session) -> None: from superset.connectors.sqla.models import SqlaTable - from superset.datasets.dao import DatasetDAO + from superset.daos.dataset import DatasetDAO result = DatasetDAO.find_by_id( 1, @@ -63,7 +63,7 @@ def test_datasource_find_by_id_skip_base_filter(session_with_data: Session) -> N def test_datasource_find_by_id_skip_base_filter_not_found( session_with_data: Session, ) -> None: - from superset.datasets.dao import DatasetDAO 
+ from superset.daos.dataset import DatasetDAO result = DatasetDAO.find_by_id( 125326326, @@ -75,7 +75,7 @@ def test_datasource_find_by_id_skip_base_filter_not_found( def test_datasource_find_by_ids_skip_base_filter(session_with_data: Session) -> None: from superset.connectors.sqla.models import SqlaTable - from superset.datasets.dao import DatasetDAO + from superset.daos.dataset import DatasetDAO result = DatasetDAO.find_by_ids( [1, 125326326], @@ -92,7 +92,7 @@ def test_datasource_find_by_ids_skip_base_filter(session_with_data: Session) -> def test_datasource_find_by_ids_skip_base_filter_not_found( session_with_data: Session, ) -> None: - from superset.datasets.dao import DatasetDAO + from superset.daos.dataset import DatasetDAO result = DatasetDAO.find_by_ids( [125326326, 125326326125326326], diff --git a/tests/unit_tests/datasource/dao_tests.py b/tests/unit_tests/datasource/dao_tests.py index 99a485030195f..0af2cbf0200bf 100644 --- a/tests/unit_tests/datasource/dao_tests.py +++ b/tests/unit_tests/datasource/dao_tests.py @@ -101,7 +101,7 @@ def session_with_data(session: Session) -> Iterator[Session]: def test_get_datasource_sqlatable(session_with_data: Session) -> None: from superset.connectors.sqla.models import SqlaTable - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO result = DatasourceDAO.get_datasource( datasource_type=DatasourceType.TABLE, @@ -115,7 +115,7 @@ def test_get_datasource_sqlatable(session_with_data: Session) -> None: def test_get_datasource_query(session_with_data: Session) -> None: - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO from superset.models.sql_lab import Query result = DatasourceDAO.get_datasource( @@ -127,7 +127,7 @@ def test_get_datasource_query(session_with_data: Session) -> None: def test_get_datasource_saved_query(session_with_data: Session) -> None: - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO from superset.models.sql_lab import SavedQuery result = DatasourceDAO.get_datasource( @@ -141,7 +141,7 @@ def test_get_datasource_saved_query(session_with_data: Session) -> None: def test_get_datasource_sl_table(session_with_data: Session) -> None: - from superset.datasource.dao import DatasourceDAO + from superset.daos.datasource import DatasourceDAO from superset.tables.models import Table result = DatasourceDAO.get_datasource( @@ -155,8 +155,8 @@ def test_get_datasource_sl_table(session_with_data: Session) -> None: def test_get_datasource_sl_dataset(session_with_data: Session) -> None: + from superset.daos.datasource import DatasourceDAO from superset.datasets.models import Dataset - from superset.datasource.dao import DatasourceDAO result = DatasourceDAO.get_datasource( datasource_type=DatasourceType.DATASET, @@ -170,8 +170,8 @@ def test_get_datasource_sl_dataset(session_with_data: Session) -> None: def test_get_datasource_w_str_param(session_with_data: Session) -> None: from superset.connectors.sqla.models import SqlaTable + from superset.daos.datasource import DatasourceDAO from superset.datasets.models import Dataset - from superset.datasource.dao import DatasourceDAO from superset.tables.models import Table assert isinstance( @@ -201,8 +201,8 @@ def test_get_all_datasources(session_with_data: Session) -> None: def test_not_found_datasource(session_with_data: Session) -> None: - from superset.dao.exceptions import DatasourceNotFound - from superset.datasource.dao import DatasourceDAO 
+ from superset.daos.datasource import DatasourceDAO + from superset.daos.exceptions import DatasourceNotFound with pytest.raises(DatasourceNotFound): DatasourceDAO.get_datasource( diff --git a/tests/unit_tests/explore/utils_test.py b/tests/unit_tests/explore/utils_test.py index b2989b1244de3..de39187ec7f68 100644 --- a/tests/unit_tests/explore/utils_test.py +++ b/tests/unit_tests/explore/utils_test.py @@ -35,9 +35,9 @@ from superset.exceptions import SupersetSecurityException from superset.utils.core import DatasourceType, override_user -dataset_find_by_id = "superset.datasets.dao.DatasetDAO.find_by_id" -query_find_by_id = "superset.queries.dao.QueryDAO.find_by_id" -chart_find_by_id = "superset.charts.dao.ChartDAO.find_by_id" +dataset_find_by_id = "superset.daos.dataset.DatasetDAO.find_by_id" +query_find_by_id = "superset.daos.query.QueryDAO.find_by_id" +chart_find_by_id = "superset.daos.chart.ChartDAO.find_by_id" is_admin = "superset.security.SupersetSecurityManager.is_admin" is_owner = "superset.security.SupersetSecurityManager.is_owner" can_access_datasource = ( diff --git a/tests/unit_tests/extensions/ssh_test.py b/tests/unit_tests/extensions/ssh_test.py index 4538d719697ea..13bf905e6f9c6 100644 --- a/tests/unit_tests/extensions/ssh_test.py +++ b/tests/unit_tests/extensions/ssh_test.py @@ -28,8 +28,10 @@ def test_ssh_tunnel_timeout_setting() -> None: "SSH_TUNNEL_MAX_RETRIES": 2, "SSH_TUNNEL_LOCAL_BIND_ADDRESS": "test", "SSH_TUNNEL_TIMEOUT_SEC": 123.0, + "SSH_TUNNEL_PACKET_TIMEOUT_SEC": 321.0, "SSH_TUNNEL_MANAGER_CLASS": "superset.extensions.ssh.SSHManager", } factory = SSHManagerFactory() factory.init_app(app) assert sshtunnel.TUNNEL_TIMEOUT == 123.0 + assert sshtunnel.SSH_TIMEOUT == 321.0 diff --git a/tests/unit_tests/jinja_context_test.py b/tests/unit_tests/jinja_context_test.py index 3478a9e3f0932..fe4b144d2fd7a 100644 --- a/tests/unit_tests/jinja_context_test.py +++ b/tests/unit_tests/jinja_context_test.py @@ -83,7 +83,7 @@ def test_dataset_macro(mocker: MockFixture) -> None: schema_perm=None, extra=json.dumps({"warning_markdown": "*WARNING*"}), ) - DatasetDAO = mocker.patch("superset.datasets.dao.DatasetDAO") + DatasetDAO = mocker.patch("superset.daos.dataset.DatasetDAO") DatasetDAO.find_by_id.return_value = dataset assert ( @@ -143,7 +143,7 @@ def mutator(sql: str) -> str: """ return f"-- begin\n{sql}\n-- end" - DatasetDAO = mocker.patch("superset.datasets.dao.DatasetDAO") + DatasetDAO = mocker.patch("superset.daos.dataset.DatasetDAO") DatasetDAO.find_by_id().get_query_str_extended().sql = mutator("SELECT 1") assert ( dataset_macro(1) diff --git a/tests/unit_tests/models/core_test.py b/tests/unit_tests/models/core_test.py index d37296447ad63..267b7c024aae5 100644 --- a/tests/unit_tests/models/core_test.py +++ b/tests/unit_tests/models/core_test.py @@ -207,3 +207,8 @@ def test_dttm_sql_literal( result: str, ) -> None: assert SqlaTable(database=database).dttm_sql_literal(dttm, col) == result + + +def test_table_column_database() -> None: + database = Database(database_name="db") + assert TableColumn(database=database).database is database
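
Taken together, these hunks land a handful of behavioral changes worth summarizing. The dominant one is the consolidation of the per-domain DAO packages into a single `superset.daos` package, with `EmbeddedDAO` renamed to `EmbeddedDashboardDAO` along the way. The sketch below is a quick reference compiled only from the import hunks above; the commented lines show the pre-refactor locations.

    # Old locations (removed in this change), shown for comparison:
    #   from superset.dashboards.dao import DashboardDAO
    #   from superset.embedded.dao import EmbeddedDAO
    #   from superset.datasets.dao import DatasetDAO
    #   from superset.datasource.dao import DatasourceDAO
    #   from superset.databases.dao import DatabaseDAO
    #   from superset.databases.ssh_tunnel.dao import SSHTunnelDAO
    #   from superset.queries.dao import QueryDAO
    #   from superset.charts.dao import ChartDAO
    #   from superset.tags.dao import TagDAO
    #   from superset.dao.exceptions import DAOCreateFailedError

    # New locations:
    from superset.daos.chart import ChartDAO
    from superset.daos.dashboard import DashboardDAO, EmbeddedDashboardDAO
    from superset.daos.database import DatabaseDAO, SSHTunnelDAO
    from superset.daos.dataset import DatasetDAO
    from superset.daos.datasource import DatasourceDAO
    from superset.daos.exceptions import DAOCreateFailedError, DatasourceNotFound
    from superset.daos.query import QueryDAO
    from superset.daos.tag import TagDAO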
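
Because `unittest.mock.patch` replaces a name where it is looked up, every patch target had to move together with the modules. A minimal sketch of the updated pattern, with the test body elided:

    from unittest.mock import patch

    # Patching the new module path intercepts the DAO lookup performed by the
    # command under test; the old "superset.databases.dao..." target would no
    # longer have any effect after the move.
    @patch("superset.daos.database.DatabaseDAO.find_by_id")
    def test_database_tables_list_with_unknown_database(mock_find_by_id):
        mock_find_by_id.return_value = None
        # ... assert the command raises "Database not found."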
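
The Log API hunks drop the trailing user-id segment from the recent-activity URL, so the endpoint now scopes results to the calling user. A sketch of the updated call pattern, using the same rison-encoded `q` argument as the tests above (`client` here stands in for any logged-in Flask test client):

    import prison

    # Old: f"api/v1/log/recent_activity/{admin_user.id}/"
    uri = "api/v1/log/recent_activity/"
    rv = client.get(uri)

    # Filtering and pagination still go through the rison-encoded `q` param:
    arguments = {"actions": ["dashboard"], "distinct": False, "page": 0, "page_size": 2}
    rv = client.get(f"{uri}?q={prison.dumps(arguments)}")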
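
The `TestEmbeddedDashboardDAO` hunks pin down an upsert contract worth calling out: repeated upserts replace `allowed_domains` in place while keeping the embedded configuration's uuid stable. Condensed from the test above (`dash` is assumed to be an existing `Dashboard` instance):

    from superset.daos.dashboard import EmbeddedDashboardDAO

    embedded = EmbeddedDashboardDAO.upsert(dash, ["test.example.com"])
    original_uuid = embedded.uuid

    # A second upsert swaps the domain list without issuing a new uuid:
    embedded = EmbeddedDashboardDAO.upsert(dash, [])
    assert embedded.allowed_domains == []
    assert embedded.uuid == original_uuid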
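
The two new RBAC tests establish the by-uuid access rule for draft dashboards: with no roles attached, an authenticated low-privilege user can still fetch the dashboard by uuid (200); once roles are attached, non-member users are rejected (403). A condensed restatement:

    # `dashboard` is an unpublished (draft) dashboard; the client is logged in
    # as the low-privilege "gamma" user, as in the tests above.
    rv = client.get(f"api/v1/dashboard/{dashboard.uuid}")
    expected = 200 if not dashboard.roles else 403
    assert rv.status_code == expected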
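
The `ssh_test.py` hunk adds a second tunnel timeout knob; per its assertions, `SSH_TUNNEL_TIMEOUT_SEC` feeds `sshtunnel.TUNNEL_TIMEOUT` and the new `SSH_TUNNEL_PACKET_TIMEOUT_SEC` feeds `sshtunnel.SSH_TIMEOUT`. A sketch of the corresponding config entries, assuming they live in `superset_config.py` like other Superset settings (values are the ones used in the test):

    # superset_config.py
    SSH_TUNNEL_TIMEOUT_SEC = 123.0         # applied to sshtunnel.TUNNEL_TIMEOUT
    SSH_TUNNEL_PACKET_TIMEOUT_SEC = 321.0  # applied to sshtunnel.SSH_TIMEOUT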
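
Finally, the new assertions in `model_tests.py` and `core_test.py` document that `TableColumn` now carries a `database` reference, either passed explicitly at construction or resolved through its parent table. A condensed restatement of both checks:

    from superset.connectors.sqla.models import TableColumn
    from superset.models.core import Database

    database = Database(database_name="db")
    # Explicitly constructed column keeps the exact instance it was given:
    assert TableColumn(database=database).database is database
    # A column attached to a table resolves to that table's database, e.g.:
    #   tbl.get_column("ds").database is tbl.database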