diff --git a/superset-frontend/src/features/databases/DatabaseModal/index.tsx b/superset-frontend/src/features/databases/DatabaseModal/index.tsx
index 4a80612369557..059d106fdb389 100644
--- a/superset-frontend/src/features/databases/DatabaseModal/index.tsx
+++ b/superset-frontend/src/features/databases/DatabaseModal/index.tsx
@@ -542,6 +542,7 @@ const DatabaseModal: FunctionComponent<DatabaseModalProps> = ({
     'database',
     t('database'),
     addDangerToast,
+    'connection',
   );
 
   const [tabKey, setTabKey] = useState<string>(DEFAULT_TAB_KEY);
diff --git a/superset-frontend/src/views/CRUD/hooks.ts b/superset-frontend/src/views/CRUD/hooks.ts
index 91af5817b4101..7947494f315bd 100644
--- a/superset-frontend/src/views/CRUD/hooks.ts
+++ b/superset-frontend/src/views/CRUD/hooks.ts
@@ -224,6 +224,7 @@ export function useSingleViewResource<D extends object = any>(
   resourceName: string,
   resourceLabel: string, // resourceLabel for translations
   handleErrorMsg: (errorMsg: string) => void,
+  path_suffix = '',
 ) {
   const [state, setState] = useState<SingleViewResourceState<D>>({
     loading: false,
@@ -242,8 +243,11 @@
         loading: true,
       });
 
+      const baseEndpoint = `/api/v1/${resourceName}/${resourceID}`;
+      const endpoint =
+        path_suffix !== '' ? `${baseEndpoint}/${path_suffix}` : baseEndpoint;
       return SupersetClient.get({
-        endpoint: `/api/v1/${resourceName}/${resourceID}`,
+        endpoint,
       })
         .then(
           ({ json = {} }) => {
diff --git a/superset/constants.py b/superset/constants.py
index 958592b62fb59..e4bad9f8aa728 100644
--- a/superset/constants.py
+++ b/superset/constants.py
@@ -154,6 +154,7 @@ class RouteMethod:  # pylint: disable=too-few-public-methods
     "add_objects": "write",
     "delete_object": "write",
     "copy_dash": "write",
+    "get_connection": "write",
 }
 
 EXTRA_FORM_DATA_APPEND_KEYS = {
diff --git a/superset/databases/api.py b/superset/databases/api.py
index 95b5089652fb5..5445b22351625 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -61,6 +61,7 @@
 from superset.databases.schemas import (
     database_schemas_query_schema,
     database_tables_query_schema,
+    DatabaseConnectionSchema,
     DatabaseFunctionNamesResponse,
     DatabasePostSchema,
     DatabasePutSchema,
@@ -122,6 +123,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
         "validate_sql",
         "delete_ssh_tunnel",
         "schemas_access_for_file_upload",
+        "get_connection",
     }
     resource_name = "database"
     class_permission_name = "Database"
@@ -144,12 +146,6 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
         "driver",
         "force_ctas_schema",
         "impersonate_user",
-        "masked_encrypted_extra",
-        "extra",
-        "parameters",
-        "parameters_schema",
-        "server_cert",
-        "sqlalchemy_uri",
         "is_managed_externally",
         "engine_information",
     ]
@@ -223,6 +219,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
 
     openapi_spec_tag = "Database"
     openapi_spec_component_schemas = (
+        DatabaseConnectionSchema,
         DatabaseFunctionNamesResponse,
         DatabaseSchemaAccessForFileUploadResponse,
         DatabaseRelatedObjectsResponse,
@@ -237,6 +234,50 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
         ValidateSQLResponse,
     )
 
+    @expose("/<int:pk>/connection", methods=("GET",))
+    @protect()
+    @safe
+    def get_connection(self, pk: int) -> Response:
+        """Get database connection info.
+        ---
+        get:
+          summary: >-
+            Get a database connection info
+          parameters:
+          - in: path
+            schema:
+              type: integer
+            description: The database id
+            name: pk
+          responses:
+            200:
+              description: Database with connection info
+              content:
+                application/json:
+                  schema:
+                    $ref: "#/components/schemas/DatabaseConnectionSchema"
+            400:
+              $ref: '#/components/responses/400'
+            401:
+              $ref: '#/components/responses/401'
+            422:
+              $ref: '#/components/responses/422'
+            500:
+              $ref: '#/components/responses/500'
+        """
+        database = DatabaseDAO.find_by_id(pk)
+        database_connection_schema = DatabaseConnectionSchema()
+        response = {
+            "id": pk,
+            "result": database_connection_schema.dump(database, many=False),
+        }
+        try:
+            if ssh_tunnel := DatabaseDAO.get_ssh_tunnel(pk):
+                response["result"]["ssh_tunnel"] = ssh_tunnel.data
+            return self.response(200, **response)
+        except SupersetException as ex:
+            return self.response(ex.status, message=ex.message)
+
     @expose("/<int:pk>", methods=("GET",))
     @protect()
     @safe
diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py
index e1e38667c0330..00e8c3ca5381d 100644
--- a/superset/databases/schemas.py
+++ b/superset/databases/schemas.py
@@ -880,3 +880,86 @@ class DatabaseSchemaAccessForFileUploadResponse(Schema):
             "information"
         },
     )
+
+
+class DatabaseConnectionSchema(Schema):
+    """
+    Schema with database connection information.
+
+    This is only for admins (who have ``can_create`` on ``Database``).
+    """
+
+    allow_ctas = fields.Boolean(metadata={"description": allow_ctas_description})
+    allow_cvas = fields.Boolean(metadata={"description": allow_cvas_description})
+    allow_dml = fields.Boolean(metadata={"description": allow_dml_description})
+    allow_file_upload = fields.Boolean(
+        metadata={"description": allow_file_upload_description}
+    )
+    allow_run_async = fields.Boolean(
+        metadata={"description": allow_run_async_description}
+    )
+    backend = fields.String(
+        allow_none=True, metadata={"description": "SQLAlchemy engine to use"}
+    )
+    cache_timeout = fields.Integer(
+        metadata={"description": cache_timeout_description}, allow_none=True
+    )
+    configuration_method = fields.String(
+        metadata={"description": configuration_method_description},
+    )
+    database_name = fields.String(
+        metadata={"description": database_name_description},
+        allow_none=True,
+        validate=Length(1, 250),
+    )
+    driver = fields.String(
+        allow_none=True, metadata={"description": "SQLAlchemy driver to use"}
+    )
+    engine_information = fields.Dict(keys=fields.String(), values=fields.Raw())
+    expose_in_sqllab = fields.Boolean(
+        metadata={"description": expose_in_sqllab_description}
+    )
+    extra = fields.String(
+        metadata={"description": extra_description}, validate=extra_validator
+    )
+    force_ctas_schema = fields.String(
+        metadata={"description": force_ctas_schema_description},
+        allow_none=True,
+        validate=Length(0, 250),
+    )
+    id = fields.Integer(metadata={"description": "Database ID (for updates)"})
+    impersonate_user = fields.Boolean(
+        metadata={"description": impersonate_user_description}
+    )
+    is_managed_externally = fields.Boolean(allow_none=True, dump_default=False)
+    server_cert = fields.String(
+        metadata={"description": server_cert_description},
+        allow_none=True,
+        validate=server_cert_validator,
+    )
+    uuid = fields.String(required=False)
+    ssh_tunnel = fields.Nested(DatabaseSSHTunnel, allow_none=True)
+    masked_encrypted_extra = fields.String(
+        metadata={"description": encrypted_extra_description},
+        validate=encrypted_extra_validator,
+        allow_none=True,
+    )
+    parameters = fields.Dict(
+        keys=fields.String(),
+        values=fields.Raw(),
+        metadata={"description": "DB-specific parameters for configuration"},
+    )
+    parameters_schema = fields.Dict(
+        keys=fields.String(),
+        values=fields.Raw(),
+        metadata={
+            "description": (
+                "JSONSchema for configuring the database by "
+                "parameters instead of SQLAlchemy URI"
+            ),
+        },
+    )
+    sqlalchemy_uri = fields.String(
+        metadata={"description": sqlalchemy_uri_description},
+        validate=[Length(1, 1024), sqlalchemy_uri_validator],
+    )
diff --git a/tests/unit_tests/databases/api_test.py b/tests/unit_tests/databases/api_test.py
index 251eb6cdd3240..342920bd17dd4 100644
--- a/tests/unit_tests/databases/api_test.py
+++ b/tests/unit_tests/databases/api_test.py
@@ -77,6 +77,7 @@ def test_password_mask(
     Database.metadata.create_all(session.get_bind())  # pylint: disable=no-member
 
     database = Database(
+        uuid=UUID("02feae18-2dd6-4bb4-a9c0-49e9d4f29d58"),
         database_name="my_database",
         sqlalchemy_uri="gsheets://",
         encrypted_extra=json.dumps(
@@ -103,7 +104,9 @@ def test_password_mask(
     mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
     mocker.patch("superset.utils.log.DBEventLogger.log")
 
-    response = client.get("/api/v1/database/1")
+    response = client.get("/api/v1/database/1/connection")
+
+    # check that private key is masked
     assert (
         response.json["result"]["parameters"]["service_account_info"]["private_key"]
         == "XXXXXXXXXX"
@@ -111,6 +114,151 @@ def test_password_mask(
     assert "encrypted_extra" not in response.json["result"]
 
 
+def test_database_connection(
+    mocker: MockFixture,
+    app: Any,
+    session: Session,
+    client: Any,
+    full_api_access: None,
+) -> None:
+    """
+    Test that connection info is only returned in ``api/v1/database/${id}/connection``.
+    """
+    from superset.databases.api import DatabaseRestApi
+    from superset.models.core import Database
+
+    DatabaseRestApi.datamodel.session = session
+
+    # create table for databases
+    Database.metadata.create_all(session.get_bind())  # pylint: disable=no-member
+
+    database = Database(
+        uuid=UUID("02feae18-2dd6-4bb4-a9c0-49e9d4f29d58"),
+        database_name="my_database",
+        sqlalchemy_uri="gsheets://",
+        encrypted_extra=json.dumps(
+            {
+                "service_account_info": {
+                    "type": "service_account",
+                    "project_id": "black-sanctum-314419",
+                    "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173",
+                    "private_key": "SECRET",
+                    "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com",
+                    "client_id": "114567578578109757129",
+                    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+                    "token_uri": "https://oauth2.googleapis.com/token",
+                    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+                    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com",
+                },
+            }
+        ),
+    )
+    session.add(database)
+    session.commit()
+
+    # mock the lookup so that we don't need to include the driver
+    mocker.patch("sqlalchemy.engine.URL.get_driver_name", return_value="gsheets")
+    mocker.patch("superset.utils.log.DBEventLogger.log")
+
+    response = client.get("/api/v1/database/1/connection")
+    assert response.json == {
+        "id": 1,
+        "result": {
+            "allow_ctas": False,
+            "allow_cvas": False,
+            "allow_dml": False,
+            "allow_file_upload": False,
+            "allow_run_async": False,
+            "backend": "gsheets",
+            "cache_timeout": None,
+            "configuration_method": "sqlalchemy_form",
+            "database_name": "my_database",
+            "driver": "gsheets",
+            "engine_information": {
+                "disable_ssh_tunneling": True,
+                "supports_file_upload": False,
+            },
+            "expose_in_sqllab": True,
+            "extra": '{\n    "metadata_params": {},\n    "engine_params": {},\n    "metadata_cache_timeout": {},\n    "schemas_allowed_for_file_upload": []\n}\n',
+            "force_ctas_schema": None,
+            "id": 1,
+            "impersonate_user": False,
+            "is_managed_externally": False,
+            "masked_encrypted_extra": json.dumps(
+                {
+                    "service_account_info": {
+                        "type": "service_account",
+                        "project_id": "black-sanctum-314419",
+                        "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173",
+                        "private_key": "XXXXXXXXXX",
+                        "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com",
+                        "client_id": "114567578578109757129",
+                        "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+                        "token_uri": "https://oauth2.googleapis.com/token",
+                        "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+                        "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com",
+                    }
+                }
+            ),
+            "parameters": {
+                "service_account_info": {
+                    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+                    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+                    "client_email": "google-spreadsheets-demo-servi@black-sanctum-314419.iam.gserviceaccount.com",
+                    "client_id": "114567578578109757129",
+                    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-spreadsheets-demo-servi%40black-sanctum-314419.iam.gserviceaccount.com",
+                    "private_key": "XXXXXXXXXX",
+                    "private_key_id": "259b0d419a8f840056158763ff54d8b08f7b8173",
+                    "project_id": "black-sanctum-314419",
+                    "token_uri": "https://oauth2.googleapis.com/token",
+                    "type": "service_account",
+                }
+            },
+            "parameters_schema": {
+                "properties": {
+                    "catalog": {"type": "object"},
+                    "service_account_info": {
+                        "description": "Contents of GSheets JSON credentials.",
+                        "type": "string",
+                        "x-encrypted-extra": True,
+                    },
+                },
+                "type": "object",
+            },
+            "server_cert": None,
+            "sqlalchemy_uri": "gsheets://",
+            "uuid": "02feae18-2dd6-4bb4-a9c0-49e9d4f29d58",
+        },
+    }
+
+    response = client.get("/api/v1/database/1")
+    assert response.json == {
+        "id": 1,
+        "result": {
+            "allow_ctas": False,
+            "allow_cvas": False,
+            "allow_dml": False,
+            "allow_file_upload": False,
+            "allow_run_async": False,
+            "backend": "gsheets",
+            "cache_timeout": None,
+            "configuration_method": "sqlalchemy_form",
+            "database_name": "my_database",
+            "driver": "gsheets",
+            "engine_information": {
+                "disable_ssh_tunneling": True,
+                "supports_file_upload": False,
+            },
+            "expose_in_sqllab": True,
+            "force_ctas_schema": None,
+            "id": 1,
+            "impersonate_user": False,
+            "is_managed_externally": False,
+            "uuid": "02feae18-2dd6-4bb4-a9c0-49e9d4f29d58",
+        },
+    }
+
+
 @pytest.mark.skip(reason="Works locally but fails on CI")
 def test_update_with_password_mask(
     app: Any,