refactor: Repeated boilerplate code between upload to database forms (#16756)

* abstract boilerplate code into class and rename csv to file

* add db migration

* fix some stuff

* more renaming of csv to file

* rename in translations

* update down revision

* update down revision

* bump chart version

* switch to alter column name approach in db migration

* fix db migration for MySQL

* db migration conflict
exemplary-citizen authored Oct 25, 2021
1 parent 4f1d202 commit ef3afbd
Showing 53 changed files with 205 additions and 251 deletions.
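
The shared form class named in the first commit message ("abstract boilerplate code into class and rename csv to file") is not among the file excerpts below. A rough, hypothetical sketch of that pattern (every class, method, and helper name here is an assumption for illustration, not copied from the Superset source) might look like this, with the duplicated logic of the CSV, Excel, and columnar upload forms collapsed into a single base form:

# Hypothetical sketch only: illustrates the commit's stated refactor of moving
# duplicated upload-form logic into a shared base class; names are assumptions.
from flask_appbuilder.forms import DynamicForm


class UploadToDatabaseForm(DynamicForm):
    """Holds checks the per-format upload forms (CSV, Excel, columnar) duplicated."""

    @staticmethod
    def schemas_allowed_for_upload(database) -> list:
        # Per-database allow-list; an empty list means no schema restriction,
        # matching the field description in superset/databases/schemas.py below.
        return (database.get_extra() or {}).get("schemas_allowed_for_file_upload", [])


class CsvToDatabaseForm(UploadToDatabaseForm):
    # Only CSV-specific fields would remain here; shared logic lives in the base.
    pass
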
4 changes: 2 additions & 2 deletions docs/src/pages/docs/installation/kubernetes.mdx
@@ -185,12 +185,12 @@ Data source definitions can be automatically declared by providing key/value yaml
extraConfigs:
datasources-init.yaml: |
databases:
- - allow_csv_upload: true
+ - allow_file_upload: true
allow_ctas: true
allow_cvas: true
database_name: example-db
extra: "{\r\n \"metadata_params\": {},\r\n \"engine_params\": {},\r\n \"\
- metadata_cache_timeout\": {},\r\n \"schemas_allowed_for_csv_upload\": []\r\n\
+ metadata_cache_timeout\": {},\r\n \"schemas_allowed_for_file_upload\": []\r\n\
}"
sqlalchemy_uri: example://example-db.local
tables: []
2 changes: 1 addition & 1 deletion helm/superset/Chart.yaml
@@ -22,7 +22,7 @@ maintainers:
- name: craig-rueda
email: craig@craigrueda.com
url: https://github.com/craig-rueda
- version: 0.3.10
+ version: 0.3.11
dependencies:
- name: postgresql
version: 10.2.0
4 changes: 2 additions & 2 deletions helm/superset/values.yaml
@@ -80,12 +80,12 @@ extraSecretEnv: {}
extraConfigs: {}
# datasources-init.yaml: |
# databases:
- # - allow_csv_upload: true
+ # - allow_file_upload: true
# allow_ctas: true
# allow_cvas: true
# database_name: example-db
# extra: "{\r\n \"metadata_params\": {},\r\n \"engine_params\": {},\r\n \"\
- # metadata_cache_timeout\": {},\r\n \"schemas_allowed_for_csv_upload\": []\r\n\
+ # metadata_cache_timeout\": {},\r\n \"schemas_allowed_for_file_upload\": []\r\n\
# }"
# sqlalchemy_uri: example://example-db.local
# tables: []
@@ -66,7 +66,7 @@ beforeEach(() => {
description_columns: {},
ids: [1, 2],
label_columns: {
- allow_csv_upload: 'Allow Csv Upload',
+ allow_file_upload: 'Allow Csv Upload',
allow_ctas: 'Allow Ctas',
allow_cvas: 'Allow Cvas',
allow_dml: 'Allow Dml',
@@ -88,7 +88,7 @@
id: 'Id',
},
list_columns: [
- 'allow_csv_upload',
+ 'allow_file_upload',
'allow_ctas',
'allow_cvas',
'allow_dml',
@@ -110,7 +110,7 @@
],
list_title: 'List Database',
order_columns: [
- 'allow_csv_upload',
+ 'allow_file_upload',
'allow_dml',
'allow_run_async',
'changed_on',
@@ -121,7 +121,7 @@
],
result: [
{
- allow_csv_upload: false,
+ allow_file_upload: false,
allow_ctas: false,
allow_cvas: false,
allow_dml: false,
@@ -54,7 +54,7 @@ const mockdatabases = [...new Array(3)].map((_, i) => ({
backend: 'postgresql',
allow_run_async: true,
allow_dml: false,
- allow_csv_upload: true,
+ allow_file_upload: true,
expose_in_sqllab: false,
changed_on_delta_humanized: `${i} day(s) ago`,
changed_on: new Date().toISOString,
@@ -267,13 +267,13 @@ function DatabaseList({ addDangerToast, addSuccessToast }: DatabaseListProps) {
size: 'sm',
},
{
- accessor: 'allow_csv_upload',
+ accessor: 'allow_file_upload',
Header: t('CSV upload'),
Cell: ({
row: {
- original: { allow_csv_upload: allowCSVUpload },
+ original: { allow_file_upload: allowFileUpload },
},
- }: any) => <BooleanDisplay value={allowCSVUpload} />,
+ }: any) => <BooleanDisplay value={allowFileUpload} />,
size: 'md',
},
{
@@ -371,9 +371,9 @@ const ExtraOptions = ({
<div className="input-container">
<input
type="text"
name="schemas_allowed_for_csv_upload"
name="schemas_allowed_for_file_upload"
value={(
- db?.extra_json?.schemas_allowed_for_csv_upload || []
+ db?.extra_json?.schemas_allowed_for_file_upload || []
).join(',')}
placeholder="schema1,schema2"
onChange={onExtraInputChange}
@@ -410,9 +410,9 @@
<StyledInputContainer css={{ ...no_margin_bottom }}>
<div className="input-container">
<IndeterminateCheckbox
id="allow_csv_upload"
id="allow_file_upload"
indeterminate={false}
- checked={!!db?.allow_csv_upload}
+ checked={!!db?.allow_file_upload}
onChange={onInputChange}
labelText={t('Allow data upload')}
/>
@@ -237,12 +237,12 @@ function dbReducer(
},
};
}
- if (action.payload.name === 'schemas_allowed_for_csv_upload') {
+ if (action.payload.name === 'schemas_allowed_for_file_upload') {
return {
...trimmedState,
extra_json: {
...trimmedState.extra_json,
- schemas_allowed_for_csv_upload: (action.payload.value || '').split(
+ schemas_allowed_for_file_upload: (action.payload.value || '').split(
',',
),
},
@@ -346,8 +346,8 @@ function dbReducer(
...JSON.parse(action.payload.extra || ''),
metadata_params: JSON.stringify(extra_json?.metadata_params),
engine_params: JSON.stringify(extra_json?.engine_params),
- schemas_allowed_for_csv_upload:
- extra_json?.schemas_allowed_for_csv_upload,
+ schemas_allowed_for_file_upload:
+ extra_json?.schemas_allowed_for_file_upload,
};
}

@@ -412,8 +412,8 @@ const serializeExtra = (extraJson: DatabaseObject['extra_json']) =>
engine_params: JSON.parse(
((extraJson?.engine_params as unknown) as string) || '{}',
),
- schemas_allowed_for_csv_upload: (
- extraJson?.schemas_allowed_for_csv_upload || []
+ schemas_allowed_for_file_upload: (
+ extraJson?.schemas_allowed_for_file_upload || []
).filter(schema => schema !== ''),
});

4 changes: 2 additions & 2 deletions superset-frontend/src/views/CRUD/data/database/types.ts
@@ -72,7 +72,7 @@ export type DatabaseObject = {
// Security
encrypted_extra?: string;
server_cert?: string;
- allow_csv_upload?: boolean;
+ allow_file_upload?: boolean;
impersonate_user?: boolean;
parameters_schema?: Record<string, any>;

@@ -87,7 +87,7 @@
table_cache_timeout?: number; // in Performance
}; // No field, holds schema and table timeout
allows_virtual_table_explore?: boolean; // in SQL Lab
- schemas_allowed_for_csv_upload?: string[]; // in Security
+ schemas_allowed_for_file_upload?: string[]; // in Security
cancel_query_on_windows_unload?: boolean; // in Performance

version?: string;
2 changes: 1 addition & 1 deletion superset/config.py
@@ -863,7 +863,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC( # pylint: disable=invalid-name
UPLOADED_CSV_HIVE_NAMESPACE: Optional[str] = None

# Function that computes the allowed schemas for the CSV uploads.
- # Allowed schemas will be a union of schemas_allowed_for_csv_upload
+ # Allowed schemas will be a union of schemas_allowed_for_file_upload
# db configuration and a result of this function.

# mypy doesn't catch that if case ensures list content being always str
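
The config.py comment above refers to a deployment hook whose result is unioned with each database's schemas_allowed_for_file_upload setting. A minimal sketch of overriding that hook in superset_config.py follows, assuming the option is still named ALLOWED_USER_CSV_SCHEMA_FUNC (the name is not visible in this excerpt):

# Sketch for superset_config.py; the hook name ALLOWED_USER_CSV_SCHEMA_FUNC is
# an assumption based on the comment above, and the policy is illustrative.
from typing import Any, List


def schemas_for_uploads(database: Any, user: Any) -> List[str]:
    # Grant admins an extra "uploads" schema; everyone else relies solely on
    # the database's schemas_allowed_for_file_upload setting.
    return ["uploads"] if "Admin" in {role.name for role in user.roles} else []


ALLOWED_USER_CSV_SCHEMA_FUNC = schemas_for_uploads
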
8 changes: 4 additions & 4 deletions superset/databases/api.py
@@ -104,7 +104,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"cache_timeout",
"expose_in_sqllab",
"allow_run_async",
"allow_csv_upload",
"allow_file_upload",
"configuration_method",
"allow_ctas",
"allow_cvas",
@@ -121,7 +121,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"sqlalchemy_uri",
]
list_columns = [
"allow_csv_upload",
"allow_file_upload",
"allow_ctas",
"allow_cvas",
"allow_dml",
@@ -148,7 +148,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"cache_timeout",
"expose_in_sqllab",
"allow_run_async",
"allow_csv_upload",
"allow_file_upload",
"allow_ctas",
"allow_cvas",
"allow_dml",
@@ -164,7 +164,7 @@

list_select_columns = list_columns + ["extra", "sqlalchemy_uri", "password"]
order_columns = [
"allow_csv_upload",
"allow_file_upload",
"allow_dml",
"allow_run_async",
"changed_on",
10 changes: 5 additions & 5 deletions superset/databases/commands/export.py
@@ -39,11 +39,11 @@ def parse_extra(extra_payload: str) -> Dict[str, Any]:
logger.info("Unable to decode `extra` field: %s", extra_payload)
return {}

- # Fix for DBs saved with an invalid ``schemas_allowed_for_csv_upload``
- schemas_allowed_for_csv_upload = extra.get("schemas_allowed_for_csv_upload")
- if isinstance(schemas_allowed_for_csv_upload, str):
- extra["schemas_allowed_for_csv_upload"] = json.loads(
- schemas_allowed_for_csv_upload
+ # Fix for DBs saved with an invalid ``schemas_allowed_for_file_upload``
+ schemas_allowed_for_file_upload = extra.get("schemas_allowed_for_file_upload")
+ if isinstance(schemas_allowed_for_file_upload, str):
+ extra["schemas_allowed_for_file_upload"] = json.loads(
+ schemas_allowed_for_file_upload
)

return extra
26 changes: 13 additions & 13 deletions superset/databases/schemas.py
@@ -54,7 +54,7 @@
"as a results backend. Refer to the installation docs "
"for more information."
)
- allow_csv_upload_description = (
+ allow_file_upload_description = (
"Allow to upload CSV file data into this database"
"If selected, please set the schemas allowed for csv upload in Extra."
)
@@ -108,9 +108,9 @@
'"table_cache_timeout": 600}**. '
"If unset, cache will not be enabled for the functionality. "
"A timeout of 0 indicates that the cache never expires.<br/>"
"3. The ``schemas_allowed_for_csv_upload`` is a comma separated list "
"3. The ``schemas_allowed_for_file_upload`` is a comma separated list "
"of schemas that CSVs are allowed to upload to. "
- 'Specify it as **"schemas_allowed_for_csv_upload": '
+ 'Specify it as **"schemas_allowed_for_file_upload": '
'["public", "csv_upload"]**. '
"If database flavor does not support schema or any schema is allowed "
"to be accessed, just leave the list empty<br/>"
@@ -355,7 +355,7 @@ class Meta: # pylint: disable=too-few-public-methods
)
expose_in_sqllab = fields.Boolean(description=expose_in_sqllab_description)
allow_run_async = fields.Boolean(description=allow_run_async_description)
- allow_csv_upload = fields.Boolean(description=allow_csv_upload_description)
+ allow_file_upload = fields.Boolean(description=allow_file_upload_description)
allow_ctas = fields.Boolean(description=allow_ctas_description)
allow_cvas = fields.Boolean(description=allow_cvas_description)
allow_dml = fields.Boolean(description=allow_dml_description)
@@ -397,7 +397,7 @@ class Meta: # pylint: disable=too-few-public-methods
)
expose_in_sqllab = fields.Boolean(description=expose_in_sqllab_description)
allow_run_async = fields.Boolean(description=allow_run_async_description)
- allow_csv_upload = fields.Boolean(description=allow_csv_upload_description)
+ allow_file_upload = fields.Boolean(description=allow_file_upload_description)
allow_ctas = fields.Boolean(description=allow_ctas_description)
allow_cvas = fields.Boolean(description=allow_cvas_description)
allow_dml = fields.Boolean(description=allow_dml_description)
@@ -558,23 +558,23 @@ def fix_schemas_allowed_for_csv_upload(
self, data: Dict[str, Any], **kwargs: Any
) -> Dict[str, Any]:
"""
- Fix ``schemas_allowed_for_csv_upload`` being a string.
+ Fix ``schemas_allowed_for_file_upload`` being a string.
Due to a bug in the database modal, some databases might have been
- saved and exported with a string for ``schemas_allowed_for_csv_upload``.
+ saved and exported with a string for ``schemas_allowed_for_file_upload``.
"""
- schemas_allowed_for_csv_upload = data.get("schemas_allowed_for_csv_upload")
- if isinstance(schemas_allowed_for_csv_upload, str):
- data["schemas_allowed_for_csv_upload"] = json.loads(
- schemas_allowed_for_csv_upload
+ schemas_allowed_for_file_upload = data.get("schemas_allowed_for_file_upload")
+ if isinstance(schemas_allowed_for_file_upload, str):
+ data["schemas_allowed_for_file_upload"] = json.loads(
+ schemas_allowed_for_file_upload
)

return data

metadata_params = fields.Dict(keys=fields.Str(), values=fields.Raw())
engine_params = fields.Dict(keys=fields.Str(), values=fields.Raw())
metadata_cache_timeout = fields.Dict(keys=fields.Str(), values=fields.Integer())
- schemas_allowed_for_csv_upload = fields.List(fields.String())
+ schemas_allowed_for_file_upload = fields.List(fields.String())
cost_estimate_enabled = fields.Boolean()


@@ -587,7 +587,7 @@ class ImportV1DatabaseSchema(Schema):
allow_run_async = fields.Boolean()
allow_ctas = fields.Boolean()
allow_cvas = fields.Boolean()
- allow_csv_upload = fields.Boolean()
+ allow_file_upload = fields.Boolean()
extra = fields.Nested(ImportV1DatabaseExtraSchema)
uuid = fields.UUID(required=True)
version = fields.String(required=True)
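
For reference, the description strings above spell out the shape of the database's `extra` JSON blob. A small illustrative payload consistent with those descriptions (the timeout value and schema names are the examples quoted in the help text, not defaults):

import json

# Illustrative `extra` payload assembled from the field descriptions above.
extra = {
    "metadata_params": {},
    "engine_params": {},
    # A timeout of 0 never expires; leaving the key unset disables the cache.
    "metadata_cache_timeout": {"table_cache_timeout": 600},
    # An empty list means any schema may receive uploads; otherwise uploads
    # are restricted to the listed schemas.
    "schemas_allowed_for_file_upload": ["public", "csv_upload"],
}

print(json.dumps(extra, indent=2))
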
48 changes: 48 additions & 0 deletions superset/migrations/versions/b92d69a6643c_rename_csv_to_file.py
@@ -0,0 +1,48 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""rename_csv_to_file
Revision ID: b92d69a6643c
Revises: 32646df09c64
Create Date: 2021-09-19 14:42:20.130368
"""

# revision identifiers, used by Alembic.
revision = "b92d69a6643c"
down_revision = "32646df09c64"

import sqlalchemy as sa
from alembic import op


def upgrade():
with op.batch_alter_table("dbs") as batch_op:
batch_op.alter_column(
"allow_csv_upload",
new_column_name="allow_file_upload",
existing_type=sa.Boolean(),
)


def downgrade():
with op.batch_alter_table("dbs") as batch_op:
batch_op.alter_column(
"allow_file_upload",
new_column_name="allow_csv_upload",
existing_type=sa.Boolean(),
)