Skip to content

Commit

Permalink
refactor: convert DatasetList schema filter to use new distinct api (a…
Browse files Browse the repository at this point in the history
  • Loading branch information
nytai authored and auxten committed Nov 20, 2020
1 parent 9564d4b commit 3892ac0
Show file tree
Hide file tree
Showing 5 changed files with 55 additions and 52 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ const store = mockStore({});

const datasetsInfoEndpoint = 'glob:*/api/v1/dataset/_info*';
const datasetsOwnersEndpoint = 'glob:*/api/v1/dataset/related/owners*';
const datasetsSchemaEndpoint = 'glob:*/api/v1/dataset/distinct/schema*';
const databaseEndpoint = 'glob:*/api/v1/dataset/related/database*';
const datasetsEndpoint = 'glob:*/api/v1/dataset/?*';

Expand All @@ -57,6 +58,9 @@ fetchMock.get(datasetsInfoEndpoint, {
fetchMock.get(datasetsOwnersEndpoint, {
result: [],
});
fetchMock.get(datasetsSchemaEndpoint, {
result: [],
});
fetchMock.get(datasetsEndpoint, {
result: mockdatasets,
dataset_count: 3,
Expand Down Expand Up @@ -97,12 +101,20 @@ describe('DatasetList', () => {

it('fetches data', () => {
const callsD = fetchMock.calls(/dataset\/\?q/);
expect(callsD).toHaveLength(2);
expect(callsD[1][0]).toMatchInlineSnapshot(
expect(callsD).toHaveLength(1);
expect(callsD[0][0]).toMatchInlineSnapshot(
`"http://localhost/api/v1/dataset/?q=(order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)"`,
);
});

it('fetches owner filter values', () => {
expect(fetchMock.calls(/dataset\/related\/owners/)).toHaveLength(1);
});

it('fetches schema filter values', () => {
expect(fetchMock.calls(/dataset\/distinct\/schema/)).toHaveLength(1);
});

it('shows/hides bulk actions when bulk actions is clicked', async () => {
await waitForComponentToPaint(wrapper);
const button = wrapper.find(Button).at(0);
Expand Down
48 changes: 11 additions & 37 deletions superset-frontend/src/views/CRUD/data/dataset/DatasetList.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,11 @@ import { SupersetClient } from '@superset-ui/connection';
import { t } from '@superset-ui/translation';
import React, { FunctionComponent, useState, useMemo } from 'react';
import rison from 'rison';
import { createFetchRelated, createErrorHandler } from 'src/views/CRUD/utils';
import {
createFetchRelated,
createFetchDistinct,
createErrorHandler,
} from 'src/views/CRUD/utils';
import { useListViewResource } from 'src/views/CRUD/hooks';
import ConfirmStatusChange from 'src/components/ConfirmStatusChange';
import DatasourceModal from 'src/datasource/DatasourceModal';
Expand Down Expand Up @@ -59,40 +63,6 @@ interface DatasetListProps {
addSuccessToast: (msg: string) => void;
}

// Builds an async option-fetcher for the dataset schema filter dropdown.
// Queries the dataset list endpoint for the `schema` column and reduces
// the response to a de-duplicated list of { label, value } options.
// On request failure, the supplied handler is invoked and an empty
// option list is returned so the filter UI stays usable.
export const createFetchSchemas = (
  handleError: (error: Response) => void,
) => async (filterValue = '', pageIndex?: number, pageSize?: number) => {
  try {
    // Assemble the rison query; only constrain by schema prefix when the
    // user has actually typed a search term.
    const query: Record<string, unknown> = {
      columns: ['schema'],
      keys: ['none'],
      order_by: 'schema',
    };
    if (filterValue) {
      query.filters = [{ col: 'schema', opr: 'sw', value: filterValue }];
    }
    if (pageIndex) {
      query.page = pageIndex;
    }
    if (pageSize) {
      query.page_size = pageSize;
    }

    const { json = {} } = await SupersetClient.get({
      endpoint: `/api/v1/dataset/?q=${rison.encode(query)}`,
    });

    // De-duplicate schema names (first occurrence wins, preserving the
    // server's sort order) and drop empty/null values before building
    // the select options.
    const seen = new Set<string>();
    const options: { label: string; value: string }[] = [];
    (json?.result ?? []).forEach(({ schema }: { schema: string }) => {
      if (schema && !seen.has(schema)) {
        seen.add(schema);
        options.push({ label: schema, value: schema });
      }
    });
    return options;
  } catch (e) {
    handleError(e);
  }
  return [];
};

const DatasetList: FunctionComponent<DatasetListProps> = ({
addDangerToast,
addSuccessToast,
Expand Down Expand Up @@ -393,8 +363,12 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
input: 'select',
operator: 'eq',
unfilteredLabel: 'All',
fetchSelects: createFetchSchemas(errMsg =>
t('An error occurred while fetching schema values: %s', errMsg),
fetchSelects: createFetchDistinct(
'dataset',
'schema',
createErrorHandler(errMsg =>
t('An error occurred while fetching schema values: %s', errMsg),
),
),
paginate: true,
},
Expand Down
7 changes: 5 additions & 2 deletions superset-frontend/src/views/CRUD/utils.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,12 @@ import rison from 'rison';
import getClientErrorObject from 'src/utils/getClientErrorObject';
import { logging } from '@superset-ui/core';

export const createFetchRelated = (
const createFetchResourceMethod = (method: string) => (
resource: string,
relation: string,
handleError: (error: Response) => void,
) => async (filterValue = '', pageIndex?: number, pageSize?: number) => {
const resourceEndpoint = `/api/v1/${resource}/related/${relation}`;
const resourceEndpoint = `/api/v1/${resource}/${method}/${relation}`;

try {
const queryParams = rison.encode({
Expand All @@ -53,6 +53,9 @@ export const createFetchRelated = (
return [];
};

export const createFetchRelated = createFetchResourceMethod('related');
export const createFetchDistinct = createFetchResourceMethod('distinct');

export function createErrorHandler(handleErrorFunc: (errMsg?: string) => void) {
return async (e: SupersetClientResponse | string) => {
const parsedError = await getClientErrorObject(e);
Expand Down
6 changes: 5 additions & 1 deletion superset/views/base_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -415,5 +415,9 @@ def distinct(self, column_name: str, **kwargs: Any) -> FlaskResponse:
# Apply pagination
result = self.datamodel.apply_pagination(query, page, page_size).all()
# produce response
result = [{"text": item[0]} for item in result if item[0] is not None]
result = [
{"text": item[0], "value": item[0]}
for item in result
if item[0] is not None
]
return self.response(200, count=count, result=result)
30 changes: 20 additions & 10 deletions tests/datasets/api_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -192,15 +192,16 @@ def pg_test_query_parameter(query_parameter, expected_response):
"columns", "information_schema", [], get_main_database()
)
)
schema_values = [
"",
"admin_database",
"information_schema",
"public",
"superset",
]
expected_response = {
"count": 5,
"result": [
{"text": ""},
{"text": "admin_database"},
{"text": "information_schema"},
{"text": "public"},
{"text": "superset"},
],
"result": [{"text": val, "value": val} for val in schema_values],
}
self.login(username="admin")
uri = "api/v1/dataset/distinct/schema"
Expand All @@ -213,17 +214,26 @@ def pg_test_query_parameter(query_parameter, expected_response):
query_parameter = {"filter": "inf"}
pg_test_query_parameter(
query_parameter,
{"count": 1, "result": [{"text": "information_schema"}]},
{
"count": 1,
"result": [
{"text": "information_schema", "value": "information_schema"}
],
},
)

query_parameter = {"page": 0, "page_size": 1}
pg_test_query_parameter(
query_parameter, {"count": 5, "result": [{"text": ""}]},
query_parameter, {"count": 5, "result": [{"text": "", "value": ""}]},
)

query_parameter = {"page": 1, "page_size": 1}
pg_test_query_parameter(
query_parameter, {"count": 5, "result": [{"text": "admin_database"}]}
query_parameter,
{
"count": 5,
"result": [{"text": "admin_database", "value": "admin_database"}],
},
)

for dataset in datasets:
Expand Down

0 comments on commit 3892ac0

Please sign in to comment.