Skip to content

Commit

Permalink
[Bug] Fix synced time filter loaded value not saved (#892) (#2706)
Browse files Browse the repository at this point in the history
- Move adjustValueToFilterDomain out; only call it when all added datasets are validated

Signed-off-by: Ihor Dykhta <dikhta.igor@gmail.com>
Co-authored-by: Shan He <heshan0131@gmail.com>
  • Loading branch information
igorDykhta and heshan0131 authored Oct 21, 2024
1 parent e5fe97b commit b258e8a
Show file tree
Hide file tree
Showing 5 changed files with 71 additions and 15 deletions.
9 changes: 6 additions & 3 deletions src/utils/src/filter-utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ export function validateFilter<K extends KeplerTableModel<K, L>, L>(
return failed;
}

updatedFilter.value = adjustValueToFilterDomain(filter.value, updatedFilter);
// don't adjust the value yet — wait until all datasets are loaded
updatedFilter.view = filter.view ?? updatedFilter.view;

if (updatedFilter.value === null) {
Expand Down Expand Up @@ -1118,12 +1118,14 @@ export function validateFiltersUpdateDatasets<
applyToDatasets: string[];
augmentedDatasets: {[datasetId: string]: any};
}>(
(acc, datasetId) => {
(acc, datasetId, idx) => {

Check warning on line 1121 in src/utils/src/filter-utils.ts

View workflow job for this annotation

GitHub Actions / build (18.x)

'idx' is defined but never used
const dataset = updatedDatasets[datasetId];
const layers = state.layers.filter(l => l.config.dataId === dataset.id);
const toValidate = acc.validatedFilter || filterToValidate;

const {filter: updatedFilter, dataset: updatedDataset} = validateFilterWithData(
acc.augmentedDatasets[datasetId] || dataset,
acc.validatedFilter || filterToValidate,
toValidate,
layers
);

Expand Down Expand Up @@ -1151,6 +1153,7 @@ export function validateFiltersUpdateDatasets<
);

if (validatedFilter && isEqual(datasetIds, applyToDatasets)) {
validatedFilter.value = adjustValueToFilterDomain(filterToValidate.value, validatedFilter);
validated.push(updateFilterPlot(datasets, validatedFilter));
updatedDatasets = {
...updatedDatasets,
Expand Down
8 changes: 7 additions & 1 deletion test/fixtures/test-csv-data.js
Original file line number Diff line number Diff line change
Expand Up @@ -758,7 +758,7 @@ export const expectedSyncedTsFilter = {
// 2016-09-17 00:09:55 // 1474070995000
// 2016-09-17 00:30:08 // 1474072208000
domain: [1474070995000, 1474072208000],
value: [1474071116000, 1474071677000],
value: [1474071116000, 1474072188000],
animationWindow: 'free',
defaultTimeFormat: 'L LTS',
view: 'enlarged',
Expand Down Expand Up @@ -1293,9 +1293,15 @@ export const testAllData = [

// First 20 rows of the shared CSV fixture; serves as the first dataset
// in the synced-time-filter tests.
export const testCsvDataSlice1 = testAllData.slice(0, 20);
export const testCsvDataSlice1Id = 'test-csv-data-1';
// [min, max] of the time field within slice 1, in epoch milliseconds
export const timeFieldDomainSlice1 = [1474070995000, 1474072115000];

// Rows 5..end of the fixture; overlaps slice 1 so the two datasets share
// part of their time domain.
export const testCsvDataSlice2 = testAllData.slice(5);
// [min, max] of the time field within slice 2, in epoch milliseconds
export const timeFieldDomainSlice2 = [1474071301000, 1474072208000];
export const testCsvDataSlice2Id = 'test-csv-data-2';

// Synced filter value: a range that intersects the time domains of both slices
export const syncTimeFilterValue = [1474071116000, 1474072188000];

export default data;

// geojson layer from wktcsv
Expand Down
10 changes: 9 additions & 1 deletion test/helpers/mock-state.js
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,8 @@ import {
testCsvDataSlice1,
testCsvDataSlice1Id,
testCsvDataSlice2,
testCsvDataSlice2Id
testCsvDataSlice2Id,
syncTimeFilterValue
} from '../fixtures/test-csv-data';
import testLayerData from '../fixtures/test-layer-data';

Expand Down Expand Up @@ -185,6 +186,13 @@ function mockStateWithSyncedTimeFilter() {
{
action: VisStateActions.setFilter,
payload: [0, ['dataId', 'name'], ['test-csv-data-2', 'gps_data.utc_timestamp'], 1]
},

// set the synced filter's value
{
action: VisStateActions.setFilter,
// ['2016-09-17 00:11:56', '2016-09-17 00:21:17']
payload: [0, ['value'], [syncTimeFilterValue]]
}
]);

Expand Down
41 changes: 40 additions & 1 deletion test/node/reducers/vis-state-merger-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ import {
StateWFilters,
StateWMultiFilters,
StateWFilesFiltersLayerColor,
StateWSyncedTimeFilter,
StateWSplitMaps,
testCsvDataId,
testGeoJsonDataId,
Expand All @@ -87,7 +88,8 @@ import {
epochFilterProps,
mergedTimeFilter,
mergedDateFilter,
mergedEpochFilter
mergedEpochFilter,
expectedSyncedTsFilter
} from 'test/fixtures/test-csv-data';

import {
Expand Down Expand Up @@ -1712,6 +1714,43 @@ test('VisStateMerger.v1 -> mergeFilters -> multiFilters', t => {
t.end();
});

test('VisStateMerger.v1 -> mergeFilters -> syncedFilters', t => {
  // Save an app state containing one time filter synced across two datasets
  const savedState = cloneDeep(StateWSyncedTimeFilter);
  const {datasets, config} = SchemaManager.save(savedState);

  t.equal(datasets.length, 2, 'should save 2 datasets');

  // Apply the saved config to a fresh state: with no data present the filter
  // cannot be instantiated and must be parked for later merging
  const stateWithConfig = coreReducer(InitialState, addDataToMap({config}));

  t.equal(stateWithConfig.visState.filters.length, 0, 'should not load filter without data');
  t.equal(
    stateWithConfig.visState.filterToBeMerged.length,
    1,
    'should save filter to filterToBeMerged'
  );

  const parsed = SchemaManager.parseSavedData(datasets);

  // Add the first dataset only — the synced filter still lacks its second dataset
  const afterFirst = coreReducer(stateWithConfig, addDataToMap({datasets: parsed[0]}));
  t.equal(Object.keys(afterFirst.visState.datasets).length, 1, 'should load 1 dataset');
  t.equal(afterFirst.visState.filters.length, 0, 'should not load filter without all datasets');

  // Add the second dataset — now the parked filter is merged and validated
  const afterSecond = coreReducer(afterFirst, addDataToMap({datasets: parsed[1]}));
  t.equal(Object.keys(afterSecond.visState.datasets).length, 2, 'should load 2 datasets');
  t.equal(
    afterSecond.visState.filters.length,
    1,
    'should load filter when all datasets are ready'
  );

  cmpFilters(t, expectedSyncedTsFilter, afterSecond.visState.filters[0]);

  t.end();
});

test('VisStateMerger -> import polygon filter map', t => {
const oldState = cloneDeep(InitialState);
const savedConfig = cloneDeep(polygonFilterMap);
Expand Down
18 changes: 9 additions & 9 deletions test/node/reducers/vis-state-test.js
Original file line number Diff line number Diff line change
Expand Up @@ -2366,11 +2366,11 @@ test('#visStateReducer -> SET_FILTER synced', t => {
changedFilters: {
dynamicDomain: null,
fixedDomain: {
[filterId]: 'dataId_changed'
[filterId]: 'value_changed'
},
cpu: null,
gpu: {
[filterId]: 'dataId_changed'
[filterId]: 'value_changed'
}
},
filterRecord: {
Expand All @@ -2391,7 +2391,7 @@ test('#visStateReducer -> SET_FILTER synced', t => {
},
gpuFilter: {
filterRange: [
[121000, 682000],
[121000, 1193000],
[0, 0],
[0, 0],
[0, 0]
Expand Down Expand Up @@ -2422,11 +2422,11 @@ test('#visStateReducer -> SET_FILTER synced', t => {
changedFilters: {
dynamicDomain: null,
fixedDomain: {
[filterId]: 'added'
[filterId]: 'value_changed'
},
cpu: null,
gpu: {
[filterId]: 'added'
[filterId]: 'value_changed'
}
},
filterRecord: {
Expand All @@ -2447,7 +2447,7 @@ test('#visStateReducer -> SET_FILTER synced', t => {
},
gpuFilter: {
filterRange: [
[121000, 682000],
[121000, 1193000],
[0, 0],
[0, 0],
[0, 0]
Expand Down Expand Up @@ -2495,7 +2495,7 @@ test('#visStateReducer -> SET_FILTER synced -> remove 1', t => {
// reset domain
domain: [1474071301000, 1474072208000],
// adjust value
value: [1474071301000, 1474071677000],
value: [1474071301000, 1474072188000],
gpuChannel: [0],
timeBins: {
'test-csv-data-2': expectedSyncedTsFilter.timeBins['test-csv-data-2']
Expand Down Expand Up @@ -2575,7 +2575,7 @@ test('#visStateReducer -> SET_FILTER synced -> remove 1', t => {
},
gpuFilter: {
filterRange: [
[0, 376000],
[0, 887000],
[0, 0],
[0, 0],
[0, 0]
Expand Down Expand Up @@ -3498,7 +3498,7 @@ test('#visStateReducer -> REMOVE_DATASET w synced filter', t => {
// reset domain
domain: [1474071301000, 1474072208000],
// adjust value
value: [1474071301000, 1474071677000],
value: [1474071301000, 1474072188000],
gpuChannel: [0],
timeBins: {
'test-csv-data-2': expectedSyncedTsFilter.timeBins['test-csv-data-2']
Expand Down

0 comments on commit b258e8a

Please sign in to comment.