diff --git a/src/domain/dataset.js b/src/domain/dataset.js index f552c973f..5a9300a47 100644 --- a/src/domain/dataset.js +++ b/src/domain/dataset.js @@ -60,6 +60,10 @@ const applyHashes = (dataset, configuration) => { if(!('CRVIZ' in i)){ i['CRVIZ'] = {}; } + if(!configuration.hashFields){ + configuration.hashFields = getHashFields(configuration.fields, configuration.ignoredFields || []); + } + addHashKey(configuration.keyFields.length > 0 ? configuration.keyFields : configuration.hashFields, i); addHashWithoutIgnored(configuration.hashFields, i); }); @@ -192,14 +196,7 @@ const reducer = handleActions( { [setDatasets]: (state, { payload }) => { const datasets = payload.datasets; - const keyFields = payload.keyFields || getKeyFields(state); - const ignoredFields = payload.ignoredFields || getIgnoredFields(state); - - Object.keys(datasets).forEach((owner) =>{ - const dataset = datasets[owner].dataset; - const initialConfig = datasets[owner].configuration; - state.datasets[owner] = configureDataset(dataset, initialConfig, keyFields, ignoredFields); - }) + state.datasets = datasets return { ...state}; }, @@ -273,7 +270,6 @@ const reducer = handleActions( [setKeyFields]: (state, { payload }) => { const keyFields = payload; const datasets = _selectDatasets(state); - if(datasets){ Object.keys(datasets).forEach((key) => { const ds = datasets[key]; @@ -419,4 +415,4 @@ export default reducer; export { setDatasets, setDataset, selectDataset, selectDatasets, removeDataset, setFilteredDataset, selectFilteredDataset, removeFilteredDataset, selectConfiguration, selectMergedConfiguration, selectValues, selectMergedValues, getFieldId, configurationFor, setIsFetching, getIsFetching, setKeyFields, getKeyFields, setIgnoredFields, getIgnoredFields, getHashFields, getLastUpdated, - valuesFor, setDatasetDiff, removeDatasetDiff, selectDatasetDiff, selectDatasetIntersection, applyHashes }; + valuesFor, setDatasetDiff, removeDatasetDiff, selectDatasetDiff, selectDatasetIntersection, 
applyHashes, configureDataset }; diff --git a/src/epics/index-dataset-epic.js b/src/epics/index-dataset-epic.js index 5d19bb784..89199ae9b 100644 --- a/src/epics/index-dataset-epic.js +++ b/src/epics/index-dataset-epic.js @@ -69,7 +69,7 @@ const flattenDataset = (ds, cfg) => { return flattened; for(var key in ds){ - var item = {'id':key}; + var item = {'CRVIZ_HASH_KEY':ds[key].CRVIZ["_HASH_KEY"]}; for(var f in cfg.fields){ var field = cfg.fields[f]; @@ -89,7 +89,7 @@ const generateIndex = (payload) => { ? payload.datasets[owner].configuration : configurationFor(dataset); var flat = flattenDataset(dataset, configuration); const idx = lunr(function () { - this.ref('id'); + this.ref('CRVIZ_HASH_KEY'); if(configuration && configuration.fields){ const filteredFields = configuration.fields.filter(f => !f.displayName.includes("/")) filteredFields.map((field) => { return this.field(field.displayName.toLowerCase()); }) diff --git a/src/epics/load-dataset-epic.js b/src/epics/load-dataset-epic.js index 78a1cc7e1..8ee8bdcd2 100644 --- a/src/epics/load-dataset-epic.js +++ b/src/epics/load-dataset-epic.js @@ -7,7 +7,7 @@ import { isNil, is } from "ramda"; import { buildIndices } from './index-dataset-epic'; import { setError } from "domain/error" -import { setDatasets, setKeyFields, setIgnoredFields } from "domain/dataset"; +import { setDatasets, setKeyFields, setIgnoredFields, configureDataset } from "domain/dataset"; import { setControls } from "domain/controls"; const loadDataset = createAction("LOAD_DATASET"); @@ -123,6 +123,12 @@ const formatPayload = (data) => { throw ValidationError('Data in invalid format'); } + Object.keys(final).forEach((owner) =>{ + const dataset = final[owner].dataset; + const initialConfig = final[owner].configuration; + final[owner] = configureDataset(dataset, initialConfig, keyFields, ignoredFields); + }) + data = { 'datasets': includeData ? final : {}, 'keyFields': includeData ? 
keyFields : {}, diff --git a/src/epics/load-dataset-epic.test.js b/src/epics/load-dataset-epic.test.js index cd828dc9e..500ec1983 100644 --- a/src/epics/load-dataset-epic.test.js +++ b/src/epics/load-dataset-epic.test.js @@ -3,7 +3,7 @@ import configureMockStore from 'redux-mock-store'; import { createEpicMiddleware } from 'redux-observable'; import rootEpic from './root-epic' -import { setDatasets } from 'domain/dataset' +import { setDatasets, applyHashes, configurationFor } from 'domain/dataset' import { loadDataset, CSVconvert } from "./load-dataset-epic" import { fromJson } from "./upload-dataset-epic" @@ -25,9 +25,6 @@ describe("loadDatasetEpic", () => { epicMiddleware.run(rootEpic); }); - afterEach(() => { - - }); describe("loading various datasets", () => { it("loads the dataset with default config", (done) => { const owner = uuidv4(); @@ -53,7 +50,8 @@ describe("loadDatasetEpic", () => { const configuration = { fields: [ { path: ["uid"], displayName: "UID", groupable: true }, - { path: ["role", "role"], displayName: "Role", groupable: false } + { path: ["role", "role"], displayName: "Role", groupable: false }, + { path: ["role", "confidence"], displayName: "Role.confidence", groupable: false } ] }; @@ -61,7 +59,7 @@ describe("loadDatasetEpic", () => { store.dispatch(action$); let typeToCheck = setDatasets.toString(); expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset).to.equal(dataset); - expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].configuration).to.equal(configuration); + expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].configuration.fields).to.deep.equal(configuration.fields); done(); }); @@ -69,13 +67,21 @@ describe("loadDatasetEpic", () => { it("loads a simple object", (done) => { const owner = uuidv4(); const data = { uid: "uid1", role: { role: "role", confidence: 80 } }; + const expected_data = { + uid: "uid1", + role: { 
role: "role", confidence: 80 }, + CRVIZ: { + '_HASH_KEY': "uid1:role:80:", + '_HASH_WITHOUT_IGNORED': "uid1|role|80|" + } + }; const action$ = loadDataset({ 'owner': owner, 'content': data }); store.dispatch(action$); let typeToCheck = setDatasets.toString(); expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset.length).to.equal(1); - expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset[0]).to.deep.equal(data); + expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset[0]).to.deep.equal(expected_data); done(); }); @@ -87,7 +93,8 @@ describe("loadDatasetEpic", () => { { uid: "uid1", role: { role: "role", confidence: 80 } }, { uid: "uid2", role: { role: "role", confidence: 80 } } ]; - + const config = configurationFor(data, [], []); + applyHashes(data, config); const action$ = loadDataset({ 'owner': owner, 'content': data }); store.dispatch(action$); expect(store.getState()).to.deep.equal(initialState); diff --git a/src/epics/search-dataset-epic.js b/src/epics/search-dataset-epic.js index 49ae457d4..caf8257d5 100644 --- a/src/epics/search-dataset-epic.js +++ b/src/epics/search-dataset-epic.js @@ -51,9 +51,10 @@ const performSearch = (data) => { data.dataset.forEach((el) => { el.CRVIZ._isSearchResult = false; }); results.forEach((r) => { - if(data.dataset[r.ref]){ - data.dataset[r.ref].CRVIZ._isSearchResult = true; - data.results.push(data.dataset[r.ref]); + const res = data.dataset.find(i => i.CRVIZ["_HASH_KEY"] === r.ref); + if(res){ + res.CRVIZ._isSearchResult = true; + data.results.push(res); } }); }; diff --git a/src/features/visualization/Visualization.module.css b/src/features/visualization/Visualization.module.css index 2ecb53e0e..bb14842d2 100644 --- a/src/features/visualization/Visualization.module.css +++ b/src/features/visualization/Visualization.module.css @@ -20,9 +20,22 @@ } .viz :global(.viz-node.viz-isChanged) circle { + 
-webkit-animation-name: delta; + -webkit-animation-duration: 2s; + -webkit-animation-iteration-count: infinite; animation: delta 2s infinite; } +@-webkit-keyframes delta { + 0% { + -webkit-clip-path: polygon(50% 0%, 0% 100%, 100% 100%); + clip-path: polygon(50% 0%, 0% 100%, 100% 100%); + } + 100% { + + } +} + @keyframes delta { 0% { -webkit-clip-path: polygon(50% 0%, 0% 100%, 100% 100%); @@ -34,12 +47,23 @@ } .viz :global(.viz-node.viz-isAdded) circle { + -webkit-animation-name: plus; + -webkit-animation-duration: 2s; + -webkit-animation-iteration-count: infinite; animation: plus 2s infinite; } +@-webkit-keyframes plus { + 0% { + clip-path: polygon(0% 33%, 33% 33%, 33% 0%, 66% 0%, 66% 33%, 100% 33%, 100% 66%, 66% 66%, 66% 100%, 33% 100%, 33% 66%, 0% 66%); + } + 100% { + + } +} + @keyframes plus { 0% { - -webkit-clip-path: polygon(0% 33%, 33% 33%, 33% 0%, 66% 0%, 66% 33%, 100% 33%, 100% 66%, 66% 66%, 66% 100%, 33% 100%, 33% 66%, 0% 66%); clip-path: polygon(0% 33%, 33% 33%, 33% 0%, 66% 0%, 66% 33%, 100% 33%, 100% 66%, 66% 66%, 66% 100%, 33% 100%, 33% 66%, 0% 66%); }, 100% { @@ -48,9 +72,22 @@ } .viz :global(.viz-node.viz-isRemoved) circle { + -webkit-animation-name: minus; + -webkit-animation-duration: 2s; + -webkit-animation-iteration-count: infinite; animation: minus 2s infinite; } +@-webkit-keyframes minus { + 0% { + -webkit-clip-path: polygon(0% 33%, 100% 33%, 100% 66%, 0% 66%); + clip-path: polygon(0% 33%, 100% 33%, 100% 66%, 0% 66%); + } + 100% { + + } +} + @keyframes minus { 0% { -webkit-clip-path: polygon(0% 33%, 100% 33%, 100% 66%, 0% 66%);