Skip to content
This repository has been archived by the owner on Jan 23, 2023. It is now read-only.

Commit

Permalink
fix(Search): closes #258
Browse files Browse the repository at this point in the history
- modified the index ref to use _HASH_KEY
- perform the search lookup by _HASH_KEY
- moved hashing earlier in the lifecycle
  • Loading branch information
rashley-iqt committed Mar 6, 2019
1 parent b95914c commit 2a4eeb7
Show file tree
Hide file tree
Showing 6 changed files with 72 additions and 25 deletions.
16 changes: 6 additions & 10 deletions src/domain/dataset.js
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,10 @@ const applyHashes = (dataset, configuration) => {
if(!('CRVIZ' in i)){
i['CRVIZ'] = {};
}
if(!configuration.hashFields){
configuration.hashFields = getHashFields(configuration.fields, configuration.ignoredFields || []);
}

addHashKey(configuration.keyFields.length > 0 ? configuration.keyFields : configuration.hashFields, i);
addHashWithoutIgnored(configuration.hashFields, i);
});
Expand Down Expand Up @@ -192,14 +196,7 @@ const reducer = handleActions(
{
[setDatasets]: (state, { payload }) => {
const datasets = payload.datasets;
const keyFields = payload.keyFields || getKeyFields(state);
const ignoredFields = payload.ignoredFields || getIgnoredFields(state);

Object.keys(datasets).forEach((owner) =>{
const dataset = datasets[owner].dataset;
const initialConfig = datasets[owner].configuration;
state.datasets[owner] = configureDataset(dataset, initialConfig, keyFields, ignoredFields);
})
state.datasets = datasets

return { ...state};
},
Expand Down Expand Up @@ -273,7 +270,6 @@ const reducer = handleActions(
[setKeyFields]: (state, { payload }) => {
const keyFields = payload;
const datasets = _selectDatasets(state);

if(datasets){
Object.keys(datasets).forEach((key) => {
const ds = datasets[key];
Expand Down Expand Up @@ -419,4 +415,4 @@ export default reducer;

export { setDatasets, setDataset, selectDataset, selectDatasets, removeDataset, setFilteredDataset, selectFilteredDataset, removeFilteredDataset, selectConfiguration, selectMergedConfiguration,
selectValues, selectMergedValues, getFieldId, configurationFor, setIsFetching, getIsFetching, setKeyFields, getKeyFields, setIgnoredFields, getIgnoredFields, getHashFields, getLastUpdated,
valuesFor, setDatasetDiff, removeDatasetDiff, selectDatasetDiff, selectDatasetIntersection, applyHashes };
valuesFor, setDatasetDiff, removeDatasetDiff, selectDatasetDiff, selectDatasetIntersection, applyHashes, configureDataset };
4 changes: 2 additions & 2 deletions src/epics/index-dataset-epic.js
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ const flattenDataset = (ds, cfg) => {
return flattened;

for(var key in ds){
var item = {'id':key};
var item = {'CRVIZ_HASH_KEY':ds[key].CRVIZ["_HASH_KEY"]};
for(var f in cfg.fields){
var field = cfg.fields[f];

Expand All @@ -89,7 +89,7 @@ const generateIndex = (payload) => {
? payload.datasets[owner].configuration : configurationFor(dataset);
var flat = flattenDataset(dataset, configuration);
const idx = lunr(function () {
this.ref('id');
this.ref('CRVIZ_HASH_KEY');
if(configuration && configuration.fields){
const filteredFields = configuration.fields.filter(f => !f.displayName.includes("/"))
filteredFields.map((field) => { return this.field(field.displayName.toLowerCase()); })
Expand Down
8 changes: 7 additions & 1 deletion src/epics/load-dataset-epic.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ import { isNil, is } from "ramda";
import { buildIndices } from './index-dataset-epic';

import { setError } from "domain/error"
import { setDatasets, setKeyFields, setIgnoredFields } from "domain/dataset";
import { setDatasets, setKeyFields, setIgnoredFields, configureDataset } from "domain/dataset";
import { setControls } from "domain/controls";

const loadDataset = createAction("LOAD_DATASET");
Expand Down Expand Up @@ -123,6 +123,12 @@ const formatPayload = (data) => {
throw ValidationError('Data in invalid format');
}

Object.keys(final).forEach((owner) =>{
const dataset = final[owner].dataset;
const initialConfig = final[owner].configuration;
final[owner] = configureDataset(dataset, initialConfig, keyFields, ignoredFields);
})

data = {
'datasets': includeData ? final : {},
'keyFields': includeData ? keyFields : {},
Expand Down
23 changes: 15 additions & 8 deletions src/epics/load-dataset-epic.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import configureMockStore from 'redux-mock-store';
import { createEpicMiddleware } from 'redux-observable';

import rootEpic from './root-epic'
import { setDatasets } from 'domain/dataset'
import { setDatasets, applyHashes, configurationFor } from 'domain/dataset'
import { loadDataset, CSVconvert } from "./load-dataset-epic"
import { fromJson } from "./upload-dataset-epic"

Expand All @@ -25,9 +25,6 @@ describe("loadDatasetEpic", () => {
epicMiddleware.run(rootEpic);
});

afterEach(() => {

});
describe("loading various datasets", () => {
it("loads the dataset with default config", (done) => {
const owner = uuidv4();
Expand All @@ -53,29 +50,38 @@ describe("loadDatasetEpic", () => {
const configuration = {
fields: [
{ path: ["uid"], displayName: "UID", groupable: true },
{ path: ["role", "role"], displayName: "Role", groupable: false }
{ path: ["role", "role"], displayName: "Role", groupable: false },
{ path: ["role", "confidence"], displayName: "Role.confidence", groupable: false }
]
};

const action$ = loadDataset({ 'owner': owner, 'content': { 'dataset': dataset, 'configuration': configuration} });
store.dispatch(action$);
let typeToCheck = setDatasets.toString();
expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset).to.equal(dataset);
expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].configuration).to.equal(configuration);
expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].configuration.fields).to.deep.equal(configuration.fields);

done();
});

it("loads a simple object", (done) => {
const owner = uuidv4();
const data = { uid: "uid1", role: { role: "role", confidence: 80 } };
const expected_data = {
uid: "uid1",
role: { role: "role", confidence: 80 },
CRVIZ: {
'_HASH_KEY': "uid1:role:80:",
'_HASH_WITHOUT_IGNORED': "uid1|role|80|"
}
};

const action$ = loadDataset({ 'owner': owner, 'content': data });
store.dispatch(action$);
let typeToCheck = setDatasets.toString();

expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset.length).to.equal(1);
expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset[0]).to.deep.equal(data);
expect(store.getActions().filter(a => a.type === typeToCheck)[0].payload.datasets[owner].dataset[0]).to.deep.equal(expected_data);

done();
});
Expand All @@ -87,7 +93,8 @@ describe("loadDatasetEpic", () => {
{ uid: "uid1", role: { role: "role", confidence: 80 } },
{ uid: "uid2", role: { role: "role", confidence: 80 } }
];

const config = configurationFor(data, [], []);
applyHashes(data, config);
const action$ = loadDataset({ 'owner': owner, 'content': data });
store.dispatch(action$);
expect(store.getState()).to.deep.equal(initialState);
Expand Down
7 changes: 4 additions & 3 deletions src/epics/search-dataset-epic.js
Original file line number Diff line number Diff line change
Expand Up @@ -51,9 +51,10 @@ const performSearch = (data) => {

data.dataset.forEach((el) => { el.CRVIZ._isSearchResult = false; });
results.forEach((r) => {
if(data.dataset[r.ref]){
data.dataset[r.ref].CRVIZ._isSearchResult = true;
data.results.push(data.dataset[r.ref]);
const res = data.dataset.find(i => i.CRVIZ["_HASH_KEY"] === r.ref);
if(res){
res.CRVIZ._isSearchResult = true;
data.results.push(res);
}
});
};
Expand Down
39 changes: 38 additions & 1 deletion src/features/visualization/Visualization.module.css
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,22 @@
}

/* "Changed" nodes: loop the `delta` (triangle) clip animation on the
   node circle every 2s, with a WebKit-prefixed copy for older engines. */
.viz :global(.viz-node.viz-isChanged) circle {
  -webkit-animation: delta 2s infinite;
  animation: delta 2s infinite;
}

/* WebKit-prefixed copy of @keyframes delta: clip the circle to a
   triangle at 0%, then release the clip for the rest of the cycle.
   Fix: removed the stray comma after the 0% block's closing brace —
   it is invalid CSS and can cause the whole keyframes rule to be
   discarded by the parser. */
@-webkit-keyframes delta {
  0% {
    -webkit-clip-path: polygon(50% 0%, 0% 100%, 100% 100%);
    clip-path: polygon(50% 0%, 0% 100%, 100% 100%);
  }
  /* 100% is intentionally empty: the element reverts to its un-clipped state. */
  100% {
  }
}

@keyframes delta {
0% {
-webkit-clip-path: polygon(50% 0%, 0% 100%, 100% 100%);
Expand All @@ -34,12 +47,23 @@
}

/* "Added" nodes: loop the `plus` (cross-shaped) clip animation on the
   node circle every 2s, with a WebKit-prefixed copy for older engines. */
.viz :global(.viz-node.viz-isAdded) circle {
  -webkit-animation: plus 2s infinite;
  animation: plus 2s infinite;
}

/* WebKit-prefixed copy of @keyframes plus: clip the circle to a
   plus/cross shape at 0%, then release the clip for the rest of the cycle.
   Fixes: removed the stray comma after the 0% block's closing brace
   (invalid CSS), and added the missing -webkit-clip-path declaration so
   this prefixed rule matches its siblings (delta/minus) and the
   unprefixed @keyframes plus. */
@-webkit-keyframes plus {
  0% {
    -webkit-clip-path: polygon(0% 33%, 33% 33%, 33% 0%, 66% 0%, 66% 33%, 100% 33%, 100% 66%, 66% 66%, 66% 100%, 33% 100%, 33% 66%, 0% 66%);
    clip-path: polygon(0% 33%, 33% 33%, 33% 0%, 66% 0%, 66% 33%, 100% 33%, 100% 66%, 66% 66%, 66% 100%, 33% 100%, 33% 66%, 0% 66%);
  }
  /* 100% is intentionally empty: the element reverts to its un-clipped state. */
  100% {
  }
}

@keyframes plus {
0% {
-webkit-clip-path: polygon(0% 33%, 33% 33%, 33% 0%, 66% 0%, 66% 33%, 100% 33%, 100% 66%, 66% 66%, 66% 100%, 33% 100%, 33% 66%, 0% 66%);
clip-path: polygon(0% 33%, 33% 33%, 33% 0%, 66% 0%, 66% 33%, 100% 33%, 100% 66%, 66% 66%, 66% 100%, 33% 100%, 33% 66%, 0% 66%);
},
100% {
Expand All @@ -48,9 +72,22 @@
}

/* "Removed" nodes: loop the `minus` (horizontal-bar) clip animation on
   the node circle every 2s, with a WebKit-prefixed copy for older engines. */
.viz :global(.viz-node.viz-isRemoved) circle {
  -webkit-animation: minus 2s infinite;
  animation: minus 2s infinite;
}

/* WebKit-prefixed copy of @keyframes minus: clip the circle to a
   horizontal bar at 0%, then release the clip for the rest of the cycle.
   Fix: removed the stray comma after the 0% block's closing brace —
   it is invalid CSS and can cause the whole keyframes rule to be
   discarded by the parser. */
@-webkit-keyframes minus {
  0% {
    -webkit-clip-path: polygon(0% 33%, 100% 33%, 100% 66%, 0% 66%);
    clip-path: polygon(0% 33%, 100% 33%, 100% 66%, 0% 66%);
  }
  /* 100% is intentionally empty: the element reverts to its un-clipped state. */
  100% {
  }
}

@keyframes minus {
0% {
-webkit-clip-path: polygon(0% 33%, 100% 33%, 100% 66%, 0% 66%);
Expand Down

0 comments on commit 2a4eeb7

Please sign in to comment.