Merge pull request #4202 from beyondessential/release-2022-40
Release 2022-40
avaek authored Oct 3, 2022
2 parents 99158ef + ac4d58a commit 9dd05c2
Showing 41 changed files with 962 additions and 476 deletions.
2 changes: 2 additions & 0 deletions package.json
@@ -60,7 +60,9 @@
"concurrently": "^5.2.0",
"cypress-dotenv": "^1.2.2",
"eslint": "^7.9.0",
"eslint-import-resolver-babel-module": "^5.3.1",
"eslint-plugin-cypress": "^2.11.1",
"eslint-plugin-module-resolver": "^1.5.0",
"jest": "^27.0.6",
"jest-extended": "^0.11.5",
"jest-when": "^2.7.2",
@@ -14,7 +14,7 @@ export const useSearchDataSources = ({ search, type = 'dataElement', maxResults
const endpoint = stringifyQuery(undefined, `${type}s`, {
columns: JSON.stringify(['code']),
filter: JSON.stringify({
code: { comparator: 'ilike', comparisonValue: `${search}%`, castAs: 'text' },
code: { comparator: 'ilike', comparisonValue: `%${search}%`, castAs: 'text' },
}),
pageSize: maxResults,
});
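For context (not part of the diff): the change above swaps a prefix match for a contains match. A minimal sketch of the difference, assuming the API translates the filter into a PostgreSQL ILIKE comparison, with a hypothetical search term 'cd19':

// Illustrative only
const prefixFilter = { code: { comparator: 'ilike', comparisonValue: 'cd19%', castAs: 'text' } }; // matches codes that start with 'cd19'
const containsFilter = { code: { comparator: 'ilike', comparisonValue: '%cd19%', castAs: 'text' } }; // matches 'cd19' anywhere in the code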
108 changes: 58 additions & 50 deletions packages/admin-panel/src/table/actions.js
@@ -5,6 +5,7 @@

import parseLinkHeader from 'parse-link-header';
import generateId from 'uuid/v1';
import debounce from 'lodash.debounce';

import {
ACTION_CANCEL,
@@ -71,64 +72,71 @@ export const changeSorting = (reduxId, sorting) => ({
reduxId,
});

export const refreshData = (reduxId, endpoint, columns, baseFilter, tableState) => async (
dispatch,
getState,
{ api },
) => {
const { pageIndex, pageSize, filters, sorting } = tableState;

// Set up filter
const filterObject = { ...baseFilter };
filters.forEach(({ id, value }) => {
filterObject[id] = value;
});
const filterString = JSON.stringify(convertSearchTermToFilter(filterObject));
const refreshDataWithDebounce = debounce(
async (reduxId, endpoint, columns, baseFilter, tableState, dispatch, api) => {
const { pageIndex, pageSize, filters, sorting } = tableState;

// Set up sort
const sortObjects = sorting.map(({ id, desc }) => {
return `${id}${desc ? ' DESC' : ' ASC'}`;
});
const sortString = JSON.stringify(sortObjects);
// Set up filter
const filterObject = { ...baseFilter };
filters.forEach(({ id, value }) => {
filterObject[id] = value;
});
const filterString = JSON.stringify(convertSearchTermToFilter(filterObject));

// Set up columns
const columnSources = columns.map(column => column.source);
const columnsString = JSON.stringify(columnSources);
// Set up sort
const sortObjects = sorting.map(({ id, desc }) => {
return `${id}${desc ? ' DESC' : ' ASC'}`;
});
const sortString = JSON.stringify(sortObjects);

// Prepare for request
const fetchId = generateId();
dispatch({
type: DATA_FETCH_REQUEST,
reduxId,
fetchId,
});
// Set up columns
const columnSources = columns.map(column => column.source);
const columnsString = JSON.stringify(columnSources);

try {
const queryParameters = {
page: pageIndex,
pageSize,
columns: columnsString.length > 0 ? columnsString : undefined,
filter: filterString.length > 0 ? filterString : undefined,
sort: sortString.length > 0 ? sortString : undefined,
};
const response = await api.get(endpoint, queryParameters);
const linkHeader = parseLinkHeader(response.headers.get('Link'));
const lastPageNumber = parseInt(linkHeader.last.page, 10);
dispatch({
type: DATA_FETCH_SUCCESS,
reduxId,
data: response.body,
numberOfPages: lastPageNumber,
fetchId,
});
} catch (error) {
// Prepare for request
const fetchId = generateId();
dispatch({
type: DATA_FETCH_ERROR,
type: DATA_FETCH_REQUEST,
reduxId,
errorMessage: error.message,
fetchId,
});
}

try {
const queryParameters = {
page: pageIndex,
pageSize,
columns: columnsString.length > 0 ? columnsString : undefined,
filter: filterString.length > 0 ? filterString : undefined,
sort: sortString.length > 0 ? sortString : undefined,
};
const response = await api.get(endpoint, queryParameters);
const linkHeader = parseLinkHeader(response.headers.get('Link'));
const lastPageNumber = parseInt(linkHeader.last.page, 10);
dispatch({
type: DATA_FETCH_SUCCESS,
reduxId,
data: response.body,
numberOfPages: lastPageNumber,
fetchId,
});
} catch (error) {
dispatch({
type: DATA_FETCH_ERROR,
reduxId,
errorMessage: error.message,
fetchId,
});
}
},
200,
);

export const refreshData = (reduxId, endpoint, columns, baseFilter, tableState) => async (
dispatch,
getState,
{ api },
) => {
return refreshDataWithDebounce(reduxId, endpoint, columns, baseFilter, tableState, dispatch, api);
};
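
For context (illustrative only, not part of the diff): lodash.debounce delays the wrapped function until calls stop arriving for the given wait time, so the refactor above collapses a burst of table-state changes into a single fetch. A minimal sketch of that behaviour:

import debounce from 'lodash.debounce';

// Only the last call in a 200 ms burst runs, so repeated filter or sort updates
// trigger one request instead of one per keystroke.
const fetchLater = debounce(term => console.log('fetching', term), 200);
fetchLater('a');
fetchLater('ab');
fetchLater('abc'); // after ~200 ms of quiet, logs: fetching abc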

export const cancelAction = reduxId => ({
@@ -13,7 +13,7 @@ export const convertSearchTermToFilter = (unprocessedFilterObject = {}) => {

filterObject[key] = {
comparator: `ilike`,
comparisonValue: `${value}%`,
comparisonValue: `%${value}%`,
castAs: 'text',
};
});
@@ -8,8 +8,9 @@ import {
assertSurveyResponsePermissions,
createSurveyResponseDBFilter,
} from './assertSurveyResponsePermissions';
import { assertAnyPermissions, assertBESAdminAccess } from '../../permissions';
import { assertAnyPermissions, assertBESAdminAccess, hasBESAdminAccess } from '../../permissions';
import { assertEntityPermissions } from '../GETEntities';
import { getQueryOptionsForColumns } from '../GETHandler/helpers';

/**
* Handles endpoints:
@@ -48,4 +49,25 @@ export class GETSurveyResponses extends GETHandler {
// Apply regular permissions
return this.getPermissionsFilter(dbConditions, options);
}

async countRecords(criteria) {
// remove conjunction criteria
const columnsInCountQuery = Object.keys(criteria).filter(column => !column.startsWith('_'));

// Always filter by survey permissions and entity permissions for non BES Admin users
// See: createSurveyResponseDBFilter
if (!hasBESAdminAccess(this.accessPolicy)) {
columnsInCountQuery.push('entity.id', 'survey.id');
}

// Only join tables that we are filtering on
const { multiJoin } = getQueryOptionsForColumns(
columnsInCountQuery,
this.recordType,
this.customJoinConditions,
this.defaultJoinType,
);

return this.database.count(this.recordType, criteria, { multiJoin });
}
}
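
For context (illustrative only): the startsWith('_') check above strips conjunction keys out of the criteria so that only real column filters are passed to getQueryOptionsForColumns and drive the joins in the count query. A sketch with a hypothetical _and conjunction key:

// Hypothetical criteria shape, for illustration only
const criteria = { 'entity.country_code': 'FJ', _and: { 'survey.code': 'ABC' } };
const columnsInCountQuery = Object.keys(criteria).filter(column => !column.startsWith('_'));
// => ['entity.country_code']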
@@ -15,7 +15,10 @@ export class MeditrakSyncRecordUpdater {
* @public
*/
async updateSyncRecords(changes) {
return Promise.all(changes.map(change => this.processChange(change)));
for (let i = 0; i < changes.length; i++) {
const change = changes[i];
await this.processChange(change);
}
}

/**
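For context (illustrative only): the updateSyncRecords change above swaps Promise.all for a sequential loop, so each change is fully processed before the next one starts; presumably this keeps processing order deterministic and avoids a large number of concurrent database writes. The same contrast in a standalone sketch, with processChange standing in for this.processChange:

const applyChanges = async (changes, processChange) => {
  // Old approach: all changes start at once; completion order is not guaranteed
  // await Promise.all(changes.map(change => processChange(change)));

  // New approach: one change at a time, in array order
  for (const change of changes) {
    await processChange(change);
  }
};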
2 changes: 1 addition & 1 deletion packages/data-api/scripts/patchMvRefresh.ts
@@ -6,7 +6,7 @@
// @ts-expect-error db-migrate has no types unfortunately
import DBMigrate from 'db-migrate';
import * as dotenv from 'dotenv';
import { requireEnv, getEnvVarOrDefault } from '@tupaia/utils';
import { requireEnv } from '@tupaia/utils';
import { getConnectionConfig } from '@tupaia/database';

dotenv.config(); // Load the environment variables into process.env
@@ -0,0 +1,120 @@
let dbm;
let type;
let seed;

/**
* We receive the dbmigrate dependency from dbmigrate initially.
* This enables us to not have to rely on NODE_PATH.
*/
exports.setup = function (options, seedLink) {
dbm = options.dbmigrate;
type = dbm.dataType;
seed = seedLink;
};

exports.up = async function (db) {
// Remove orphaned outer join details from previous use of broken renameMaterializedView function
await db.runSql(`
DELETE FROM pg$mviews_oj_details
WHERE view_name = 'analytics_tmp'
`);

await db.runSql(`
CREATE OR REPLACE
FUNCTION mv$renameMaterializedView
(
pOldViewName IN TEXT,
pNewViewName IN TEXT,
pOwner IN TEXT DEFAULT USER
)
RETURNS VOID
AS
$BODY$
/* ---------------------------------------------------------------------------------------------------------------------------------
Routine Name: mv$renameMaterializedView
Author: Rohan Port
Date: 17/08/2021
------------------------------------------------------------------------------------------------------------------------------------
Revision History Push Down List
------------------------------------------------------------------------------------------------------------------------------------
Date | Name | Description
------------+---------------+-------------------------------------------------------------------------------------------------------
21/09/2022 | Rohan Port | Fix to rename references in outerjoin details table as well
------------+---------------+-------------------------------------------------------------------------------------------------------
Description: Renames a materialized view, edits the view_name in the pg$mviews table
This function performs the following steps
1) Edits the view_name in the pg$mviews table to be the new name
2) Alters the materialized view table name to be the new name
Arguments: IN pOldViewName The existing name of the materialized view
IN pNewViewName The new name of the materialized view
IN pOwner Optional, the owner of the materialized view, defaults to user
Returns: VOID
************************************************************************************************************************************
Copyright 2021 Beyond Essential Systems Pty Ltd
***********************************************************************************************************************************/
DECLARE
aPgMview pg$mviews;
rConst mv$allConstants;
tUpdatePgMviewsSqlStatement TEXT := '';
tUpdatePgMviewOjDetailsSqlStatement TEXT := '';
tRenameTableSqlStatement TEXT := '';
tRenameIndexSqlStatement TEXT := '';
rIndex RECORD;
tOldIndexName TEXT;
tNewIndexName TEXT;
begin
rConst := mv$buildAllConstants();
tUpdatePgMviewsSqlStatement := rConst.UPDATE_COMMAND || 'pg$mviews' || rConst.SET_COMMAND || 'view_name = '
|| rConst.SINGLE_QUOTE_CHARACTER || pNewViewName || rConst.SINGLE_QUOTE_CHARACTER
|| rConst.WHERE_COMMAND || 'view_name = ' || rConst.SINGLE_QUOTE_CHARACTER || pOldViewName || rConst.SINGLE_QUOTE_CHARACTER;
tUpdatePgMviewOjDetailsSqlStatement := rConst.UPDATE_COMMAND || 'pg$mviews_oj_details' || rConst.SET_COMMAND || 'view_name = '
|| rConst.SINGLE_QUOTE_CHARACTER || pNewViewName || rConst.SINGLE_QUOTE_CHARACTER
|| rConst.WHERE_COMMAND || 'view_name = ' || rConst.SINGLE_QUOTE_CHARACTER || pOldViewName || rConst.SINGLE_QUOTE_CHARACTER;
tRenameTableSqlStatement := rConst.ALTER_TABLE || pOldViewName || rConst.RENAME_TO_COMMAND || pNewViewName;
EXECUTE tUpdatePgMviewsSqlStatement;
EXECUTE tUpdatePgMviewOjDetailsSqlStatement;
EXECUTE tRenameTableSqlStatement;
FOR rIndex IN
SELECT indexname FROM pg_indexes WHERE schemaname = pOwner AND tablename = pNewViewName AND indexname like '%' || rConst.MV_M_ROW$_COLUMN || '%'
LOOP
tOldIndexName := rIndex.indexname;
tNewIndexName := REPLACE(tOldIndexName, pOldViewName, pNewViewName);
tRenameIndexSqlStatement := rConst.ALTER_INDEX || tOldIndexName || rConst.RENAME_TO_COMMAND || tNewIndexName;
execute tRenameIndexSqlStatement;
END LOOP;
RETURN;
EXCEPTION
WHEN OTHERS
THEN
RAISE INFO 'Exception in function mv$renameMaterializedView';
RAISE INFO 'Error %:- %:', SQLSTATE, SQLERRM;
RAISE INFO E'Error Context:% % \n % \n %',CHR(10), tUpdatePgMviewsSqlStatement, tRenameTableSqlStatement, tRenameIndexSqlStatement;
RAISE EXCEPTION '%', SQLSTATE;
END;
$BODY$
LANGUAGE plpgsql
SECURITY DEFINER;
`);
};

exports.down = function (db) {
return null;
};

exports._meta = {
version: 1,
};
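As an illustration only (not part of this migration): once patched, the function is invoked the same way as before; a hypothetical later migration renaming the temporary analytics view might call it like this:

exports.up = async function (db) {
  // Hypothetical example: rename the rebuilt view into place
  await db.runSql(`SELECT mv$renameMaterializedView('analytics_tmp', 'analytics')`);
};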
2 changes: 1 addition & 1 deletion packages/data-api/scripts/pg-mv-fast-refresh
2 changes: 1 addition & 1 deletion packages/database/.babelrc.js
@@ -20,7 +20,7 @@ const getIgnore = api => {
// When building @tupaia/database, babel-cli compiles in advance, so we only want it to bother
// with the last 90 days of migrations, otherwise it takes too long
return [
'src/tests/**',
'src/__tests__/**',
function (filepath) {
const filepathComponents = filepath.split('/');
const filename = filepathComponents.pop();
7 changes: 7 additions & 0 deletions packages/database/jest.config.js
@@ -0,0 +1,7 @@
const baseConfig = require('../../jest.config-js.json');

module.exports = async () => ({
...baseConfig,
rootDir: '.',
setupFilesAfterEnv: ['../../jest.setup.js', './jest.setup.js'],
});
12 changes: 12 additions & 0 deletions packages/database/jest.setup.js
@@ -0,0 +1,12 @@
/**
* Tupaia
* Copyright (c) 2017 - 2020 Beyond Essential Systems Pty Ltd
*/

import { clearTestData, getTestDatabase } from './src/testUtilities';

afterAll(async () => {
const database = getTestDatabase();
await clearTestData(database);
await database.closeConnections();
});
9 changes: 3 additions & 6 deletions packages/database/package.json
@@ -23,10 +23,9 @@
"migrate-create": "scripts/migrateCreate.sh",
"migrate-down": "babel-node ./src/migrate.js down --migrations-dir ./src/migrations -v --config-file \"../../babel.config.json\"",
"refresh-database": "node ./scripts/refreshDatabase.js",
"test": "yarn workspace @tupaia/database check-test-database-exists && DB_NAME=tupaia_test mocha",
"test:coverage": "cross-env NODE_ENV=test nyc mocha",
"test:debug": "mocha --inspect-brk",
"update-test-data": "bash -c 'source .env && pg_dump -s -U $DB_USER -O $DB_NAME > src/tests/testData/testDataDump.sql && pg_dump -t migrations -c -U $DB_USER -O $DB_NAME >> src/tests/testData/testDataDump.sql'",
"test": "yarn package:test:withdb --runInBand",
"test:coverage": "yarn test --coverage",
"update-test-data": "bash -c 'source .env && pg_dump -s -U $DB_USER -O $DB_NAME > src/__tests__/testData/testDataDump.sql && pg_dump -t migrations -c -U $DB_USER -O $DB_NAME >> src/__tests__/testData/testDataDump.sql'",
"setup-test-database": "DB_NAME=tupaia_test scripts/setupTestDatabase.sh",
"check-test-database-exists": "DB_NAME=tupaia_test scripts/checkTestDatabaseExists.sh"
},
@@ -53,8 +52,6 @@
"devDependencies": {
"@babel/node": "^7.10.5",
"cross-env": "^7.0.2",
"deep-equal-in-any-order": "^1.0.27",
"mocha": "^8.1.3",
"npm-run-all": "^4.1.5",
"nyc": "^15.1.0",
"pluralize": "^8.0.0"