diff --git a/app/components/Settings/ConnectButton/ConnectButton.react.js b/app/components/Settings/ConnectButton/ConnectButton.react.js index 82eea5b95..d8038b37a 100644 --- a/app/components/Settings/ConnectButton/ConnectButton.react.js +++ b/app/components/Settings/ConnectButton/ConnectButton.react.js @@ -2,19 +2,19 @@ import React, {Component} from 'react'; import PropTypes from 'prop-types'; import {pathOr} from 'ramda'; -/** - * The following is the Connect Button which triggers the connection - * @param {function} connect - Connect function - * @param {object} connectRequest - Connection Request - * @param {number || string} connectRequest.status -- 400 or loading - * @param {Error} connectRequest.error - * @param {object} saveConnectionsRequest - Saved Connection Request - * @param {number || string } saveConnectionsRequest.status -- 400 or loading - * @param {Error} saveConnectionsRequest.error - * @param {boolean} editMode - Enabled if Editting credentials - * @returns {ConnectButton} - */ export default class ConnectButton extends Component { + /** + * Component props + * @type {object} props + * @property {function} props.connect - Connect function + * @property {object} props.connectRequest - Connection Request + * @property {(number|string)} props.connectRequest.status - 400 or loading + * @property {Error} props.connectRequest.error + * @property {object} props.saveConnectionsRequest - Saved Connection Request + * @property {(number|string)} props.saveConnectionsRequest.status - 400 or loading + * @property {Error} props.saveConnectionsRequest.error + * @property {boolean} props.editMode - Enabled if editing credentials + */ static propTypes = { connect: PropTypes.func, connectRequest: PropTypes.object, @@ -118,4 +118,4 @@ export default class ConnectButton extends Component { ); } -} \ No newline at end of file +} diff --git a/app/components/Settings/Preview/TableTree.react.js b/app/components/Settings/Preview/TableTree.react.js index 
2d268fa5b..13dba6e98 100644 --- a/app/components/Settings/Preview/TableTree.react.js +++ b/app/components/Settings/Preview/TableTree.react.js @@ -30,11 +30,13 @@ class TableTree extends Component { getLabel(connectionObject) { switch (connectionObject.dialect) { case DIALECTS.SQLITE: - return BASENAME_RE.exec(connectionObject.storage)[0] || connectionObject.storage; + return BASENAME_RE.exec(connectionObject.storage)[0] || connectionObject.storage; case DIALECTS.DATA_WORLD: - return getPathNames(connectionObject.url)[2]; + return getPathNames(connectionObject.url)[2]; + case DIALECTS.CSV: + return connectionObject.label || connectionObject.id || connectionObject.database; default: - return connectionObject.database; + return connectionObject.database; } } diff --git a/app/components/Settings/Tabs/Tab.react.js b/app/components/Settings/Tabs/Tab.react.js index 03be19323..a62d4f05d 100644 --- a/app/components/Settings/Tabs/Tab.react.js +++ b/app/components/Settings/Tabs/Tab.react.js @@ -33,7 +33,7 @@ export default class ConnectionTab extends Component { } else if (dialect === DIALECTS.APACHE_SPARK) { label = `Apache Spark (${connectionObject.host}:${connectionObject.port})`; } else if (connectionObject.dialect === DIALECTS.CSV) { - label = `CSV (${connectionObject.database})`; + label = connectionObject.label || connectionObject.id || connectionObject.database; } else if (connectionObject.dialect === DIALECTS.ELASTICSEARCH) { label = `Elasticsearch (${connectionObject.host})`; } else if (connectionObject.dialect === DIALECTS.SQLITE) { diff --git a/app/components/Settings/UserConnections/UserConnections.react.js b/app/components/Settings/UserConnections/UserConnections.react.js index c9939144d..04058fdb1 100644 --- a/app/components/Settings/UserConnections/UserConnections.react.js +++ b/app/components/Settings/UserConnections/UserConnections.react.js @@ -1,6 +1,9 @@ import React, {Component} from 'react'; import PropTypes from 'prop-types'; +import Filedrop from 
'./filedrop.jsx'; + import {contains} from 'ramda'; + import {CONNECTION_CONFIG, SAMPLE_DBS} from '../../../constants/constants'; import {dynamicRequireElectron} from '../../../utils/utils'; @@ -151,6 +154,15 @@ export default class UserConnections extends Component { ); + } else if (setting.type === 'filedrop') { + input = ( + + ); } return ( diff --git a/app/components/Settings/UserConnections/filedrop.jsx b/app/components/Settings/UserConnections/filedrop.jsx new file mode 100644 index 000000000..7f1f52094 --- /dev/null +++ b/app/components/Settings/UserConnections/filedrop.jsx @@ -0,0 +1,172 @@ +import React, {Component} from 'react'; +import PropTypes from 'prop-types'; + +import {SAMPLE_DBS} from '../../../constants/constants'; + +export default class Filedrop extends Component { + static propTypes = { + settings: PropTypes.object, + connection: PropTypes.object, + updateConnection: PropTypes.func, + sampleCredentialsStyle: PropTypes.object + } + + /** + * Filedrop is an input component where users can type an URL or drop a file + * + * @param {object} props - Component properties + * + * @param {object} props.settings - FileDrop settings + * @param {string} props.settings.type - Set to 'filedrop' + * @param {string} props.settings.value - Target property in the connection object + * @param {string} props.settings.inputLabel - Label for input box + * @param {string} props.settings.dropLabel - Label for drop box + * @param {string} props.settings.placeholder - Placeholder for input box + * + * @param {object} props.connection - Connection object + * @param {string} props.connection.dialect - Connection dialect + * @param {string} props.connection.label - Connection label + * + * @param {function} props.updateConnection - Callback to update the connection object + * + * @param {object} props.sampleCredentialsStyle - To control the display of sample credentials + */ + constructor(props) { + super(props); + + const { + settings, + connection + } = this.props; + + 
const url = connection[settings.value]; + + /** + * @member {object} state - Component state + * @property {string} state.inputValue - Value typed into the input box + * @property {string} state.dropValue - Data URL dropped into the drop box + */ + this.state = (typeof url === 'string' && url.startsWith('data:')) ? { + inputValue: connection.label || url.slice(0, 64), + dropValue: url + } : { + inputValue: url || '', + dropValue: '' + }; + } + + + render() { + const { + settings, + connection, + updateConnection, + sampleCredentialsStyle + } = this.props; + + const { + inputValue, + dropValue, + drag + } = this.state; + + const setState = this.setState.bind(this); + + const { + value, + inputLabel, + dropLabel, + placeholder + } = settings; + + const {dialect} = connection; + + const sampleCredential = (SAMPLE_DBS[dialect]) ? SAMPLE_DBS[dialect][value] : null; + + return ( +
+ +
+ + + {dropLabel} + +
+ + {sampleCredential} + +
+
+
+ ); + + function onChange(event) { + setState({ + inputValue: event.target.value, + dropValue: '' + }); + updateConnection({ + [value]: event.target.value, + label: event.target.value + }); + } + + function onDragEnter(event) { + event.stopPropagation(); + event.preventDefault(); + setState({drag: true}); + } + + function onDragOver(event) { + event.stopPropagation(); + event.preventDefault(); + event.dataTransfer.dropEffect = 'copy'; + } + + function onDragLeave(event) { + event.stopPropagation(); + event.preventDefault(); + setState({drag: false}); + } + + function onDrop(event) { + event.stopPropagation(); + event.preventDefault(); + + const files = event.dataTransfer.files; + if (!files || files.length !== 1) { + setState({drag: false}); + return; + } + + const file = files[0]; + const reader = new FileReader(); + reader.onload = () => { + setState({ + drag: false, + dropValue: reader.result, + inputValue: file.name + }); + updateConnection({ + [value]: reader.result, + label: file.name + }); + }; + reader.readAsDataURL(file); + } + } +} diff --git a/app/constants/constants.js b/app/constants/constants.js index 7de987bbc..187433f3c 100644 --- a/app/constants/constants.js +++ b/app/constants/constants.js @@ -76,9 +76,12 @@ const hadoopQLOptions = [ export const CONNECTION_CONFIG = { [DIALECTS.APACHE_IMPALA]: hadoopQLOptions, [DIALECTS.APACHE_SPARK]: hadoopQLOptions, - [DIALECTS.CSV]: [ - {'label': 'URL to CSV File', 'value': 'database', 'type': 'text'} - ], + [DIALECTS.CSV]: [{ + 'inputLabel': 'Type URL to a CSV file', + 'dropLabel': '(or drop a CSV file here)', + 'value': 'database', + 'type': 'filedrop' + }], [DIALECTS.IBM_DB2]: commonSqlOptions, [DIALECTS.MYSQL]: commonSqlOptions, [DIALECTS.MARIADB]: commonSqlOptions, diff --git a/backend/init.js b/backend/init.js new file mode 100644 index 000000000..9ac75bcb4 --- /dev/null +++ b/backend/init.js @@ -0,0 +1,25 @@ +import Logger from './logger'; + +import { + deleteAllConnections, + deleteBadConnections +} 
from './persistent/Connections.js'; +import {getSetting} from './settings.js'; + +const setCSVStorageSize = require('./persistent/datastores/csv.js').setStorageSize; + +export default function init() { + try { + deleteBadConnections(); + } catch (error) { + Logger.log(`Failed to delete bad connections: ${error.message}`); + deleteAllConnections(); + } + + try { + setCSVStorageSize(getSetting('CSV_STORAGE_SIZE')); + } catch (error) { + Logger.log(`Failed to get setting CSV_STORAGE_SIZE: ${error.message}`); + setCSVStorageSize(0); + } +} diff --git a/backend/persistent/Connections.js b/backend/persistent/Connections.js index b6562143a..58c31cb07 100644 --- a/backend/persistent/Connections.js +++ b/backend/persistent/Connections.js @@ -5,6 +5,7 @@ import {assoc, dissoc, findIndex} from 'ramda'; import uuid from 'uuid'; import YAML from 'yamljs'; import * as Datastores from './datastores/Datastores.js'; +import {DIALECTS} from '../../app/constants/constants.js'; import {getSetting} from '../settings'; @@ -39,11 +40,32 @@ export function deleteConnectionById(id) { const connections = getConnections(); const index = findIndex(connection => connection.id === id, connections); if (index > -1) { + Datastores.disconnect(connections[index]); connections.splice(index, 1); fs.writeFileSync(getSetting('CONNECTIONS_PATH'), YAML.stringify(connections, 4)); } } +export function deleteBadConnections() { + getConnections().forEach(connection => { + const {id, dialect} = connection; + + const dialects = Object.getOwnPropertyNames(DIALECTS).map(k => DIALECTS[k]); + + const isUnknownDialect = (dialects.indexOf(dialect) === -1); + if (isUnknownDialect) { + deleteConnectionById(id); + } + }); +} + +export function deleteAllConnections() { + if (!fs.existsSync(getSetting('STORAGE_PATH'))) { + createStoragePath(); + } + fs.writeFileSync(getSetting('CONNECTIONS_PATH'), YAML.stringify([], 4)); +} + export function getSanitizedConnections() { const connections = getConnections(); return 
connections.map(cred => sanitize(cred)); @@ -60,7 +82,7 @@ export function saveConnection(connectionObject) { return connectionId; } -export function validateConnection (connectionObject) { +export function validateConnection(connectionObject) { return Datastores.connect(connectionObject).then(() => { return {}; }).catch(err => { diff --git a/backend/persistent/datastores/Datastores.js b/backend/persistent/datastores/Datastores.js index e35a85ce9..8817079e8 100644 --- a/backend/persistent/datastores/Datastores.js +++ b/backend/persistent/datastores/Datastores.js @@ -5,10 +5,11 @@ import * as ApacheDrill from './ApacheDrill'; import * as IbmDb2 from './ibmdb2'; import * as ApacheLivy from './livy'; import * as ApacheImpala from './impala'; -import * as CSV from './csv'; import * as DataWorld from './dataworld'; import * as DatastoreMock from './datastoremock'; +const CSV = require('./csv'); + /* * Switchboard to all of the different types of connections * that we support. @@ -55,45 +56,62 @@ function getDatastoreClient(connection) { return Sql; } -/* - * query functions take a configuration, query a connection and - * return a promise with the results as an object: - * - * { - * rows: [...], - * columnnames: [...] - * } - * +/** + * query makes a query + * @param {(object|string)} queryStatement Query + * @param {object} connection Connection object + * @returns {Promise.} that resolves to the results as: + * { + * columnnames: [...], + * rows: [...] 
+ * } */ -export function query(queryObject, connection) { - return getDatastoreClient(connection).query(queryObject, connection); +export function query(queryStatement, connection) { + return getDatastoreClient(connection).query(queryStatement, connection); } -/* - * connect functions attempt to ping the connection and - * return a promise that is empty +/** + * connect attempts to ping the connection + * @param {object} connection Connection object + * @returns {Promise} that resolves when the connection succeeds */ export function connect(connection) { return getDatastoreClient(connection).connect(connection); } +/** + * disconnect closes the connection and + * @param {object} connection Connection object + * @returns {Promise} that resolves when the connection succeeds + */ +export function disconnect(connection) { + const client = getDatastoreClient(connection); + return (client.disconnect) ? + client.disconnect(connection) : + Promise.resolve(connection); +} + /* SQL-like Connectors */ -/* - * return a promise that resolves to an array of [table_name, column_name, data_type] - * available from a database. - * +/** + * schemas retrieves a list of table names, column names and column data types + * @param {object} connection Connection object + * @returns {Promise.} that resolves to the results as: + * { + * columnnames: [...], + * rows: [[table_name, column_name, data_type], ...] + * } */ export function schemas(connection) { return getDatastoreClient(connection).schemas(connection); } -/* - * return a promise with the available tables from a database - * - * this can have flexible meaning for other datastores. - * e.g., for elasticsearch, this means return the available - * "documents" per an "index" +/** + * tables retrieves a list of table names + * @param {object} connection Connection object + * @returns {Promise.} that resolves to a list of the available tables. + * This can have flexible meaning for other datastores. 
E.g.: + * for elasticsearch, this means return the available "documents" per an "index" */ export function tables(connection) { return getDatastoreClient(connection).tables(connection); diff --git a/backend/persistent/datastores/csv.js b/backend/persistent/datastores/csv.js index b2d5b7ec5..5c5979f19 100644 --- a/backend/persistent/datastores/csv.js +++ b/backend/persistent/datastores/csv.js @@ -5,81 +5,129 @@ import {type} from 'ramda'; import {parseSQL} from '../../parse'; -/** - * @typedef {object} PapaError Papaparse error - * - * @property {string} type Error type - * @property {string} code Error code - * @property {string} message Error description - * @property {number} row Row index that triggered the error - */ +module.exports = { + connect: connect, + tables: tables, + schemas: schemas, + query: query, + disconnect: disconnect, + + getAvailableSize: getAvailableSize, + setStorageSize: setStorageSize +}; /** - * Error thrown by CSV connector - * @class - * @param {string} url URL of the CSV file that triggered the error - * @param {PapaError[]} errors List of errors returned by Papaparse + * @typedef {object} CSVStorage Global storage of CSV connector + * + * @property {Object.} + * data Store of CSV files parsed into JS objects and indexed by URL + * @property {number} size Available size in bytes (0 to disable limit) + * @property {number} used Used size in bytes */ -export function CSVError(url, errors) { - /** - * Error class - * @type {string} - */ - this.name = 'CSVError'; - - /** - * Error description - * @type {string} - */ - this.message = 'Failed to parse CSV file ' + url; - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, CSVError); - } else { - /** - * Error stack trace - */ - this.stack = new Error(this.message).stack; - } - - /** - * URL to CSV file - * @type {string} - */ - this.url = url; - - /** - * List of errors returned by Papaparse - * @type {PapaError[]} - */ - this.errors = errors; - - if (errors && errors[0] && 
errors[0].message) { - this.message = errors[0].message; - } -} -CSVError.prototype = Object.create(Error.prototype); -CSVError.prototype.constructor = CSVError; /** - * Store of CSV files parsed into JS objects and indexed by URL + * CSV-connector global storage. * - * @const {Object.} + * Note the difference between: + * - `const CSV = require('./csv');` + * - `import * as CSV from './csv';` + * Files using `require` will share STORAGE. + * Files using `import` will have their own copy of STORAGE. + * + * @const {CSVStorage} */ -const connectionData = {}; +const STORAGE = { + data: {}, + size: 0, + used: 0 +}; + function getData(connection) { - return connectionData[connection.database]; + return STORAGE.data[connection.database]; } + function putData(connection, data) { - connectionData[connection.database] = data; + const size = connection.size || 0; + STORAGE.used += size; + STORAGE.data[connection.database] = data; +} + +function deleteData(connection) { + const size = connection.size || 0; + STORAGE.used = Math.max(0, STORAGE.used - size); + delete STORAGE.data[connection.database]; } -export function connect(connection) { +function setStorageSize(size) { + STORAGE.size = size; +} + +function getAvailableSize() { + // no size limit + if (STORAGE.size === 0) { + return 0; + } + + const available = STORAGE.size - STORAGE.used; + + // storage is full + if (available <= 0) { + return -1; + } + + // available size + return available; +} + + +/** + * @typedef {object} CSVConnection Connection to a CSV file + * + * @property {string} database URL of the CSV file + * @property {number} size File size + */ + +/** + * connect downloads and parses an URL of a CSV file + * @param {CSVConnection} connection Connection to a CSV file + * @returns {Promise.} that resolves when the connection succeeds + */ +function connect(connection) { + // check if the CSV file is already in the storage + if (getData(connection)) { + return Promise.resolve(connection); + } + const url = 
connection.database; - return fetch(url) .then(res => res.text()) .then(body => { + const availableSize = getAvailableSize(); + if (availableSize === -1) { + return Promise.reject(new Error('Out of memory')); + } + + let getCSVFile; + if (url.startsWith('data:')) { + const data = url.slice(1 + url.indexOf(',')); + + // base64 encodes 6 bits per byte + const estimatedSize = data.length * 6 / 8; + if (availableSize !== 0 && estimatedSize > availableSize) { + return Promise.reject(new Error('Out of memory')); + } + + const body = Buffer.from(data, 'base64').toString(); + getCSVFile = Promise.resolve(body); + + } else { + getCSVFile = fetch(url, {size: availableSize}).then(res => res.text()); + } + + return getCSVFile.then(body => { + const fileSize = body.length; + if (availableSize !== 0 && fileSize > availableSize) { + throw new Error('Out of memory'); + } + return new Promise(function(resolve) { papa.parse(body, { download: false, @@ -90,10 +138,11 @@ export function connect(connection) { complete: function({data, errors, meta}) { if (errors.length) { - throw new CSVError(url, errors); + throw new Error('Failed to parse CSV file ' + url); } connection.meta = meta; + connection.size = fileSize; putData(connection, data); @@ -104,6 +153,16 @@ export function connect(connection) { }); } +/** + * disconnect deletes the CSV file from the storage + * @param {CSVConnection} connection Connection to a CSV file + * @returns {Promise.} that resolves when the connection has been disconnected + */ +function disconnect(connection) { + deleteData(connection); + return Promise.resolve(connection); +} + /** * Table name used in SQL queries to refer to the data imported from a CSV file, * so that we can take advantage of alaSQL's parser. 
@@ -111,11 +170,11 @@ export function connect(connection) { */ const TABLENAME = '?'; -export function tables() { +function tables() { return Promise.resolve([TABLENAME]); } -export function schemas(connection) { +function schemas(connection) { const columnnames = ['TABNAME', 'COLNAME', 'TYPENAME']; const rows = connection.meta.fields.map(columnName => { return [TABLENAME, columnName, getType(columnName)]; @@ -137,7 +196,7 @@ export function schemas(connection) { } } -export function query(queryString, connection) { +function query(queryString, connection) { const data = getData(connection); // In the query `SELECT * FROM ?`, alaSQL replaces ? with data diff --git a/backend/routes.js b/backend/routes.js index cd0b2d408..e386555a9 100644 --- a/backend/routes.js +++ b/backend/routes.js @@ -1,9 +1,11 @@ -var restify = require('restify'); -var CookieParser = require('restify-cookies'); -import * as Datastores from './persistent/datastores/Datastores.js'; +const restify = require('restify'); +const CookieParser = require('restify-cookies'); +const fetch = require('node-fetch'); + import * as fs from 'fs'; import path from 'path'; +import * as Datastores from './persistent/datastores/Datastores.js'; import {PlotlyOAuth} from './plugins/authorization.js'; import {getQueries, getQuery, deleteQuery} from './persistent/Queries'; import { @@ -25,7 +27,7 @@ import {checkWritePermissions, newDatacache} from './persistent/PlotlyAPI.js'; import {contains, keys, isEmpty, merge, pluck} from 'ramda'; import {getCerts, timeoutFetchAndSaveCerts, setRenewalJob} from './certificates'; import Logger from './logger'; -import fetch from 'node-fetch'; +import init from './init.js'; export default class Servers { /* @@ -34,6 +36,8 @@ export default class Servers { * The httpsServer starts when certificates have been created. 
*/ constructor(args = {createCerts: true, startHttps: true, isElectron: false}) { + init(); + this.httpServer = { port: null, server: null, diff --git a/backend/settings.js b/backend/settings.js index cc5756496..55702a442 100644 --- a/backend/settings.js +++ b/backend/settings.js @@ -36,6 +36,11 @@ const DEFAULT_SETTINGS = { */ ADDITIONAL_CORS_ALLOWED_ORIGINS: [], + /* + * Storage size of CSV connector in bytes (0 to disable size limit) + */ + CSV_STORAGE_SIZE: 0, + DEFAULT_CORS_ALLOWED_ORIGINS: [ 'https://plot.ly', 'https://stage.plot.ly', diff --git a/package.json b/package.json index d69749f15..1ac4ba467 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,7 @@ "rebuild:modules:electron": "cross-env FSEVENTS_BUILD_FROM_SOURCE=true node scripts/rebuild-modules.js --electron", "rebuild:modules:node": "cross-env FSEVENTS_BUILD_FROM_SOURCE=true node scripts/rebuild-modules.js", "fix:module:ibmdb": "node scripts/fix-module-ibmdb.js", - "lint": "eslint app test backend scripts *.js", + "lint": "eslint app test backend scripts *.js --ext .js,.jsx", "test": "yarn run lint && yarn run test-unit-all && yarn run test-e2e && yarn run test-jest", "test-e2e": "cross-env NODE_ENV=test mocha --bail --full-trace --compilers js:babel-register --require babel-polyfill ./test/integration_test.js", "test-e2e-local": "source test/set_creds.sh && yarn run test-e2e", @@ -41,7 +41,7 @@ "test-unit-scheduler": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/QueryScheduler.spec.js", "test-unit-routes": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/routes.spec.js", "test-unit-mock": "cross-env NODE_ENV=test BABEL_DISABLE_CACHE=1 electron-mocha --full-trace --timeout 90000 --compilers js:babel-register test/backend/datastores.mock.spec.js", - "test-jest":"./node_modules/.bin/jest ./test/app", + "test-jest": 
"./node_modules/.bin/jest ./test/app", "pack": "cross-env NODE_ENV=production electron-builder --publish=never", "package": "cross-env NODE_ENV=production node -r babel-register package.js", "package-all": "yarn run package -- --all", @@ -138,13 +138,13 @@ "aws-sdk": "^2.156.0", "babel-core": "^6.26.0", "babel-eslint": "^8.0.2", - "babel-jest":"^22.2.2", + "babel-jest": "^22.2.2", "babel-loader": "^7.1.2", "babel-plugin-add-module-exports": "^0.2.1", "babel-plugin-transform-decorators-legacy": "^1.3.4", "babel-polyfill": "^6.26.0", "babel-preset-es2015": "^6.24.1", - "babel-preset-jest":"^22.2.0", + "babel-preset-jest": "^22.2.0", "babel-preset-react": "^6.24.1", "babel-preset-react-hmre": "^1.1.1", "babel-preset-stage-0": "^6.24.1", @@ -164,8 +164,8 @@ "electron-debug": "^1.4.0", "electron-mocha": "^4.0.3", "electron-packager": "^8.7.0", - "enzyme":"latest", - "enzyme-adapter-react-15":"^1.0.5", + "enzyme": "latest", + "enzyme-adapter-react-15": "^1.0.5", "eslint": "^4.11.0", "eslint-config-airbnb": "^16.1.0", "eslint-plugin-import": "^2.8.0", @@ -176,7 +176,7 @@ "form-data": "^2.3.1", "fs-extra": "^4.0.2", "immutable": "^3.8.2", - "jest":"^22.3.0", + "jest": "^22.3.0", "json-loader": "^0.5.4", "minimist": "^1.2.0", "mkdirp": "^0.5.1", @@ -202,7 +202,7 @@ "react-select": "^1.0.0-beta13", "react-split-pane": "^0.1.66", "react-tabs": "^1.1.0", - "react-test-renderer":"^16.2.0", + "react-test-renderer": "^16.2.0", "react-tooltip": "^3.4.0", "react-treeview": "^0.4.7", "redux": "^3.4.0", diff --git a/test/app/components/Settings/UserConnections/filedrop.test.jsx b/test/app/components/Settings/UserConnections/filedrop.test.jsx new file mode 100644 index 000000000..0b6710a7a --- /dev/null +++ b/test/app/components/Settings/UserConnections/filedrop.test.jsx @@ -0,0 +1,139 @@ +jest.unmock('../../../../../app/components/Settings/UserConnections/filedrop.jsx'); +import Filedrop from '../../../../../app/components/Settings/UserConnections/filedrop.jsx'; +import React from 
'react'; +import {configure, mount} from 'enzyme'; +import Adapter from 'enzyme-adapter-react-15'; + +import { + CONNECTION_CONFIG, + DIALECTS, + SAMPLE_DBS +} from '../../../../../app/constants/constants.js'; + +describe('Filedrop', () => { + + beforeAll(() => { + configure({adapter: new Adapter()}); + }); + + function newFiledrop(onUpdateConnection) { + const settings = { + type: 'filedrop', + value: 'database', + inputLabel: 'Type URL to a CSV file', + dropLabel: '(or drop a CSV file here)', + placeholder: 'testing placeholder' + }; + + const connection = { + dialect: DIALECTS.CSV, + label: '' + }; + + const updateConnection = (update) => { + Object.assign(connection, update); + if (onUpdateConnection) onUpdateConnection(update); + }; + + const sampleCredentialsStyle = {display: 'none'}; + + const filedrop = mount( + + ); + + return { + filedrop, settings, connection, updateConnection, sampleCredentialsStyle + }; + } + + it('honors props', () => { + const {filedrop, settings, connection, sampleCredentialsStyle} = newFiledrop(); + + const sampleCredential = (SAMPLE_DBS[connection.dialect]) ? 
+ SAMPLE_DBS[connection.dialect][settings.value] : + null; + + expect(filedrop.find('label.label').length).toBe(1); + expect(filedrop.find('label.label').first().text()).toBe(settings.inputLabel); + expect(filedrop.find('small').length).toBe(1); + expect(filedrop.find('small').first().text()).toBe(settings.dropLabel); + expect(filedrop.find('input').length).toBe(1); + expect(filedrop.find('input').first().prop('placeholder')).toBe(settings.placeholder); + expect(filedrop.find('.wrapInput > div').length).toBe(1); + expect(filedrop.find('.wrapInput > div').first().prop('style')).toEqual(sampleCredentialsStyle); + expect(filedrop.find('code').length).toBe(1); + expect(filedrop.find('code').first().text()).toBe(sampleCredential); + + const label = 'testing label'; + filedrop.prop('updateConnection')({[settings.value]: label}); + expect(connection[settings.value]).toBe(label); + }); + + it('accepts input from keyboard', () => { + const {filedrop, settings, connection} = newFiledrop(); + + function getCurrentInput() { + return filedrop.find('input').first(); + } + + const change = {target: {value: 'testing input from keyboard'}}; + getCurrentInput().simulate('change', change); + + expect(getCurrentInput().prop('value')).toBe(change.target.value); + expect(filedrop.state('inputValue')).toBe(change.target.value); + expect(filedrop.state('dropValue')).toBe(''); + expect(connection[settings.value]).toBe(change.target.value); + expect(connection.label).toBe(change.target.value); + }); + + it('accepts dropped file as input', (done) => { + let stage = 0; + + const filename = 'test.csv'; + const csvFile = 'col1,col 2,"col 3",col 4\r\n1,1.1,2018-01-10,UK\r\n2,2.2,2019-02-20,ES\r\n3,3.3,2020-03-30,PL'; + /* eslint-disable max-len */ + const dataURL = 'data:;base64,Y29sMSxjb2wgMiwiY29sIDMiLGNvbCA0DQoxLDEuMSwyMDE4LTAxLTEwLFVLDQoyLDIuMiwyMDE5LTAyLTIwLEVTDQozLDMuMywyMDIwLTAzLTMwLFBM'; + /* eslint-enable max-len */ + const file = new File([csvFile], filename); + + const {filedrop, 
settings, connection} = newFiledrop(() => { + if (stage === 2) { + try { + expect(filedrop.state('drag')).toBe(false); + expect(getCurrentInput().prop('style')).toEqual({backgroundColor: 'lightcyan'}); + expect(filedrop.state('inputValue')).toBe(filename); + expect(filedrop.state('dropValue')).toBe(dataURL); + expect(connection.label).toBe(filename); + expect(connection[settings.value]).toBe(dataURL); + done(); + } catch (error) { + done(error); + } + } + }); + + function getCurrentInput() { + return filedrop.find('input').first(); + } + + expect(getCurrentInput().prop('style')).toEqual({backgroundColor: null}); + getCurrentInput().simulate('dragenter'); + expect(filedrop.state('drag')).toBe(true); + expect(getCurrentInput().prop('style')).toEqual({backgroundColor: 'lightcyan'}); + getCurrentInput().simulate('dragleave'); + expect(filedrop.state('drag')).toBe(false); + expect(getCurrentInput().prop('style')).toEqual({backgroundColor: null}); + + stage = 1; + getCurrentInput().simulate('dragenter'); + expect(filedrop.state('drag')).toBe(true); + + stage = 2; + getCurrentInput().simulate('drop', {dataTransfer: {files: [file]}}); + }); +}); diff --git a/test/backend/datastores.csv.spec.js b/test/backend/datastores.csv.spec.js index d6b687cb1..7bb260fb8 100644 --- a/test/backend/datastores.csv.spec.js +++ b/test/backend/datastores.csv.spec.js @@ -7,9 +7,15 @@ import { connect, query, schemas, - tables + tables, + disconnect } from '../../backend/persistent/datastores/Datastores.js'; +const { + getAvailableSize, + setStorageSize +} = require('../../backend/persistent/datastores/csv.js'); + const csvFile = [ 'col1,col 2,"col 3",col 4', '1,1.1,2018-01-10,UK', @@ -18,6 +24,10 @@ const csvFile = [ '' // to test csv files with empty lines can be parsed ].join('\n'); +/* eslint-disable max-len */ +const csvDataURL = 'data:text/plain;charset=utf-8;base64,Y29sMSxjb2wgMiwiY29sIDMiLGNvbCA0DQoxLDEuMSwyMDE4LTAxLTEwLFVLDQoyLDIuMiwyMDE5LTAyLTIwLEVTDQozLDMuMywyMDIwLTAzLTMwLFBM'; +/* 
eslint-enable max-len */ + const expected = { columnnames: ['col1', 'col 2', 'col 3', 'col 4'], rows: [ @@ -40,6 +50,10 @@ const connection = { dialect: 'csv', database: url }; +const connectionDataURL = { + dialect: 'csv', + database: csvDataURL +}; describe('CSV:', function () { before(function() { @@ -51,12 +65,52 @@ describe('CSV:', function () { nock.restore(); }); - it('connect succeeds', function() { + it('connect fails if storage size is exceeded', function() { + // mock connect response + nock(host) + .get(path) + .reply(200, csvFile); + + // set storage size of CSV connector to a small number, + // so that next attempt to connect fails + setStorageSize(84); + + return connect(connection) + .then(() => { + throw new Error('connect() should have thrown an exception'); + + }, (error) => { + assert.equal(error.name, 'FetchError', 'Unexpected error name'); + assert.equal( + error.message, + 'content size at https://csv.example.com/table.csv over limit: 84', + 'Unexpected error message' + ); + }); + }); + + it('connect accepts data URLs', function() { + // set storage size of CSV connector to 0 to disable size limit + setStorageSize(0); + + return connect(connectionDataURL) + .then(conn => { + assert.equal(conn.dialect, 'csv', 'Unexpected connection.dialect'); + assert.equal(conn.database, csvDataURL, 'Unexpected connection.database'); + assert(conn.meta, 'Missing connection.meta'); + assert.deepEqual(conn.meta.fields, expected.columnnames, 'Unexpected connection.meta.fields'); + }); + }); + + it('connect succeeds if storage size limit is disabled', function() { // mock connect response nock(host) .get(path) .reply(200, csvFile); + // set storage size of CSV connector to 0 to disable size limit + setStorageSize(0); + return connect(connection) .then(conn => { assert.equal(conn.dialect, 'csv', 'Unexpected connection.dialect'); @@ -88,4 +142,33 @@ describe('CSV:', function () { assert.deepEqual(rows, expected.rows, 'Unexpected rows'); }); }); + + it('disconnect 
removes a CSV file from the storage', function() { + // mock connect response + nock(host) + .get(path) + .reply(200, csvFile); + + // set storage size of CSV connector to a finite value to track available space + const storageSize = 200; + setStorageSize(storageSize); + + // test connection.size has been set to the file size + assert.equal(connection.size, csvFile.length, 'Unexpected connection size'); + + // assuming the previous tests didn't disconnect connection and connectionDataURL + let expectedAvailableSize = storageSize - connection.size - connectionDataURL.size; + assert.equal(getAvailableSize(), expectedAvailableSize, 'Unexpected available size'); + + return disconnect(connection) + .then(() => { + expectedAvailableSize = storageSize - connectionDataURL.size; + assert.equal(getAvailableSize(), expectedAvailableSize, 'Unexpected available size'); + + return disconnect(connectionDataURL); + }) + .then(() => { + assert.equal(getAvailableSize(), storageSize, 'Unexpected storage size'); + }); + }); }); diff --git a/webpack.config.base.js b/webpack.config.base.js index 744b05fd1..7af568d91 100644 --- a/webpack.config.base.js +++ b/webpack.config.base.js @@ -24,6 +24,7 @@ export default { externals: [ { 'csv-parse': 'commonjs csv-parse', + 'data-urls': 'commonjs data-urls', 'font-awesome': 'font-awesome', 'ibm_db': 'commonjs ibm_db', 'mysql': 'mysql', @@ -33,7 +34,8 @@ export default { 'sequelize': 'commonjs sequelize', 'source-map-support': 'source-map-support', 'sqlite3': 'sqlite3', - 'tedious': 'tedious' + 'tedious': 'tedious', + 'whatwg-encoding': 'commonjs whatwg-encoding' } ] }; diff --git a/yarn.lock b/yarn.lock index c4c23051a..5592f52be 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2836,18 +2836,16 @@ doctrine@^2.0.0: esutils "^2.0.2" isarray "^1.0.0" -<<<<<<< HEAD dom-serializer@0, dom-serializer@~0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/dom-serializer/-/dom-serializer-0.1.0.tgz#073c697546ce0780ce23be4a28e293e40bc30c82" dependencies: domelementtype 
"~1.1.1" entities "~1.1.1" -======= + dom-storage@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/dom-storage/-/dom-storage-2.0.2.tgz#ed17cbf68abd10e0aef8182713e297c5e4b500b0" ->>>>>>> upstream-master dom-walk@^0.1.0: version "0.1.1" @@ -3636,15 +3634,13 @@ execa@^0.7.0: signal-exit "^3.0.0" strip-eof "^1.0.0" -<<<<<<< HEAD -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" -======= exit-on-epipe@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz#0bdd92e87d5285d267daa8171d0eb06159689692" ->>>>>>> upstream-master + +exit@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" expand-brackets@^0.1.4: version "0.1.5" @@ -7823,18 +7819,16 @@ pretty-bytes@^1.0.2: get-stdin "^4.0.1" meow "^3.1.0" -<<<<<<< HEAD pretty-format@^22.1.0: version "22.1.0" resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-22.1.0.tgz#2277605b40ed4529ae4db51ff62f4be817647914" dependencies: ansi-regex "^3.0.0" ansi-styles "^3.2.0" -======= + printj@~1.1.0, printj@~1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/printj/-/printj-1.1.1.tgz#3749360215888d460a35b683ae13dcc02c620b47" ->>>>>>> upstream-master private@^0.1.6, private@^0.1.7: version "0.1.8" @@ -10524,11 +10518,6 @@ xdg-basedir@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4" -<<<<<<< HEAD -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" -======= xlsx@^0.11.17: version "0.11.19" resolved "https://registry.yarnpkg.com/xlsx/-/xlsx-0.11.19.tgz#2f019d9df756f6345aac5bc1af2442cf22a025e3" @@ -10540,7 +10529,10 @@ xlsx@^0.11.17: crc-32 "~1.2.0" exit-on-epipe "~1.0.1" ssf 
"~0.10.1" ->>>>>>> upstream-master + +xml-name-validator@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" xml2js@0.4.17: version "0.4.17"