This repository has been archived by the owner on Jun 4, 2024. It is now read-only.

Commit

Google BigQuery (#526)
* Check in first commits

* Check in code from original branch

* Check in bigquery logo

* Fix eslint issue

* make database optional

* tweaks

* always show query for bigquery

* revert
nicolaskruchten authored Nov 2, 2018
1 parent 07166b5 commit 2cd633d
Showing 10 changed files with 2,225 additions and 10 deletions.
2 changes: 2 additions & 0 deletions app/components/Settings/Tabs/Tab.react.js
@@ -42,6 +42,8 @@ export default class ConnectionTab extends Component {
label = `${connectionObject.connectionString}`;
} else if (connectionObject.dialect === DIALECTS.SQLITE) {
label = connectionObject.storage;
} else if (connectionObject.dialect === DIALECTS.BIGQUERY) {
label = `Big Query ${connectionObject.database}`;
} else if (connectionObject.dialect === DIALECTS.DATA_WORLD) {
const pathNames = getPathNames(connectionObject.url);
if (pathNames.length >= 3) {
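A quick sketch of the tab label this new branch produces, assuming a saved connection with database 'plotly' (values here are illustrative, not from the commit):

// Sketch: a saved BigQuery connection (illustrative values)
const connectionObject = {dialect: 'bigquery', database: 'plotly'};

// The branch added above labels the tab:
const label = `Big Query ${connectionObject.database}`;  // => 'Big Query plotly'
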
31 changes: 28 additions & 3 deletions app/constants/constants.js
@@ -18,7 +18,8 @@ export const DIALECTS = {
APACHE_DRILL: 'apache drill',
DATA_WORLD: 'data.world',
ATHENA: 'athena',
CSV: 'csv'
CSV: 'csv',
BIGQUERY: 'bigquery'
};

export const SQL_DIALECTS_USING_EDITOR = [
@@ -34,7 +35,8 @@
'apache impala',
'data.world',
'athena',
'csv'
'csv',
'bigquery'
];

const commonSqlOptions = [
@@ -253,6 +255,21 @@ export const CONNECTION_CONFIG = {
Note that this is just the connection between this app and your database; \
connections to plot.ly or your plotly instance are always encrypted.'
}
],
[DIALECTS.BIGQUERY]: [
{
'label': 'Google Project Id',
'value': 'projectId',
'type': 'text',
'description': 'The Google Cloud Project Id'
},
{'label': 'Database', 'value': 'database', 'type': 'text'},
{
'label': 'Key File',
'value': 'keyFilename',
'type': 'filedrop',
'description': 'The location of the Google Service Account Key File'
}
]
};

@@ -273,7 +290,8 @@ export const LOGOS = {
[DIALECTS.S3]: 'images/s3-logo.png',
[DIALECTS.APACHE_DRILL]: 'images/apache_drill-logo.png',
[DIALECTS.DATA_WORLD]: 'images/dataworld-logo.png',
[DIALECTS.ATHENA]: 'images/athena-logo.png'
[DIALECTS.ATHENA]: 'images/athena-logo.png',
[DIALECTS.BIGQUERY]: 'images/bigquery-logo.png'
};

export function PREVIEW_QUERY(connection, table, elasticsearchIndex) {
@@ -307,6 +325,8 @@ export function PREVIEW_QUERY(connection, table, elasticsearchIndex) {
size: 1000
}
});
case DIALECTS.BIGQUERY:
return 'SELECT \'connected\' as status';
default:
return '';
}
@@ -484,6 +504,11 @@ export const SAMPLE_DBS = {
},
[DIALECTS.DATA_WORLD]: {
url: 'https://data.world/rflprr/reported-lyme-disease-cases-by-state'
},
[DIALECTS.BIGQUERY]: {
projectId: 'Plotly',
database: 'plotly',
keyFilename: '/home/plotly/falcon/google-credentials.json'
}
};

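For orientation, a minimal sketch of the connection object these constants describe, built from the 'value' keys in the CONNECTION_CONFIG entry and the SAMPLE_DBS example values above (sample values, not real credentials):

// Sketch: the shape of a stored BigQuery connection
const bigqueryConnection = {
    dialect: DIALECTS.BIGQUERY,                                  // 'bigquery'
    projectId: 'Plotly',                                         // Google Cloud Project Id
    database: 'plotly',                                          // dataset browsed by schemas()/tables()
    keyFilename: '/home/plotly/falcon/google-credentials.json'   // Google Service Account key file
};
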
249 changes: 249 additions & 0 deletions app/images/bigquery-logo.LICENSE

Large diffs are not rendered by default.

Binary file added app/images/bigquery-logo.png
3 changes: 3 additions & 0 deletions backend/persistent/datastores/Datastores.js
@@ -14,6 +14,7 @@ import * as ApacheImpala from './impala';
import * as DataWorld from './dataworld';
import * as DatastoreMock from './datastoremock';
import * as Athena from './athena';
import * as BigQuery from './bigquery';

const CSV = require('./csv');
const Oracle = require('./oracle.js');
@@ -65,6 +66,8 @@ function getDatastoreClient(connection) {
return Athena;
} else if (dialect === 'oracle') {
return Oracle;
} else if (dialect === 'bigquery') {
return BigQuery;
}
return Sql;
}
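A small sketch of how the dispatch above routes a connection, assuming the dialect string defined in constants.js (connection values are illustrative):

// Sketch: any connection whose dialect is 'bigquery' is now served by the new module
const connection = {dialect: 'bigquery', projectId: 'Plotly', database: 'plotly'};
// getDatastoreClient(connection) returns the imported BigQuery module, so its
// connect/query/schemas/tables implementations (added below) handle the request.
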
146 changes: 146 additions & 0 deletions backend/persistent/datastores/bigquery.js
@@ -0,0 +1,146 @@
import {parseSQL} from '../../parse.js';

const BigQuery = require('@google-cloud/bigquery');
const Pool = require('./pool.js');
const pool = new Pool(newClient, sameConnection);

function newClient(connection) {
return new BigQuery({
keyFilename: connection.keyFilename,
projectId: connection.projectId
});
}

function sameConnection(connection1, connection2) {
return (
connection1.projectId === connection2.projectId &&
connection1.database === connection2.database &&
connection1.keyFilename === connection2.keyFilename
);
}

/**
 * The connect function validates the connection parameters and returns
 * a connection client.
 * @param {object} connection
 * @param {string} connection.projectId - Google Cloud Project Id
 * @param {string} connection.database - Google Big Query Database
 * @param {string} connection.keyFilename - Google Service Account Key File
 * @returns {Promise} that resolves to the connection client
 */
export function connect(connection) {
const client = pool.getClient(connection);
return Promise.resolve(client);
}

export function disconnect(connection) {
return pool.remove(connection);
}

/**
* The following method will execute a query against the specified connection
* @param {object} queryObject - The SQL to query against the connection
* @param {object} connection - Connection parameters
* @returns {Promise} that resolves to { columnnames, rows }
*/
export function query(queryObject, connection) {
const client = pool.getClient(connection);
const options = {
query: queryObject,
useLegacySql: false // Use standard SQL syntax for queries.
};

let job;

return client
.createQueryJob(options)
.then(results => {
job = results[0];
return job.promise();
})
.then(() => {
return job.getMetadata();
})
.then(metadata => {
const errors = metadata[0].status.errors;
if (errors && errors.length > 0) {
throw new Error(errors.join(':'));
}
})
.then(() => {
return job.getQueryResults().then(rst => {
return rst[0];
});
})
.then(parseSQL);
}

/**
* Should return a list of tables and their columns that are defined within the database.
* @param {object} connection - Connection parameters
* @param {string} connection.projectId - Google Cloud Project Id
* @param {string} connection.database - Google Big Query Database
* @param {string} connection.keyFilename - Google Service Account Key File
* @returns {Promise} that resolves to { columnnames, rows }
*/
export function schemas(connection) {
if (!connection.database || connection.database === '') {
const columnnames = ['table_name', 'column_name', 'data_type'];
const rows = [];
return {columnnames, rows};
}

const client = pool.getClient(connection);

return client
.dataset(connection.database)
.getTables()
.then(results => {
const metadataPromises = results[0].map(table => table.getMetadata());
return Promise.all(metadataPromises);
})
.then(results => {
const columnnames = ['table_name', 'column_name', 'data_type'];
const rows = [];

// iterate tables
results.forEach(result => {
const metadata = result[0];
const tableName = metadata.tableReference.tableId;

// iterate fields
if (metadata.schema && metadata.schema.fields)
{
metadata.schema.fields.forEach(({name, type}) => {
rows.push([tableName, name, type]);
});
}
});

return {columnnames, rows};
});
}


/**
* Should return a list of tables that are in the database
* @param {object} connection - Connection Parameters
* @param {string} connection.projectId - Google Cloud Project Id
* @param {string} connection.database - Google Big Query Database
* @param {string} connection.keyFilename - Google Service Account Key File
* @returns {Promise} that resolves to an array of table names
*/
export function tables(connection) {
if (!connection.database || connection.database === '') {
return [];
}

const client = pool.getClient(connection);

return client
.dataset(connection.database)
.getTables()
.then(results => {
return (results[0] || []).map(table => table.id);
});
}
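
A minimal usage sketch of the new datastore module (connection values are assumed, taken from the SAMPLE_DBS entry above; not real credentials):

import * as BigQuery from './bigquery';

// Sketch: sample connection parameters
const connection = {
    projectId: 'Plotly',
    database: 'plotly',
    keyFilename: '/home/plotly/falcon/google-credentials.json'
};

BigQuery.connect(connection)
    // the same probe PREVIEW_QUERY issues for this dialect
    .then(() => BigQuery.query('SELECT \'connected\' as status', connection))
    .then(({columnnames, rows}) => {
        console.log(columnnames, rows);  // e.g. ['status'] [['connected']]
    })
    // with a database set, schemas() resolves to [table_name, column_name, data_type] rows
    .then(() => BigQuery.schemas(connection))
    .then(({rows}) => console.log(rows))
    .catch(console.error);
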
4 changes: 3 additions & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "falcon-sql-client",
"version": "3.0.3",
"version": "3.0.4",
"description": "Free, open-source SQL client for Windows, Mac and Linux",
"main": "./backend/main.js",
"scripts": {
@@ -145,6 +145,7 @@
"fsevents": "*"
},
"devDependencies": {
"@google-cloud/bigquery": "^1.3.0",
"aws-sdk": "^2.156.0",
"babel-core": "^6.26.0",
"babel-eslint": "^8.0.2",
@@ -235,6 +236,7 @@
"sinon": "^4.3.0",
"style-loader": "^0.19.0",
"tohash": "^1.0.2",
"uglifyjs-webpack-plugin": "^1.2.7",
"webpack": "^3.8.1",
"yamljs": "^0.3.0"
},
2 changes: 1 addition & 1 deletion webpack.config.base.js
@@ -24,7 +24,7 @@ export default {
libraryTarget: 'commonjs2'
},
resolve: {
extensions: ['.js', '.jsx']
extensions: ['.js', '.jsx', '.json']
},
plugins: [
],
8 changes: 3 additions & 5 deletions webpack.config.electron.js
@@ -1,5 +1,6 @@
import webpack from 'webpack';
import baseConfig from './webpack.config.base';
import UglifyJsPlugin from 'uglifyjs-webpack-plugin';

export default {
...baseConfig,
@@ -16,11 +17,8 @@ export default {
plugins: [
...baseConfig.plugins,

new webpack.optimize.UglifyJsPlugin({
sourceMap: false,
compressor: {
warnings: false
}
new UglifyJsPlugin({
sourceMap: false
}),
new webpack.BannerPlugin(
{banner: 'require("source-map-support").install();',