diff --git a/.gitignore b/.gitignore index b5c08c51..8ec6f604 100644 --- a/.gitignore +++ b/.gitignore @@ -41,4 +41,13 @@ coverage packages/frontend/public/datasets/* *service.json -*.db \ No newline at end of file +*.db + +for_test/ +sasl-0.3.1-cp310-cp310-win_amd64.whl +__pycache__/ +.idea/ +connect.db +not_sqlalchemy/ +run_flask_app.bat +venv \ No newline at end of file diff --git a/apps/connector/.gitignore b/apps/connector/.gitignore new file mode 100644 index 00000000..acab83ef --- /dev/null +++ b/apps/connector/.gitignore @@ -0,0 +1,7 @@ +for_test/ +sasl-0.3.1-cp310-cp310-win_amd64.whl +__pycache__/ +.idea/ +connect.db +not_sqlalchemy/ +run_flask_app.bat \ No newline at end of file diff --git a/apps/connector/README.md b/apps/connector/README.md new file mode 100644 index 00000000..97c98eba --- /dev/null +++ b/apps/connector/README.md @@ -0,0 +1,5 @@ +# connector +use sqlalchemy to connect database. + +It uses a method similar to Superset, and you can refer to its reference documentation. +https://superset.apache.org/docs/databases/installing-database-drivers/ \ No newline at end of file diff --git a/apps/connector/app.py b/apps/connector/app.py new file mode 100644 index 00000000..a45fc756 --- /dev/null +++ b/apps/connector/app.py @@ -0,0 +1,36 @@ +from flask import Flask + +from bp import bp_Druid, bp_Drill, bp_Athena, bp_Clickhouse, bp_Oracle, bp_Sqlite, bp_SQLserver, bp_Doris, bp_Redshift, bp_Mysql, bp_Impala, bp_Kylin, bp_SparkSQL, bp_Postgres +from bp import bp_database +from database import init_db +from database import db_session + +app = Flask(__name__) + + +@app.route('/') +def hello_world(): # put application's code here + return 'Hello World!' 
+ + +@app.route("/ping") +def ping(): + return { + "success": True + } + + +@app.teardown_appcontext +def shutdown_session(exception=None): + db_session.remove() + + +# 注册蓝图 +app.register_blueprint(bp_database.bp) + + +init_db() + +if __name__ == '__main__': + # init_db() + app.run(host='0.0.0.0') diff --git a/apps/connector/bp/__init__.py b/apps/connector/bp/__init__.py new file mode 100644 index 00000000..99b229e4 --- /dev/null +++ b/apps/connector/bp/__init__.py @@ -0,0 +1,2 @@ +# 2022/9/5 +# 17:57 diff --git a/apps/connector/bp/basefunc.py b/apps/connector/bp/basefunc.py new file mode 100644 index 00000000..b8b55bf0 --- /dev/null +++ b/apps/connector/bp/basefunc.py @@ -0,0 +1,833 @@ +# 2022/9/13 +# 9:13 +from sqlalchemy import create_engine + + +class basefunc: + # athena + @staticmethod + def athena_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def athena_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def athena_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute('select * from ' + database + '.' + table + ' limit 500').keys() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData, "colIndex": i, "dataType": None} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def athena_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def athena_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # clickhouse + @staticmethod + def clickhouse_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def clickhouse_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def clickhouse_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute('desc ' + database + '.' + table).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.name, "colIndex": i, "dataType": colData.type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def clickhouse_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def clickhouse_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # doris + @staticmethod + def doris_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def doris_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def doris_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute('show full columns from ' + database + '.' + table).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.name, "colIndex": i, "dataType": colData.type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def doris_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def doris_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # drill + @staticmethod + def drill_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def drill_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + table_list = [] + for row in res: + table_list.append(row.TABLE_NAME) + return table_list + + @staticmethod + def drill_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute('desc ' + database + '.' + table).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.COLUMN_NAME, "colIndex": i, "dataType": colData.DATA_TYPE} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def drill_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def drill_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # druid + @staticmethod + def druid_getschema(uri, db): + engine = create_engine(uri, echo=True) + res = engine.execute('select SCHEMA_NAME from INFORMATION_SCHEMA.SCHEMATA ').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def druid_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute( + 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = \'' + schema + '\'').fetchall() + table_list = [] + for row in res: + table_list.append(row.TABLE_NAME) + return table_list + + @staticmethod + def druid_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute( + 'select COLUMN_NAME, DATA_TYPE from INFORMATION_SCHEMA.COLUMNS where TABLE_SCHEMA = \'' + schema + '\' and TABLE_NAME = \'' + table + '\'').fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.COLUMN_NAME, "colIndex": i, "dataType": colData.DATA_TYPE} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def druid_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + schema + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def druid_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # impala + @staticmethod + def impala_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def impala_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def impala_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute('desc ' + database + '.' + table).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.col_name, "colIndex": i, "dataType": colData.data_type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def impala_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def impala_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # kylin + @staticmethod + def kylin_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute('select * from ' + schema + '.' + table + ' limit 500').keys() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData, "colIndex": i, "dataType": None} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def kylin_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + schema + '.' + table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def kylin_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # mysql + @staticmethod + def mysql_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def mysql_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def mysql_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + 
metaRes = engine.execute('desc ' + database + '.' + table).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.Field, "colIndex": i, "dataType": colData.Type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def mysql_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' + table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def mysql_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # oracle + @staticmethod + def oracle_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('select tname from tab').fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def oracle_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute( + ''' + select t.table_name, + t.column_name, + t.data_type, + t.data_length, + t.nullable, + t.column_id, + c.comments, + (SELECT CASE WHEN t.column_name = m.column_name THEN 1 ELSE 0 END FROM DUAL) iskey + FROM user_tab_cols t, + user_col_comments c, + (select m.column_name + from user_constraints s, + user_cons_columns m + where lower(m.table_name) = '{0}' + and m.table_name = s.table_name + and m.constraint_name = s.constraint_name + and s.constraint_type = 'P') m + WHERE lower(t.table_name) = '{1}' + and c.table_name = t.table_name + and c.column_name = t.column_name + and t.hidden_column = 'NO' + order by t.column_id + '''.format(table, table) + ).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.column_name, 
"colIndex": i, "dataType": colData.data_type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def oracle_getdata(uri, database, table): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + table + ' where rownum <= 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def oracle_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # postgres + @staticmethod + def postgres_getschema(uri, db): + engine = create_engine(uri, echo=True) + res = engine.execute('select nspname from pg_catalog.pg_namespace').fetchall() + schema_list = [] + for row in res: + for item in row: + schema_list.append(item) + return schema_list + + @staticmethod + def postgres_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute( + 'select tablename from pg_tables where schemaname=\'' + schema + '\'').fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def postgres_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute( + 'select column_name, data_type from information_schema.columns where table_schema= \'' + schema + '\' and table_name= \'' + table + '\'').fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.column_name, "colIndex": i, "dataType": colData.data_type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def postgres_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + schema + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def postgres_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # redshift + @staticmethod + def redshift_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('select nspname from pg_namespace').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def redshift_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute( + '''select distinct(tablename) from pg_table_def where schemaname = '{0}' '''.format(database)).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def redshift_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute(''' + SELECT * + FROM pg_table_def + WHERE tablename = '{0}' + AND schemaname = '{1}' + '''.format(table, database)).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.column, "colIndex": i, "dataType": colData.type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def redshift_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def redshift_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # sparksql + @staticmethod + def sparksql_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def sparksql_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def sparksql_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute('desc ' + database + '.' + table).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.col_name, "colIndex": i, "dataType": colData.data_type} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def sparksql_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def sparksql_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # sqlserver + @staticmethod + def sqlserver_getdb(uri, schema): + engine = create_engine(uri, echo=True) + res = engine.execute('SELECT name FROM sys.databases').fetchall() + db_list = [] + for row in res: + for item in row: + db_list.append(item) + return db_list + + @staticmethod + def sqlserver_getschema(uri, db): + engine = create_engine(uri, echo=True) + res = engine.execute('SELECT name FROM {0}.sys.schemas'.format(db)).fetchall() + schema_list = [] + for row in res: + for item in row: + schema_list.append(item) + return schema_list + + @staticmethod + def sqlserver_gettable(uri, database, schema): + engine = create_engine(uri, echo=True) + res = engine.execute( + ''' + SELECT t.name FROM {0}.sys.tables AS t INNER JOIN {1}.sys.schemas AS s ON s.schema_id = t.schema_id WHERE s.name = '{2}' + '''.format( + database, database, schema)).fetchall() + table_list = [] + for row in res: + for item in row: + table_list.append(item) + return table_list + + @staticmethod + def sqlserver_getmeta(uri, database, table, schema): + engine = create_engine(uri, echo=True) + metaRes = engine.execute(''' + SELECT SC.name as table_name, + ST.name as table_column + FROM {0}.sys.sysobjects SO, + {1}.sys.syscolumns SC, + {2}.sys.systypes ST + WHERE SO.id = SC.id + AND SO.xtype = 'U' + AND SO.status >= 0 + AND SC.xtype = ST.xusertype + AND SO.name = '{3}' + ORDER BY SO.name, SC.colorder + '''.format(database, database, database, table)).fetchall() + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.table_name, "colIndex": i, "dataType": 
colData.table_column} + meta.append(scores) + i += 1 + return meta + + @staticmethod + def sqlserver_getdata(uri, database, table, schema): + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select top 500 * from ' + database + '.' + schema + '.' + table).fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + return data + + @staticmethod + def sqlserver_getresult(uri, sql): + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + sql_result = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + sql_result.append(rows) + return sql_result + + # # x + # @staticmethod + # def x_getdb(uri, schema): + # engine = create_engine(uri, echo=True) + # res = engine.execute('SHOW DATABASES').fetchall() + # db_list = [] + # for row in res: + # for item in row: + # db_list.append(item) + # return db_list + # + # + # @staticmethod + # def x_gettable(uri, database): + # engine = create_engine(uri, echo=True) + # res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + # table_list = [] + # for row in res: + # for item in row: + # table_list.append(item) + # return table_list + # + # @staticmethod + # def x_getmeta(uri, database, table): + # engine = create_engine(uri, echo=True) + # metaRes = engine.execute('desc ' + database + '.' + table).fetchall() + # meta = [] + # i = 1 + # for colData in metaRes: + # scores = {"key": colData.col_name, "colIndex": i, "dataType": colData.data_type} + # meta.append(scores) + # i += 1 + # return meta + # + # @staticmethod + # def x_getdata(uri, database, table): + # engine = create_engine(uri, echo=True) + # dataRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').fetchall() + # data = [] + # for row in dataRes: + # rows = [] + # for item in row: + # rows.append(item) + # data.append(rows) + # return data + # + # @staticmethod + # def x_getresult(uri, sql): + # engine = create_engine(uri, echo=True) + # res = engine.execute(sql).fetchall() + # sql_result = [] + # for row in res: + # rows = [] + # for item in row: + # rows.append(item) + # sql_result.append(rows) + # return sql_result diff --git a/apps/connector/bp/bp_Athena.py b/apps/connector/bp/bp_Athena.py new file mode 100644 index 00000000..d04478fc --- /dev/null +++ b/apps/connector/bp/bp_Athena.py @@ -0,0 +1,157 @@ +# 2022/8/28 +# 15:33 +# 无语了,这个athena在desc的时候竟然查询出来的是一个string类型的数据,最后fetchall的时候会出现列和结果数量不匹配的结果 + +import json +from flask import ( + Blueprint, flash, g, redirect, render_template, request, session, url_for +) +from sqlalchemy import create_engine +from bp.bp_Sqlite import getUri + +bp = Blueprint('athena', __name__, url_prefix='/athena') + + +@bp.route('/database_list', methods=['POST']) +def get_databases(): + try: + props = json.loads(request.data) + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/get_metadata', methods=['POST']) +def get_metadata(): + try: + props = json.loads(request.data) + database = props['schema'] + table = props['table'] + connect_id = props['connect_id'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('desc ' + database + '.' 
+ table).fetchall() + database = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + database.append(rows) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_list', methods=['POST']) +def get_table_list(): + try: + props = json.loads(request.data) + database = props['db'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_detail', methods=['POST']) +def get_detail(): + try: + props = json.loads(request.data) + database = props['db'] + table = props['table'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' + table + ' limit 500').fetchall() + metaRes = engine.execute('select * from ' + database + '.' 
+ table + ' limit 500').keys() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData, "colIndex": i, "dataType": None} + meta.append(scores) + i += 1 + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': { + "columns": meta, + "rows": data + } + } + + +# 执行任意sql +@bp.route('/execute', methods=['POST']) +def execute_sql(): + try: + props = json.loads(request.data) + sql = props['query'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + database = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + database.append(rows) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': { + "rows": database + } + } diff --git a/apps/connector/bp/bp_Clickhouse.py b/apps/connector/bp/bp_Clickhouse.py new file mode 100644 index 00000000..9bfd6c9f --- /dev/null +++ b/apps/connector/bp/bp_Clickhouse.py @@ -0,0 +1,157 @@ +# 2022/8/28 +# 15:33 +#接口测试完成 +import json +from flask import ( + Blueprint, flash, g, redirect, render_template, request, session, url_for +) +from sqlalchemy import create_engine +from bp.bp_Sqlite import getUri + +bp = Blueprint('clickhouse', __name__, url_prefix='/clickhouse') + + +@bp.route('/database_list', methods=['POST']) +def get_databases(): + try: + props = json.loads(request.data) + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + # [['INFORMATION_SCHEMA',], ('default',), ('fh_data',), ('information_schema',), ('system',)] + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) 
+ } + else: + return { + 'success': True, + 'data': database + } + +# 不用了先放着 +@bp.route('/get_metadata', methods=['POST']) +def get_metadata(): + try: + props = json.loads(request.data) + database = props['schema'] + table = props['table'] + connect_id = props['connect_id'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('desc ' + database + '.' + table).fetchall() + database = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + database.append(rows) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_list', methods=['POST']) +def get_table_list(): + try: + props = json.loads(request.data) + database = props['db'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_detail', methods=['POST']) +def get_detail(): + try: + props = json.loads(request.data) + database = props['db'] + table = props['table'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' + table + ' limit 500').fetchall() + metaRes = engine.execute('desc ' + database + '.' 
+ table).fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.name, "colIndex": i, "dataType": colData.type} + meta.append(scores) + i += 1 + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + "success": True, + "data": { + "columns": meta, + "rows": data + } + } + + +# 执行任意sql +@bp.route('/execute', methods=['POST']) +def execute_sql(): + try: + props = json.loads(request.data) + sql = props['query'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + database = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + database.append(rows) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + "success": True, + "data": { + "rows": database + } + } diff --git a/apps/connector/bp/bp_Doris.py b/apps/connector/bp/bp_Doris.py new file mode 100644 index 00000000..7dfa950e --- /dev/null +++ b/apps/connector/bp/bp_Doris.py @@ -0,0 +1,155 @@ +# 2022/8/28 +# 15:33 +import json +from flask import ( + Blueprint, flash, g, redirect, render_template, request, session, url_for +) +from sqlalchemy import create_engine +from bp.bp_Sqlite import getUri + +bp = Blueprint('doris', __name__, url_prefix='/doris') + + +@bp.route('/database_list', methods=['POST']) +def get_databases(): + try: + props = json.loads(request.data) + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/get_metadata', methods=['POST']) +def 
# --- apps/connector/bp/bp_Doris.py ---
# 2022/8/28 15:33
"""Doris connector blueprint: browse databases/tables and run ad-hoc SQL.

FIX(review): get_detail previously read ``colData.name`` / ``colData.type``,
but rows returned by ``SHOW FULL COLUMNS`` expose ``Field`` / ``Type``
(exactly as sibling bp_Mysql uses), so the handler always raised
AttributeError and fell into its error branch.

NOTE(review): SQL is assembled by concatenating identifiers taken from the
request body -- trusted callers only (injection risk).
"""
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('doris', __name__, url_prefix='/doris')


@bp.route('/database_list', methods=['POST'])
def get_databases():
    """List databases; POST body: {"sourceId"}."""
    try:
        props = json.loads(request.data)
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SHOW DATABASES').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


# No longer used by the frontend; kept for compatibility.
@bp.route('/get_metadata', methods=['POST'])
def get_metadata():
    """Describe schema.table; POST body: {"schema", "table", "connect_id"}."""
    try:
        props = json.loads(request.data)
        target = props['schema'] + '.' + props['table']
        engine = create_engine(getUri(props['connect_id']), echo=True)
        rows = engine.execute('desc ' + target).fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


@bp.route('/table_list', methods=['POST'])
def get_table_list():
    """List tables of one database; POST body: {"db", "sourceId"}."""
    try:
        props = json.loads(request.data)
        db = props['db']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SHOW TABLES FROM ' + db).fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows plus column metadata of one table."""
    try:
        props = json.loads(request.data)
        target = props['db'] + '.' + props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        dataRes = engine.execute('select * from ' + target + ' limit 500').fetchall()
        metaRes = engine.execute('show full columns from ' + target).fetchall()
        data = [list(row) for row in dataRes]
        # SHOW FULL COLUMNS rows carry `Field` / `Type` (was .name/.type: bug).
        meta = [{"key": col.Field, "colIndex": i, "dataType": col.Type}
                for i, col in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"rows": data}}
get_table_list(): + try: + props = json.loads(request.data) + database = props['db'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + database = [] + for row in res: + database.append(row.TABLE_NAME) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_detail', methods=['POST']) +def get_detail(): + try: + props = json.loads(request.data) + database = props['db'] + table = props['table'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' + table + ' limit 500').fetchall() + metaRes = engine.execute('desc ' + database + '.' + table).fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.COLUMN_NAME, "colIndex": i, "dataType": colData.DATA_TYPE} + meta.append(scores) + i += 1 + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + "success": True, + "data": { + "columns": meta, + "rows": data + } + } + + +# 执行任意sql +@bp.route('/execute', methods=['POST']) +def execute_sql(): + try: + props = json.loads(request.data) + sql = props['query'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + database = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + database.append(rows) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + "success": True, + "data": { + "rows": database + } + } diff --git a/apps/connector/bp/bp_Druid.py b/apps/connector/bp/bp_Druid.py new file mode 100644 index 
00000000..3a0a8039 --- /dev/null +++ b/apps/connector/bp/bp_Druid.py @@ -0,0 +1,129 @@ +# 2022/8/28 +# 15:33 +# 接口测试完成 +import json +from flask import ( + Blueprint, flash, g, redirect, render_template, request, session, url_for +) +from sqlalchemy import create_engine +from bp.bp_Sqlite import getUri + +bp = Blueprint('druid', __name__, url_prefix='/druid') + + +@bp.route('/database_list', methods=['POST']) +def get_databases(): + try: + props = json.loads(request.data) + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('select SCHEMA_NAME from INFORMATION_SCHEMA.SCHEMATA ').fetchall() + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_list', methods=['POST']) +def get_table_list(): + try: + props = json.loads(request.data) + database = props['db'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute( + 'SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = \'' + database + '\'').fetchall() + database = [] + for row in res: + database.append(row.TABLE_NAME) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_detail', methods=['POST']) +def get_detail(): + try: + props = json.loads(request.data) + database = props['db'] + table = props['table'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' 
# --- apps/connector/bp/bp_Impala.py ---
# 2022/8/28 15:33
"""Apache Impala connector blueprint.

NOTE(review): SQL is assembled by concatenating identifiers taken from the
request body -- trusted callers only (injection risk).
"""
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('impala', __name__, url_prefix='/impala')


@bp.route('/database_list', methods=['POST'])
def get_databases():
    """List databases; POST body: {"sourceId"}."""
    try:
        props = json.loads(request.data)
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SHOW DATABASES').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


# No longer used by the frontend; kept for compatibility.
@bp.route('/get_metadata', methods=['POST'])
def get_metadata():
    """Describe schema.table; POST body: {"schema", "table", "connect_id"}."""
    try:
        props = json.loads(request.data)
        target = props['schema'] + '.' + props['table']
        engine = create_engine(getUri(props['connect_id']), echo=True)
        rows = engine.execute('desc ' + target).fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


@bp.route('/table_list', methods=['POST'])
def get_table_list():
    """List tables of one database; POST body: {"db", "sourceId"}."""
    try:
        props = json.loads(request.data)
        db = props['db']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SHOW TABLES FROM ' + db).fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows plus column metadata of one table."""
    try:
        props = json.loads(request.data)
        target = props['db'] + '.' + props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        dataRes = engine.execute('select * from ' + target + ' limit 500').fetchall()
        # Impala DESC rows expose col_name / data_type.
        metaRes = engine.execute('desc ' + target).fetchall()
        data = [list(row) for row in dataRes]
        meta = [{"key": col.col_name, "colIndex": i, "dataType": col.data_type}
                for i, col in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"rows": data}}
# --- apps/connector/bp/bp_Kylin.py ---
# 2022/8/28 15:33
# Kylin's SQL dialect has no SHOW DATABASES / SHOW TABLES / DESC statements --
# it is a cube-building tool and source tables are registered up front, so that
# information is visible when tables are pre-loaded.  It only supports query
# SQL: SELECT / subqueries / JOIN (inner, left) / UNION (ALL), aggregates
# (COUNT, COUNT_DISTINCT, MAX, MIN, SUM, PERCENTILE, TOP_N), window functions
# (ROW_NUMBER, RANK, DENSE_RANK, FIRST_VALUE, LAST_VALUE, LAG, LEAD, NTILE,
# AVG), plus CASE WHEN, CAST, SUBSTRING, COALESCE, STDDEV_SUM,
# INTERSECT_COUNT, INTERSECT_VALUE.
# The database_list / table_list / get_metadata endpoints that the sibling
# connectors expose are therefore deliberately absent here.
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('kylin', __name__, url_prefix='/kylin')


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows; column names come from result keys (no DESC)."""
    try:
        props = json.loads(request.data)
        target = props['db'] + '.' + props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        preview = 'select * from ' + target + ' limit 500'
        dataRes = engine.execute(preview).fetchall()
        # The preview query is issued a second time purely for its keys();
        # with no DESC available, column types are unknown (None).
        metaRes = engine.execute(preview).keys()
        data = [list(row) for row in dataRes]
        meta = [{"key": name, "colIndex": i, "dataType": None}
                for i, name in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"rows": data}}
# --- apps/connector/bp/bp_Mysql.py ---
# 2022/8/28 15:33
# finish API test
"""MySQL connector blueprint.

NOTE(review): SQL is assembled by concatenating identifiers taken from the
request body -- trusted callers only (injection risk).
"""
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('mysql', __name__, url_prefix='/mysql')


@bp.route('/database_list', methods=['POST'])
def get_databases():
    """List databases; POST body: {"sourceId"}."""
    try:
        props = json.loads(request.data)
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SHOW DATABASES').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/table_list', methods=['POST'])
def get_table_list():
    """List tables of one database; POST body: {"db", "sourceId"}."""
    try:
        props = json.loads(request.data)
        db = props['db']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SHOW TABLES FROM ' + db).fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


# No longer used, but kept here for now.
@bp.route('/get_metadata', methods=['POST'])
def get_metadata():
    """Describe db.table; POST body: {"db", "table", "connect_id"}."""
    try:
        props = json.loads(request.data)
        target = props['db'] + '.' + props['table']
        engine = create_engine(getUri(props['connect_id']), echo=True)
        rows = engine.execute('desc ' + target).fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows plus column metadata of one table."""
    try:
        props = json.loads(request.data)
        target = props['db'] + '.' + props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        dataRes = engine.execute('select * from ' + target + ' limit 500').fetchall()
        # MySQL DESC rows expose Field / Type.
        metaRes = engine.execute('desc ' + target).fetchall()
        data = [list(row) for row in dataRes]
        meta = [{"key": col.Field, "colIndex": i, "dataType": col.Type}
                for i, col in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {"success": True, "data": {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {"success": True, "data": {"rows": data}}
# --- apps/connector/bp/bp_Oracle.py ---
# 2022/8/28 15:33
# Testing notes (translated): data initially appeared lost because sessions
# were not committed; table visibility was inconsistent during testing (e.g.
# for_test stopped losing data but could not be DESC'd, fortest could be
# DESC'd but lost data after a restart).
"""Oracle connector blueprint.

NOTE(review): SQL is assembled by interpolating request-supplied identifiers
-- trusted callers only (injection risk).
"""
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('oracle', __name__, url_prefix='/oracle')


# Oracle has no separate "database" level -- choosing the user already fixes
# the schema -- so SHOW DATABASES is not valid and this handler is expected
# to answer through its error branch.
@bp.route('/database_list', methods=['POST'])
def get_databases():
    try:
        props = json.loads(request.data)
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SHOW DATABASES').fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


# No longer used by the frontend; kept for compatibility.
@bp.route('/get_metadata', methods=['POST'])
def get_metadata():
    """Column metadata + primary-key flag from the user_* dictionary views."""
    try:
        props = json.loads(request.data)
        database = props['schema']  # read for interface parity; unused below
        table = props['table']
        engine = create_engine(getUri(props['connect_id']), echo=True)
        rows = engine.execute(
            '''
            select t.table_name,
            t.column_name,
            t.data_type,
            t.data_length,
            t.nullable,
            t.column_id,
            c.comments,
            (SELECT CASE WHEN t.column_name = m.column_name THEN 1 ELSE 0 END FROM DUAL) iskey
            FROM user_tab_cols t,
            user_col_comments c,
            (select m.column_name
            from user_constraints s,
            user_cons_columns m
            where lower(m.table_name) = '{0}'
            and m.table_name = s.table_name
            and m.constraint_name = s.constraint_name
            and s.constraint_type = 'P') m
            WHERE lower(t.table_name) = '{1}'
            and c.table_name = t.table_name
            and c.column_name = t.column_name
            and t.hidden_column = 'NO'
            order by t.column_id
            '''.format(table, table)
        ).fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


# * -> tname
@bp.route('/table_list', methods=['POST'])
def get_table_list():
    """List the current user's tables (the `db` field is accepted but unused)."""
    try:
        props = json.loads(request.data)
        database = props['db']  # raises KeyError if absent; otherwise unused
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('select tname from tab').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows plus column metadata of one table."""
    try:
        props = json.loads(request.data)
        database = props['db']  # read for interface parity; unused below
        table = props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        dataRes = engine.execute('select * from ' + table + ' where rownum <= 500').fetchall()
        cmd = '''
            select t.table_name,
            t.column_name as column_name,
            t.data_type as data_type,
            t.data_length,
            t.nullable,
            t.column_id,
            c.comments,
            (SELECT CASE WHEN t.column_name = m.column_name THEN 1 ELSE 0 END FROM DUAL) iskey
            FROM user_tab_cols t,
            user_col_comments c,
            (select m.column_name
            from user_constraints s,
            user_cons_columns m
            where m.table_name = '{0}'
            and m.table_name = s.table_name
            and m.constraint_name = s.constraint_name
            and s.constraint_type = 'P') m
            WHERE t.table_name = '{1}'
            and c.table_name = t.table_name
            and c.column_name = t.column_name
            and t.hidden_column = 'NO'
            order by t.column_id
            '''.format(table, table)
        print('>>>', [cmd])  # debug trace of the generated SQL
        metaRes = engine.execute(cmd).fetchall()
        data = [list(row) for row in dataRes]
        meta = [{"key": col.column_name, "colIndex": i, "dataType": col.data_type}
                for i, col in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"rows": data}}
# --- apps/connector/bp/bp_Postgres.py ---
# 2022/8/28 15:33
# Testing completed.
"""PostgreSQL connector blueprint (catalog lookups via pg_catalog/information_schema).

NOTE(review): SQL is assembled by concatenating identifiers taken from the
request body -- trusted callers only (injection risk).
"""
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('postgres', __name__, url_prefix='/postgres')


@bp.route('/database_list', methods=['POST'])
def get_databases():
    """List schemas (namespaces); POST body: {"sourceId"}."""
    try:
        props = json.loads(request.data)
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('select nspname from pg_catalog.pg_namespace').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


# No longer used by the frontend; kept for compatibility.
@bp.route('/get_metadata', methods=['POST'])
def get_metadata():
    """Column names/types for schema.table via information_schema.columns."""
    try:
        props = json.loads(request.data)
        schema = props['schema']
        table = props['table']
        engine = create_engine(getUri(props['connect_id']), echo=True)
        rows = engine.execute(
            'select column_name, data_type from information_schema.columns where table_schema= \'' + schema + '\' and table_name= \'' + table + '\'').fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


@bp.route('/table_list', methods=['POST'])
def get_table_list():
    """List tables of one schema; POST body: {"db", "sourceId"}."""
    try:
        props = json.loads(request.data)
        schema = props['db']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute(
            'select tablename from pg_tables where schemaname=\'' + schema + '\'').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows plus column metadata of one table."""
    try:
        props = json.loads(request.data)
        schema = props['db']
        table = props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        dataRes = engine.execute('select * from ' + schema + '.' + table + ' limit 500').fetchall()
        metaRes = engine.execute(
            'select column_name, data_type from information_schema.columns where table_schema= \'' + schema + '\' and table_name= \'' + table + '\'').fetchall()
        data = [list(row) for row in dataRes]
        meta = [{"key": col.column_name, "colIndex": i, "dataType": col.data_type}
                for i, col in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {"success": True, "data": {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"rows": data}}
# --- apps/connector/bp/bp_Redshift.py ---
# 2022/8/28 15:33
"""Amazon Redshift connector blueprint (catalog lookups via pg_* views).

NOTE(review): SQL is assembled by interpolating request-supplied identifiers
-- trusted callers only (injection risk).
"""
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('redshift', __name__, url_prefix='/redshift')


@bp.route('/database_list', methods=['POST'])
def get_databases():
    """List schemas (namespaces); POST body: {"sourceId"}."""
    try:
        props = json.loads(request.data)
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('select nspname from pg_namespace').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


# Unused by the frontend.  NOTE(review): `desc` looks like it is not valid
# Redshift SQL, so this handler presumably always answers through its error
# branch -- confirm before relying on it.
@bp.route('/get_metadata', methods=['POST'])
def get_metadata():
    try:
        props = json.loads(request.data)
        target = props['schema'] + '.' + props['table']
        engine = create_engine(getUri(props['connect_id']), echo=True)
        rows = engine.execute('desc ' + target).fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


@bp.route('/table_list', methods=['POST'])
def get_table_list():
    """List tables of one schema via pg_table_def; POST body: {"db", "sourceId"}."""
    try:
        props = json.loads(request.data)
        schema = props['db']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('''select distinct(tablename) from pg_table_def where schemaname = '{0}' '''.format(schema)).fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows plus column metadata of one table."""
    try:
        props = json.loads(request.data)
        schema = props['db']
        table = props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        dataRes = engine.execute('select * from ' + schema + '.' + table + ' limit 500').fetchall()
        # pg_table_def rows expose `column` and `type` attributes.
        metaRes = engine.execute('''
            SELECT *
            FROM pg_table_def
            WHERE tablename = '{0}'
            AND schemaname = '{1}'
            '''.format(table, schema)).fetchall()
        data = [list(row) for row in dataRes]
        meta = [{"key": col.column, "colIndex": i, "dataType": col.type}
                for i, col in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {"success": True, "data": {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"rows": data}}
# --- apps/connector/bp/bp_SQLserver.py ---
# 2022/8/28 15:33
# Testing completed.  SQL Server has three naming levels (database.schema.table),
# so this module adds a schema_list endpoint and table-level requests must also
# POST a "schema" field.
"""SQL Server connector blueprint.

NOTE(review): SQL is assembled by interpolating request-supplied identifiers
-- trusted callers only (injection risk).
"""
import json
from flask import (
    Blueprint, flash, g, redirect, render_template, request, session, url_for
)
from sqlalchemy import create_engine
from bp.bp_Sqlite import getUri

bp = Blueprint('sqlserver', __name__, url_prefix='/sqlserver')


@bp.route('/database_list', methods=['POST'])
def get_databases():
    """List databases; POST body: {"sourceId"}."""
    try:
        props = json.loads(request.data)
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SELECT name FROM sys.databases').fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/schema_list', methods=['POST'])
def get_schemas():
    """List schemas of one database; POST body: {"db", "sourceId"}."""
    try:
        props = json.loads(request.data)
        db = props['db']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute('SELECT name FROM {0}.sys.schemas'.format(db)).fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


@bp.route('/table_list', methods=['POST'])
def get_table_list():
    """List tables of db.schema; POST body: {"db", "schema", "sourceId"}."""
    try:
        props = json.loads(request.data)
        db = props['db']
        schema = props['schema']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        rows = engine.execute(
            '''
            SELECT t.name FROM {0}.sys.tables AS t INNER JOIN {1}.sys.schemas AS s ON s.schema_id = t.schema_id WHERE s.name = '{2}'
            '''.format(db, db, schema)).fetchall()
        names = [item for row in rows for item in row]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': names}


# No longer used by the frontend; kept for compatibility.
@bp.route('/get_metadata', methods=['POST'])
def get_metadata():
    """Column name/type pairs via the sys* legacy catalog views."""
    try:
        props = json.loads(request.data)
        database = props['database']
        table = props['table']
        engine = create_engine(getUri(props['connect_id']), echo=True)
        rows = engine.execute('''
            SELECT SC.name,
            ST.name
            FROM {0}.sys.sysobjects SO,
            {1}.sys.syscolumns SC,
            {2}.sys.systypes ST
            WHERE SO.id = SC.id
            AND SO.xtype = 'U'
            AND SO.status >= 0
            AND SC.xtype = ST.xusertype
            AND SO.name = '{3}'
            ORDER BY SO.name, SC.colorder
            '''.format(database, database, database, table)).fetchall()
        result = [list(row) for row in rows]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': result}


@bp.route('/table_detail', methods=['POST'])
def get_detail():
    """Preview up to 500 rows plus column metadata of db.schema.table."""
    try:
        props = json.loads(request.data)
        database = props['db']
        schema = props['schema']
        table = props['table']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        dataRes = engine.execute('select top 500 * from ' + database + '.' + schema + '.' + table).fetchall()
        # NOTE(review): despite the aliases, `table_name` is the COLUMN name
        # (syscolumns.name) and `table_column` is its TYPE (systypes.name).
        metaRes = engine.execute('''
            SELECT SC.name as table_name,
            ST.name as table_column
            FROM {0}.sys.sysobjects SO,
            {1}.sys.syscolumns SC,
            {2}.sys.systypes ST
            WHERE SO.id = SC.id
            AND SO.xtype = 'U'
            AND SO.status >= 0
            AND SC.xtype = ST.xusertype
            AND SO.name = '{3}'
            ORDER BY SO.name, SC.colorder
            '''.format(database, database, database, table)).fetchall()
        data = [list(row) for row in dataRes]
        meta = [{"key": col.table_name, "colIndex": i, "dataType": col.table_column}
                for i, col in enumerate(metaRes, start=1)]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    return {'success': True, 'data': {"columns": meta, "rows": data}}


# Run arbitrary SQL supplied by the caller.
@bp.route('/execute', methods=['POST'])
def execute_sql():
    try:
        props = json.loads(request.data)
        sql = props['query']
        engine = create_engine(getUri(props['sourceId']), echo=True)
        data = [list(row) for row in engine.execute(sql).fetchall()]
    except Exception as e:
        return {'success': False, 'message': repr(e)}
    # NOTE(review): the success tail of this handler falls outside the
    # reviewed chunk; restored to match the identical pattern every sibling
    # connector module uses -- confirm against the original file.
    return {'success': True, 'data': {"rows": data}}
else: + return { + 'success': True, + 'data': { + "rows": database + } + } diff --git a/apps/connector/bp/bp_SparkSQL.py b/apps/connector/bp/bp_SparkSQL.py new file mode 100644 index 00000000..a6a6944c --- /dev/null +++ b/apps/connector/bp/bp_SparkSQL.py @@ -0,0 +1,155 @@ +# 2022/8/28 +# 15:33 +import json +from flask import ( + Blueprint, flash, g, redirect, render_template, request, session, url_for +) +from sqlalchemy import create_engine +from bp.bp_Sqlite import getUri + +bp = Blueprint('sparksql', __name__, url_prefix='/sparksql') + + +@bp.route('/database_list', methods=['POST']) +def get_databases(): + try: + props = json.loads(request.data) + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW DATABASES').fetchall() + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/get_metadata', methods=['POST']) +def get_metadata(): + try: + props = json.loads(request.data) + database = props['schema'] + table = props['table'] + connect_id = props['connect_id'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('desc ' + database + '.' 
+ table).fetchall() + database = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + database.append(rows) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_list', methods=['POST']) +def get_table_list(): + try: + props = json.loads(request.data) + database = props['db'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute('SHOW TABLES FROM ' + database).fetchall() + database = [] + for row in res: + for item in row: + database.append(item) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': database + } + + +@bp.route('/table_detail', methods=['POST']) +def get_detail(): + try: + props = json.loads(request.data) + database = props['db'] + table = props['table'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + dataRes = engine.execute('select * from ' + database + '.' + table + ' limit 500').fetchall() + metaRes = engine.execute('desc ' + database + '.' 
+ table).fetchall() + data = [] + for row in dataRes: + rows = [] + for item in row: + rows.append(item) + data.append(rows) + meta = [] + i = 1 + for colData in metaRes: + scores = {"key": colData.col_name, "colIndex": i, "dataType": colData.data_type} + meta.append(scores) + i += 1 + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': { + "columns": meta, + "rows": data + } + } + + +# 执行任意sql +@bp.route('/execute', methods=['POST']) +def execute_sql(): + try: + props = json.loads(request.data) + sql = props['query'] + connect_id = props['sourceId'] + uri = getUri(connect_id) + engine = create_engine(uri, echo=True) + res = engine.execute(sql).fetchall() + database = [] + for row in res: + rows = [] + for item in row: + rows.append(item) + database.append(rows) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': { + "rows": database + } + } diff --git a/apps/connector/bp/bp_Sqlite.py b/apps/connector/bp/bp_Sqlite.py new file mode 100644 index 00000000..27fe292f --- /dev/null +++ b/apps/connector/bp/bp_Sqlite.py @@ -0,0 +1,73 @@ +# 2022/8/29 +# 10:54 +# 接口测试完成 +import json +from flask import ( + Blueprint, request +) +from database import Base +from models.connection import Connection +from database import db_session + +bp = Blueprint('sqlite', __name__, url_prefix='/sqlite') + + +@bp.route('/upsert', methods=['POST']) +def upsert_url(): + try: + props = json.loads(request.data) + # connect_id = props['connect_id'] + uri = props['uri'] + source_type = props['sourceType'] + + + # if getUri(connect_id): + # Connection.query.filter(Connection.connect_id == connect_id).delete() + # db_session.commit() + + connection = Connection(uri=uri, source_type=source_type) + db_session.add(connection) + db_session.commit() + + # item_new = Connection.query.filter(Connection.connect_id == connect_id).first() + res = 
Connection.query.filter(Connection.uri == uri).first() + connectId = res.connect_id + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': connectId + } + + +@bp.route('/get_url', methods=['POST']) +def get_uri(): + try: + props = json.loads(request.data) + connect_id = props['connect_id'] + uri = getUri(connect_id) + sourceType = getSourceType(connect_id) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + "sourceType": sourceType, + 'data': uri + } + + +def getUri(connect_id): + res = Connection.query.filter(Connection.connect_id == connect_id).first() + return res.uri + + +def getSourceType(connect_id): + res = Connection.query.filter(Connection.connect_id == connect_id).first() + return res.source_type diff --git a/apps/connector/bp/bp_database.py b/apps/connector/bp/bp_database.py new file mode 100644 index 00000000..30e4c539 --- /dev/null +++ b/apps/connector/bp/bp_database.py @@ -0,0 +1,169 @@ +# 2022/9/9 +# 14:43 +import json +from flask import Blueprint, request +from models.connection import Connection +from database import db_session +from bp.basefunc import basefunc + +bp = Blueprint('database', __name__, url_prefix='/api') + + +@bp.route('/upsert', methods=['POST']) +def upsert_uri(): + try: + props = json.loads(request.data) + # connect_id = props['connect_id'] + uri = props['uri'] + source_type = props['sourceType'] + + # if getUri(connect_id): + # Connection.query.filter(Connection.connect_id == connect_id).delete() + # db_session.commit() + + connection = Connection(uri=uri, source_type=source_type) + db_session.add(connection) + db_session.commit() + + # item_new = Connection.query.filter(Connection.connect_id == connect_id).first() + res = Connection.query.filter(Connection.uri == uri).first() + connectId = res.connect_id + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 
'success': True, + 'data': connectId + } + + +@bp.route('/database_list', methods=['POST']) +def get_databases_list(): + try: + props = json.loads(request.data) + connect_id = props['sourceId'] + schema = props.get('schema', None) + uri = get_uri(connect_id) + source_type = get_source_type(connect_id) + dict__ = basefunc.__dict__ + db_list = dict__['{0}_getdb'.format(source_type)](uri=uri, schema=schema) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': db_list + } + + +@bp.route('/schema_list', methods=['POST']) +def get_schema_list(): + try: + props = json.loads(request.data) + connect_id = props['sourceId'] + database = props.get('db', None) + uri = get_uri(connect_id) + source_type = get_source_type(connect_id) + dict__ = basefunc.__dict__ + schema_list = dict__['{0}_getschema'.format(source_type)](uri=uri, db=database) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': schema_list + } + + +@bp.route('/table_list', methods=['POST']) +def get_table_list(): + try: + props: dict = json.loads(request.data) + database = props.get('db', None) + schema = props.get('schema', None) + connect_id = props['sourceId'] + uri = get_uri(connect_id) + source_type = get_source_type(connect_id) + dict__ = basefunc.__dict__ + table_list = dict__['{0}_gettable'.format(source_type)](uri=uri, database=database, schema=schema) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': table_list + } + + +@bp.route('/table_detail', methods=['POST']) +def get_table_detail(): + try: + props = json.loads(request.data) + database = props.get('db', None) + schema = props.get('schema', None) + table = props['table'] + connect_id = props['sourceId'] + uri = get_uri(connect_id) + source_type = get_source_type(connect_id) + dict__ = basefunc.__dict__ + meta = 
dict__['{0}_getmeta'.format(source_type)](uri=uri, database=database, table=table, schema=schema) + data = dict__['{0}_getdata'.format(source_type)](uri=uri, database=database, table=table, schema=schema) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': { + "columns": meta, + "rows": data + } + } + + +@bp.route('/execute', methods=['POST']) +def execute_sql(): + try: + props = json.loads(request.data) + sql = props['query'] + connect_id = props['sourceId'] + uri = get_uri(connect_id) + source_type = get_source_type(connect_id) + dict__ = basefunc.__dict__ + sql_result = dict__['{0}_getresult'.format(source_type)](uri=uri, sql=sql) + except Exception as e: + return { + 'success': False, + 'message': repr(e) + } + else: + return { + 'success': True, + 'data': { + "rows": sql_result + } + } + + +def get_uri(connect_id): + res = Connection.query.filter(Connection.connect_id == connect_id).first() + return res.uri + + +def get_source_type(connect_id): + res = Connection.query.filter(Connection.connect_id == connect_id).first() + return res.source_type diff --git a/apps/connector/database.py b/apps/connector/database.py new file mode 100644 index 00000000..2e7e3464 --- /dev/null +++ b/apps/connector/database.py @@ -0,0 +1,19 @@ +# 2022/8/29 +# 12:13 +from sqlalchemy import create_engine +from sqlalchemy.orm import scoped_session, sessionmaker +from sqlalchemy.ext.declarative import declarative_base + +engine = create_engine('sqlite:///./connect.db') +db_session = scoped_session(sessionmaker(autocommit=False, + autoflush=False, + bind=engine)) +Base = declarative_base() +Base.query = db_session.query_property() + + +def init_db(): + # 在这里导入定义模型所需要的所有模块,这样它们就会正确的注册在 + # 元数据上。否则你就必须在调用 init_db() 之前导入它们。 + import models.connection + Base.metadata.create_all(bind=engine) diff --git a/apps/connector/models/__init__.py b/apps/connector/models/__init__.py new file mode 100644 index 00000000..bbc57ddf 
--- /dev/null +++ b/apps/connector/models/__init__.py @@ -0,0 +1,2 @@ +# 2022/8/29 +# 13:27 diff --git a/apps/connector/models/connection.py b/apps/connector/models/connection.py new file mode 100644 index 00000000..1c468d6d --- /dev/null +++ b/apps/connector/models/connection.py @@ -0,0 +1,18 @@ +from sqlalchemy import Column, Integer, String +from database import Base + + +class Connection(Base): + __tablename__ = 'connection' + # id = Column(Integer, primary_key=True) + connect_id = Column(Integer, primary_key=True) + uri = Column(String(255), unique=False) + source_type = Column(String(50), unique=False) + + def __init__(self, connect_id=None, uri=None, source_type=None): + # self.connect_id = connect_id + self.uri = uri + self.source_type = source_type + + def __repr__(self): + return str(self.uri) \ No newline at end of file