diff --git a/assembly-package/config/application-exchangis.yml b/assembly-package/config/application-exchangis.yml
index 222fecbef..59caa497d 100644
--- a/assembly-package/config/application-exchangis.yml
+++ b/assembly-package/config/application-exchangis.yml
@@ -1,12 +1,12 @@
server:
- port: 9321
+ port: 9500
spring:
application:
name: exchangis-server
eureka:
client:
serviceUrl:
- defaultZone: http://127.0.0.1:20303/eureka/
+ defaultZone: http://127.0.0.1:20503/eureka/
instance:
metadata-map:
test: wedatasphere
@@ -17,4 +17,4 @@ management:
exposure:
include: refresh,info
logging:
- config: classpath:log4j2.xml
\ No newline at end of file
+ config: classpath:log4j2.xml
diff --git a/assembly-package/config/exchangis-server.properties b/assembly-package/config/exchangis-server.properties
index 39de4fecc..07a162edc 100644
--- a/assembly-package/config/exchangis-server.properties
+++ b/assembly-package/config/exchangis-server.properties
@@ -15,8 +15,6 @@
#
#
-#wds.linkis.test.mode=false
-
wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/database?useSSL=false&characterEncoding=UTF-8&allowMultiQueries=true
wds.linkis.server.mybatis.datasource.username=username
@@ -27,30 +25,27 @@ wds.linkis.log.clear=true
wds.linkis.server.version=v1
-## datasource client
+# datasource client
wds.exchangis.datasource.client.serverurl=
wds.exchangis.datasource.client.authtoken.key=DATASOURCE-AUTH
wds.exchangis.datasource.client.authtoken.value=DATASOURCE-AUTH
wds.exchangis.datasource.client.dws.version=v1
+# launcher client
+wds.exchangis.client.linkis.server-url=
+wds.exchangis.client.linkis.token.value=DATASOURCE-AUTH
wds.exchangis.datasource.extension.dir=exchangis-extds
-wds.exchangis.linkis.serverurl=
-
##restful
wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.exchangis.datasource.server.restful.api,\
com.webank.wedatasphere.exchangis.project.server.restful,\
com.webank.wedatasphere.exchangis.job.server.restful
-wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/dss/framework/appconn/dao/impl/*.xml,classpath*:com/webank/wedatasphere/dss/workflow/dao/impl/*.xml,classpath*:com/webank/wedatasphere/exchangis/job/server/mapper/impl/*.xml
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/exchangis/job/server/mapper/impl/*.xml,\
+ classpath*:com/webank/wedatasphere/exchangis/project/server/mapper/impl/*.xml
wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.exchangis.dao,\
- com.webank.wedatasphere.exchangis.project.server.dao,\
- com.webank.wedatasphere.linkis.configuration.dao,\
- com.webank.wedatasphere.dss.framework.appconn.dao,\
- com.webank.wedatasphere.dss.workflow.dao,\
- com.webank.wedatasphere.linkis.metadata.dao,\
- com.webank.wedatasphere.exchangis.job.server.mapper,\
- com.webank.wedatasphere.exchangis.job.server.dao
+ com.webank.wedatasphere.exchangis.project.server.mapper,\
+ com.webank.wedatasphere.exchangis.job.server.mapper
diff --git a/assembly-package/config/log4j2.xml b/assembly-package/config/log4j2.xml
index bcda90df7..70da2f238 100644
--- a/assembly-package/config/log4j2.xml
+++ b/assembly-package/config/log4j2.xml
@@ -25,16 +25,16 @@
-
-
-
-
-
-
+
+
+
+
+
-
+
@@ -42,4 +42,3 @@
-
diff --git a/db/exchangis_ddl.sql b/db/exchangis_ddl.sql
index 05ecdaf2a..916523046 100644
--- a/db/exchangis_ddl.sql
+++ b/db/exchangis_ddl.sql
@@ -1,166 +1,170 @@
--- exchangis_v3.exchangis_job_ds_bind definition
-
-CREATE TABLE `exchangis_job_ds_bind`
-(
- `id` bigint(20) NOT NULL AUTO_INCREMENT,
- `job_id` bigint(20) NOT NULL,
- `task_index` int(11) NOT NULL,
- `source_ds_id` bigint(20) NOT NULL,
- `sink_ds_id` bigint(20) NOT NULL,
- PRIMARY KEY (`id`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8
- COLLATE = utf8_bin;
-
-
--- exchangis_v3.exchangis_job_info definition
-
-CREATE TABLE `exchangis_job_info`
-(
- `id` bigint(20) NOT NULL AUTO_INCREMENT,
- `project_id` bigint(20) DEFAULT NULL,
- `dss_project_id` bigint(20) DEFAULT NULL,
- `dss_project_name` varchar(64) DEFAULT NULL,
- `node_id` varchar(64) DEFAULT NULL,
- `node_name` varchar(64) DEFAULT NULL,
- `job_name` varchar(100) DEFAULT NULL,
- `job_type` varchar(50) DEFAULT NULL,
- `task_name` varchar(255) DEFAULT NULL,
- `engine_type` varchar(50) DEFAULT NULL,
- `job_labels` varchar(255) DEFAULT NULL,
- `job_desc` varchar(255) DEFAULT NULL,
- `content` text,
- `alarm_user` varchar(50) DEFAULT NULL,
- `alarm_level` int(255) DEFAULT NULL,
- `proxy_user` varchar(50) DEFAULT NULL,
- `execute_node` varchar(255) DEFAULT NULL,
- `sync_type` varchar(50) DEFAULT NULL,
- `job_params` text,
- `domain` varchar(32) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `create_user` varchar(50) DEFAULT NULL,
- `modify_time` datetime DEFAULT NULL,
- `modify_user` varchar(50) DEFAULT NULL,
- PRIMARY KEY (`id`),
- UNIQUE KEY `uk_job_info_node_id` (`node_id`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8;
-
-
--- exchangis_v3.exchangis_job_param_config definition
-
-CREATE TABLE `exchangis_job_param_config`
-(
- `id` bigint(20) NOT NULL AUTO_INCREMENT,
- `config_key` varchar(64) NOT NULL,
- `config_name` varchar(64) NOT NULL,
- `config_direction` varchar(16) DEFAULT NULL,
- `type` varchar(32) NOT NULL,
- `ui_type` varchar(32) DEFAULT NULL,
- `ui_field` varchar(64) DEFAULT NULL,
- `ui_label` varchar(32) DEFAULT NULL,
- `unit` varchar(32) DEFAULT NULL,
- `required` bit(1) DEFAULT b'0',
- `value_type` varchar(32) DEFAULT NULL,
- `value_range` varchar(255) DEFAULT NULL,
- `default_value` varchar(255) DEFAULT NULL,
- `validate_type` varchar(64) DEFAULT NULL,
- `validate_range` varchar(64) DEFAULT NULL,
- `validate_msg` varchar(255) DEFAULT NULL,
- `is_hidden` bit(1) DEFAULT NULL,
- `is_advanced` bit(1) DEFAULT NULL,
- `level` tinyint(4) DEFAULT NULL,
- `treename` varchar(32) DEFAULT NULL,
- `sort` int(11) DEFAULT NULL,
- `description` varchar(255) DEFAULT NULL,
- `status` tinyint(4) DEFAULT NULL,
- PRIMARY KEY (`id`)
--- UNIQUE KEY `config_key` (`config_key`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8;
-
-
--- exchangis_v3.exchangis_launch_task definition
-
-CREATE TABLE `exchangis_launch_task`
-(
- `id` bigint(20) NOT NULL AUTO_INCREMENT,
- `task_name` varchar(100) DEFAULT NULL COMMENT '子任务名称',
- `job_id` bigint(20) DEFAULT NULL COMMENT '所属任务id',
- `job_name` varchar(100) DEFAULT NULL COMMENT '所属任务名称',
- `content` text COMMENT '子任务执行内容',
- `execute_node` varchar(255) DEFAULT NULL COMMENT '执行节点',
- `create_time` datetime DEFAULT NULL COMMENT '子任务创建时间',
- `create_user` varchar(50) DEFAULT NULL COMMENT '创建用户',
- `launch_time` datetime DEFAULT NULL COMMENT '触发时间',
- `proxy_user` varchar(50) DEFAULT NULL COMMENT '执行/代理用户',
- `params_json` text COMMENT '作业参数',
- `launch_id` varchar(64) DEFAULT NULL,
- `status` varchar(50) DEFAULT NULL COMMENT '状态:SUCCESS | FAILED | RUNNING | BUSY | IDLE | UNLOCK',
- `complete_time` datetime DEFAULT NULL COMMENT '完成/中止时间',
- `engine_type` varchar(64) DEFAULT NULL,
- PRIMARY KEY (`id`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8;
-
-
--- exchangis_v3.exchangis_metric definition
-
-CREATE TABLE `exchangis_metric`
-(
- `id` bigint(20) NOT NULL AUTO_INCREMENT,
- `title` varchar(128) DEFAULT NULL COMMENT '指标名称',
- `norm` varchar(128) DEFAULT NULL COMMENT '指标key',
- `value` text COMMENT '指标值',
- `ts` datetime DEFAULT NULL COMMENT '采集时间',
- `version` bigint(20) NOT NULL DEFAULT '1',
- PRIMARY KEY (`id`),
- UNIQUE KEY `norm` (`norm`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8;
-
-
--- exchangis_v3.exchangis_project definition
-
-CREATE TABLE `exchangis_project`
-(
- `id` bigint(20) NOT NULL AUTO_INCREMENT,
- `dss_project_id` bigint(20) DEFAULT NULL,
- `workspace_name` varchar(64) DEFAULT NULL,
- `dss_name` varchar(64) DEFAULT NULL,
- `name` varchar(64) NOT NULL,
- `description` varchar(255) DEFAULT NULL,
- `create_time` datetime DEFAULT NULL,
- `last_update_time` datetime DEFAULT NULL,
- `create_by` varchar(64) DEFAULT NULL,
- `last_update_by` varchar(64) DEFAULT NULL,
- `tags` varchar(255) DEFAULT NULL,
- `domain` varchar(32) DEFAULT NULL,
- `exec_users` varchar(255) DEFAULT NULL,
- `view_users` varchar(255) DEFAULT NULL,
- `edit_users` varchar(255) DEFAULT NULL,
- PRIMARY KEY (`id`),
- UNIQUE KEY `name_UNIQUE` (`name`),
- UNIQUE KEY `uk_project_dss_project_id` (`dss_project_id`),
- UNIQUE KEY `workspace_name_UNIQUE` (`workspace_name`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8;
-
-
--- exchangis_v3.exchangis_project_relation definition
-
-CREATE TABLE `exchangis_project_relation`
-(
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `project_id` bigint(20) DEFAULT NULL,
- `node_id` bigint(20) DEFAULT NULL,
- `project_version` varchar(32) COLLATE utf8_bin DEFAULT NULL,
- `flow_version` varchar(32) COLLATE utf8_bin DEFAULT NULL,
- `resource_id` bigint(20) DEFAULT NULL,
- `version` varchar(32) COLLATE utf8_bin DEFAULT NULL,
- PRIMARY KEY (`id`),
- UNIQUE KEY `uq_project_relation_project_id_node_id` (`project_id`, `node_id`),
- UNIQUE KEY `uq_project_relation_node_id` (`node_id`)
-) ENGINE = InnoDB
- DEFAULT CHARSET = utf8
- COLLATE = utf8_bin;
\ No newline at end of file
+-- exchangis_v4.exchangis_job_ds_bind definition
+
+CREATE TABLE `exchangis_job_ds_bind` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `job_id` bigint(20) NOT NULL,
+ `task_index` int(11) NOT NULL,
+ `source_ds_id` bigint(20) NOT NULL,
+ `sink_ds_id` bigint(20) NOT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+-- exchangis_v4.exchangis_job_entity definition
+
+CREATE TABLE `exchangis_job_entity` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(100) NOT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `engine_type` varchar(45) DEFAULT '',
+ `job_labels` varchar(255) DEFAULT NULL,
+ `create_user` varchar(100) DEFAULT NULL,
+ `job_content` mediumtext,
+ `execute_user` varchar(100) DEFAULT '',
+ `job_params` text,
+ `job_desc` varchar(255) DEFAULT NULL,
+ `job_type` varchar(50) DEFAULT NULL,
+ `project_id` bigint(13) DEFAULT NULL,
+ `source` text,
+ `modify_user` varchar(50) DEFAULT NULL COMMENT '修改用户',
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+
+-- exchangis_v4.exchangis_job_param_config definition
+
+CREATE TABLE `exchangis_job_param_config` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `config_key` varchar(64) NOT NULL,
+ `config_name` varchar(64) NOT NULL,
+ `config_direction` varchar(16) DEFAULT NULL,
+ `type` varchar(32) NOT NULL,
+ `ui_type` varchar(32) DEFAULT NULL,
+ `ui_field` varchar(64) DEFAULT NULL,
+ `ui_label` varchar(32) DEFAULT NULL,
+ `unit` varchar(32) DEFAULT NULL,
+ `required` bit(1) DEFAULT b'0',
+ `value_type` varchar(32) DEFAULT NULL,
+ `value_range` varchar(255) DEFAULT NULL,
+ `default_value` varchar(255) DEFAULT NULL,
+ `validate_type` varchar(64) DEFAULT NULL,
+ `validate_range` varchar(64) DEFAULT NULL,
+ `validate_msg` varchar(255) DEFAULT NULL,
+ `is_hidden` bit(1) DEFAULT NULL,
+ `is_advanced` bit(1) DEFAULT NULL,
+ `source` varchar(255) DEFAULT NULL,
+ `level` tinyint(4) DEFAULT NULL,
+ `treename` varchar(32) DEFAULT NULL,
+ `sort` int(11) DEFAULT NULL,
+ `description` varchar(255) DEFAULT NULL,
+ `status` tinyint(4) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- exchangis_v4.exchangis_project_info definition
+
+CREATE TABLE `exchangis_project_info` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(64) NOT NULL,
+ `description` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `create_user` varchar(64) DEFAULT NULL,
+ `last_update_user` varchar(64) DEFAULT NULL,
+ `project_labels` varchar(255) DEFAULT NULL,
+ `domain` varchar(32) DEFAULT NULL,
+ `exec_users` varchar(255) DEFAULT NULL,
+ `view_users` varchar(255) DEFAULT NULL,
+ `edit_users` varchar(255) DEFAULT NULL,
+ `source` text,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- NOTE(review): duplicate CREATE TABLE `exchangis_job_entity` — this table is already defined above with a different schema; confirm which definition is intended and drop the other
+
+
+CREATE TABLE `exchangis_job_entity` (
+ `id` bigint(20) NOT NULL,
+ `name` varchar(100) NOT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `engine_type` varchar(45) DEFAULT '',
+ `job_labels` varchar(64) DEFAULT NULL,
+ `create_user` varchar(100) DEFAULT NULL,
+ `job_content` text NOT NULL,
+ `execute_user` varchar(100) DEFAULT '',
+ `job_params` text NOT NULL,
+ `project_id` bigint(13) DEFAULT NULL,
+ `source` varchar(255) NOT NULL,
+ `modify_user` varchar(50) DEFAULT NULL COMMENT '修改用户',
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- exchangis_v4.exchangis_launchable_task definition
+
+CREATE TABLE `exchangis_launchable_task` (
+ `id` bigint(13) NOT NULL,
+ `name` varchar(100) NOT NULL,
+ `job_execution_id` varchar(64) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `engine_type` varchar(45) DEFAULT '',
+ `execute_user` varchar(50) DEFAULT '',
+ `linkis_job_name` varchar(100) NOT NULL,
+ `linkis_job_content` text NOT NULL,
+ `linkis_params` varchar(255) DEFAULT NULL,
+ `linkis_source` varchar(64) DEFAULT NULL,
+ `labels` varchar(64) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- exchangis_v4.exchangis_launched_job_entity definition
+
+CREATE TABLE `exchangis_launched_job_entity` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(100) NOT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `job_id` bigint(20) DEFAULT NULL,
+ `launchable_task_num` int(20) DEFAULT '0',
+ `engine_type` varchar(100) DEFAULT NULL,
+ `execute_user` varchar(100) DEFAULT NULL,
+ `job_name` varchar(100) DEFAULT NULL,
+ `status` varchar(100) DEFAULT NULL,
+ `progress` varchar(100) DEFAULT NULL,
+ `error_code` varchar(64) DEFAULT NULL,
+ `error_msg` varchar(255) DEFAULT NULL,
+ `retry_num` bigint(10) DEFAULT NULL,
+ `job_execution_id` varchar(255) DEFAULT NULL,
+ `log_path` varchar(255) DEFAULT NULL,
+ `create_user` varchar(100) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `job_execution_id_UNIQUE` (`job_execution_id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- exchangis_v4.exchangis_launched_task_entity definition
+
+CREATE TABLE `exchangis_launched_task_entity` (
+ `id` bigint(20) NOT NULL,
+ `name` varchar(100) NOT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `job_id` bigint(20) DEFAULT NULL,
+ `engine_type` varchar(100) DEFAULT NULL,
+ `execute_user` varchar(100) DEFAULT NULL,
+ `job_name` varchar(100) DEFAULT NULL,
+ `progress` varchar(64) DEFAULT NULL,
+ `error_code` varchar(64) DEFAULT NULL,
+ `error_msg` varchar(255) DEFAULT NULL,
+ `retry_num` bigint(10) DEFAULT NULL,
+ `task_id` varchar(64) DEFAULT NULL,
+ `linkis_job_id` varchar(200) DEFAULT NULL,
+ `linkis_job_info` varchar(1000) DEFAULT NULL,
+ `job_execution_id` varchar(100) DEFAULT NULL,
+ `launch_time` datetime DEFAULT NULL,
+ `running_time` datetime DEFAULT NULL,
+ `metrics` text,
+ `status` varchar(64) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
\ No newline at end of file
diff --git a/db/exchangis_dml.sql b/db/exchangis_dml.sql
index c264cdd11..a7172b2ee 100644
--- a/db/exchangis_dml.sql
+++ b/db/exchangis_dml.sql
@@ -1,29 +1,15 @@
-- 插入 job_param_config 记录
-INSERT INTO `exchangis_job_param_config` (`id`, `config_key`, `config_name`, `config_direction`, `type`, `ui_type`, `ui_field`, `ui_label`, `unit`, `required`, `value_type`, `value_range`, `default_value`, `validate_type`, `validate_range`, `validate_msg`, `is_hidden`, `is_advanced`, `source`, `level`, `treename`, `sort`, `description`, `status`)
-VALUES
-(1,'exchangis.datax.setting.speed.bytes','作业速率限制','','DATAX','INPUT','exchangis.datax.setting.speed.bytes','作业速率限制','Mb/s',b'1','NUMBER','','','REGEX','^[1-9]\\d*$','作业速率限制输入错误',b'0',b'0','', 1,'',1,'',1),
-(2,'exchangis.datax.setting.speed.records','作业记录数限制','','DATAX','INPUT','exchangis.datax.setting.speed.records','作业记录数限制','条/s',b'1','NUMBER','','','REGEX','^[1-9]\\d*$','作业记录数限制输入错误',b'0',b'0','', 1,'',2,'',1),
-(3,'exchangis.datax.setting.max.parallelism','作业最大并行度','','DATAX','INPUT','exchangis.datax.setting.max.parallelism','作业最大并行度','个',b'1','NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行度输入错误',b'0',b'0','', 1,'',3,'',1),
-(4,'exchangis.datax.setting.max.memory','作业最大使用内存','','DATAX','INPUT','exchangis.datax.setting.max.memory','作业最大使用内存','Mb',b'1','NUMBER','','1000','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',b'0',b'0','', 1,'',4,'',1),
-(5,'exchangis.datax.setting.errorlimit.record','最多错误记录数','','DATAX','INPUT','exchangis.datax.setting.errorlimit.record','最多错误记录数','条',b'1','NUMBER','','','REGEX','^[1-9]\\d*$','最多错误记录数输入错误',b'0',b'0','', 1,'',5,'',1),
-(6,'exchangis.sqoop.setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','exchangis.sqoop.setting.max.parallelism','作业最大并行数','个',b'1','NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',b'0',b'0','', 1,'',1,'',1),
-(7,'exchangis.sqoop.setting.max.memory','作业最大内存','','SQOOP','INPUT','exchangis.sqoop.setting.max.memory','作业最大内存','Mb',b'1','NUMBER','','1000','REGEX','^[1-9]\\d*$','作业最大内存输入错误',b'0',b'0','', 1,'',2,'',1),
-(8,'exchangis.job.ds.params.datax.mysql.r.join_condition','连接条件','DATAX-SOURCE','MYSQL','INPUT','exchangis.job.ds.params.datax.mysql.r.join_condition','连接条件','',b'0','VARCHAR','','','','','连接条件输入错误',b'0',b'0','', 1,'',1,'',1),
-(9,'exchangis.job.ds.params.datax.mysql.r.where_condition','WHERE条件','DATAX-SOURCE','MYSQL','INPUT','exchangis.job.ds.params.datax.mysql.r.where_condition','WHERE条件','',b'0','VARCHAR','','','','','WHERE条件输入错误',b'0',b'0','', 1,'',2,'',1),
-(10,'exchangis.job.ds.params.datax.hive.r.trans_proto','传输方式','DATAX-SOURCE','HIVE','OPTION','exchangis.job.ds.params.datax.hive.r.trans_proto','传输方式','',b'1','OPTION','[\"记录\",\"二进制\"]','记录','','','传输方式输入错误',b'0',b'0','', 1,'',1,'',1),
-(11,'exchangis.job.ds.params.datax.hive.r.partition','分区信息','DATAX-SOURCE','HIVE','INPUT','exchangis.job.ds.params.datax.hive.r.partition','分区信息','',b'0','VARCHAR','','','','','分区信息输入错误',b'0',b'0','', 1,'',2,'',1),
-(12,'exchangis.job.ds.params.datax.hive.r.row_format','字段格式','DATAX-SOURCE','HIVE','INPUT','exchangis.job.ds.params.datax.hive.r.row_format','字段格式','',b'0','VARCHAR','','','','','字段格式输入错误',b'0',b'0','', 1,'',3,'',1),
-(13,'exchangis.job.ds.params.datax.mysql.w.write_type','写入方式','DATAX-SINK','MYSQL','OPTION','exchangis.job.ds.params.datax.mysql.w.write_type','写入方式','',b'1','OPTION','[\"INSERT\",\"REPLACE\",\"UPDATE\"]','INSERT','','','写入方式输入错误',b'0',b'0','', 1,'',1,'',1),
-(14,'exchangis.job.ds.params.datax.mysql.w.batch_size','批量大小','DATAX-SINK','MYSQL','INPUT','exchangis.job.ds.params.datax.mysql.w.batch_size','批量大小','',b'0','NUMBER','','1000','REGEX','^[1-9]\\d*$','批量大小输入错误',b'0',b'0','', 1,'',2,'',1),
-(16,'exchangis.job.ds.params.datax.hive.w.partition','分区信息','DATAX-SINK','HIVE','INPUT','exchangis.job.ds.params.datax.hive.w.partition','分区信息','',b'0','VARCHAR','','','','','分股信息输入错误',b'0',b'0','', 1,'',2,'',1),
-(17,'exchangis.job.ds.params.datax.hive.w.row_format','字段格式','DATAX-SINK','HIVE','INPUT','exchangis.job.ds.params.datax.hive.w.row_format','字段格式','',b'0','VARCHAR','','','','','字段格式输入错误',b'0',b'0','', 1,'',3,'',1),
-(18,'exchangis.job.ds.params.datax.hive.w.write_type','写入策略','DATAX-SINK','HIVE','OPTION','exchangis.job.ds.params.datax.hive.w.write_type','写入策略','',b'1','OPTION','[\"清空目录\",\"追加数据\"]','清空目录','','','写入策略输入错误',b'0',b'0','', 1,'',4,'',1),
-(19,'exchangis.job.ds.params.datax.hive.w.sync_meta','同步元数据','DATAX-SINK','HIVE','OPTION','exchangis.job.ds.params.datax.hive.w.sync_meta','同步元数据','',b'1','OPTION','[\"是\",\"否\"]','是','','','同步元数据输入错误',b'0',b'0','', 1,'',5,'',1),
-(20,'exchangis.job.ds.params.sqoop.mysql.r.join_condition','连接条件','SQOOP-SOURCE','MYSQL','INPUT','exchangis.job.ds.params.sqoop.mysql.r.join_condition','连接条件','',b'0','VARCHAR','','','','','连接条件输入错误',b'0',b'0','', 1,'',1,'',1),
-(21,'exchangis.job.ds.params.sqoop.mysql.r.where_condition','WHERE条件','SQOOP-SOURCE','MYSQL','INPUT','exchangis.job.ds.params.sqoop.mysql.r.where_condition','WHERE条件','',b'0','VARCHAR','','','','','WHERE条件输入错误',b'0',b'0','', 1,'',2,'',1),
-(22,'exchangis.job.ds.params.sqoop.mysql.w.write_type','写入方式','SQOOP-SINK','MYSQL','OPTION','exchangis.job.ds.params.sqoop.mysql.w.write_type','写入方式','',b'1','OPTION','[\"UPDATEONLY\",\"ALLOWINSERT\"]','UPDATEONLY','','','写入方式输入错误',b'0',b'0','', 1,'',1,'',1),
-(23,'exchangis.job.ds.params.sqoop.mysql.w.batch_size','批量大小','SQOOP-SINK','MYSQL','INPUT','exchangis.job.ds.params.sqoop.mysql.w.batch_size','批量大小','',b'0','NUMBER','','1000','REGEX','^[1-9]\\d*$','批量大小输入错误',b'0',b'0','', 1,'',2,'',1),
-(25,'exchangis.job.ds.params.sqoop.hive.r.partition','分区信息','SQOOP-SOURCE','HIVE','INPUT','exchangis.job.ds.params.sqoop.hive.r.partition','分区信息','',b'0','VARCHAR','','','','','分区信息输入错误',b'0',b'0','/api/rest_j/v1/exchangis/job/partitionInfo?dataSourceType=&dbname&table=', 1,'',2,'',1),
-(26,'exchangis.job.ds.params.sqoop.hive.r.row_format','字段格式','SQOOP-SOURCE','HIVE','INPUT','exchangis.job.ds.params.sqoop.hive.r.row_format','字段格式','',b'0','VARCHAR','','','','','字段格式输入错误',b'0',b'0','', 1,'',3,'',1),
-(28,'exchangis.job.ds.params.sqoop.hive.w.partition','分区信息','SQOOP-SINK','HIVE','INPUT','exchangis.job.ds.params.sqoop.hive.w.partition','分区信息','',b'0','VARCHAR','','','','','分区信息输入错误',b'0',b'0','', 1,'',2,'',1),
-(29,'exchangis.job.ds.params.sqoop.hive.w.row_format','字段格式','SQOOP-SINK','HIVE','INPUT','exchangis.job.ds.params.sqoop.hive.w.row_format','字段格式','',b'0','VARCHAR','','','','','字段格式输入错误',b'0',b'0','', 1,'',3,'',1);
\ No newline at end of file
+INSERT INTO exchangis_job_param_config (config_key,config_name,config_direction,`type`,ui_type,ui_field,ui_label,unit,required,value_type,value_range,default_value,validate_type,validate_range,validate_msg,is_hidden,is_advanced,source,`level`,treename,sort,description,status) VALUES
+('setting.speed.bytes','作业速率限制','','DATAX','INPUT','setting.speed.bytes','作业速率限制','Mb/s',1,'NUMBER','','','REGEX','^[1-9]\\d*$','作业速率限制输入错误',0,0,'',1,'',1,'',1)
+,('setting.speed.records','作业记录数限制','','DATAX','INPUT','setting.speed.records','作业记录数限制','条/s',1,'NUMBER','','','REGEX','^[1-9]\\d*$','作业记录数限制输入错误',0,0,'',1,'',2,'',1)
+,('setting.max.parallelism','作业最大并行度','','DATAX','INPUT','setting.max.parallelism','作业最大并行度','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行度输入错误',0,0,'',1,'',3,'',1)
+,('setting.max.memory','作业最大使用内存','','DATAX','INPUT','setting.max.memory','作业最大使用内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大使用内存输入错误',0,0,'',1,'',4,'',1)
+,('setting.errorlimit.record','最多错误记录数','','DATAX','INPUT','setting.errorlimit.record','最多错误记录数','条',1,'NUMBER','','','REGEX','^[1-9]\\d*$','最多错误记录数输入错误',0,0,'',1,'',5,'',1)
+,('setting.max.parallelism','作业最大并行数','','SQOOP','INPUT','setting.max.parallelism','作业最大并行数','个',1,'NUMBER','','1','REGEX','^[1-9]\\d*$','作业最大并行数输入错误',0,0,'',1,'',1,'',1)
+,('setting.max.memory','作业最大内存','','SQOOP','INPUT','setting.max.memory','作业最大内存','Mb',1,'NUMBER','','1024','REGEX','^[1-9]\\d*$','作业最大内存输入错误',0,0,'',1,'',2,'',1)
+,('where','WHERE条件','SOURCE','MYSQL','INPUT','where','WHERE条件','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,500}$','WHERE条件输入过长',0,0,'',1,'',2,'',1)
+,('writeMode','写入方式','SQOOP-SINK','HIVE','OPTION','writeMode','写入方式(OVERWRITE只对TEXT类型表生效)','',1,'OPTION','["OVERWRITE","APPEND"]','OVERWRITE','','','写入方式输入错误',0,0,'',1,'',1,'',1)
+,('partition','分区信息','SINK','HIVE','MAP','partition','分区信息(文本)','',0,'VARCHAR','','','REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/exchangis/datasources/render/partition/element/map',1,'',2,'',1)
+,('partition','分区信息','SOURCE','HIVE','MAP','partition','分区信息(文本)',NULL,0,'VARCHAR',NULL,NULL,'REGEX','^[\\s\\S]{0,50}$','分区信息过长',0,0,'/api/rest_j/v1/exchangis/datasources/render/partition/element/map',1,NULL,1,NULL,1)
+,('writeMode','写入方式','SQOOP-SINK','MYSQL','OPTION','writeMode','写入方式',NULL,1,'OPTION','["INSERT","UPDATE"]','INSERT',NULL,NULL,'写入方式输入错误',0,0,NULL,1,NULL,1,NULL,1)
+;
diff --git a/exchangis-dao/pom.xml b/exchangis-dao/pom.xml
index 43262ec70..a8117fafc 100644
--- a/exchangis-dao/pom.xml
+++ b/exchangis-dao/pom.xml
@@ -14,6 +14,7 @@
8
8
+ 6.0.15.Final
@@ -26,11 +27,17 @@
org.apache.linkis
linkis-module
${linkis.version}
+
+
+ validation-api
+ javax.validation
+
+
- org.apache.linkis
- linkis-common
- ${linkis.version}
+ org.hibernate
+ hibernate-validator
+ ${hibernate.validator}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/ValidatorConfiguration.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/ValidatorConfiguration.java
new file mode 100644
index 000000000..882e71f50
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/ValidatorConfiguration.java
@@ -0,0 +1,21 @@
+package com.webank.wedatasphere.exchangis;
+
+import org.hibernate.validator.HibernateValidator;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import javax.validation.Validation;
+import javax.validation.Validator;
+
+/**
+ * Bean validator
+ */
+@Configuration
+public class ValidatorConfiguration {
+ @Bean
+ public Validator validator(){
+ return Validation.byProvider(HibernateValidator.class)
+ .configure().failFast(true)
+ .buildValidatorFactory().getValidator();
+ }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageQuery.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageQuery.java
new file mode 100644
index 000000000..ba95c3d80
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageQuery.java
@@ -0,0 +1,49 @@
+package com.webank.wedatasphere.exchangis.common.pager;
+
+import java.util.Objects;
+
+/**
+ * Query Vo
+ */
+public class PageQuery {
+
+ protected Integer current = 1;
+
+ protected Integer size = 10;
+
+ protected Integer page;
+
+ protected Integer pageSize;
+
+ public Integer getCurrent() {
+ return current;
+ }
+
+ public void setCurrent(Integer current) {
+ this.current = current;
+ }
+
+ public Integer getSize() {
+ return size;
+ }
+
+ public void setSize(Integer size) {
+ this.size = size;
+ }
+
+ public Integer getPage() {
+ return Objects.nonNull(page) ? page : current;
+ }
+
+ public void setPage(Integer page) {
+ this.page = page;
+ }
+
+ public Integer getPageSize() {
+ return Objects.nonNull(pageSize) ? pageSize : size;
+ }
+
+ public void setPageSize(Integer pageSize) {
+ this.pageSize = pageSize;
+ }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageResult.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageResult.java
new file mode 100644
index 000000000..8774b54b4
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/pager/PageResult.java
@@ -0,0 +1,61 @@
+package com.webank.wedatasphere.exchangis.common.pager;
+
+import com.github.pagehelper.PageInfo;
+import org.apache.linkis.server.Message;
+
+import java.util.List;
+
+/**
+ * Page result
+ * @param <T> element type of the paged list
+ */
+public class PageResult<T> {
+ /**
+ * Total
+ */
+ private Long total;
+
+ /**
+ * List
+ */
+ private List<T> list;
+
+ public PageResult(){
+
+ }
+
+ public PageResult(PageInfo<T> pageInfo){
+ this.total = pageInfo.getTotal();
+ this.list = pageInfo.getList();
+ }
+ /**
+ * To Message(in linkis-common)
+ * @return message
+ */
+ public Message toMessage(String info){
+ Message message = Message.ok(info);
+ message.data("total", total);
+ message.data("list", list);
+ return message;
+ }
+
+ public Message toMessage(){
+ return toMessage("");
+ }
+
+ public Long getTotal() {
+ return total;
+ }
+
+ public void setTotal(Long total) {
+ this.total = total;
+ }
+
+ public List<T> getList() {
+ return list;
+ }
+
+ public void setList(List<T> list) {
+ this.list = list;
+ }
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/InsertGroup.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/InsertGroup.java
new file mode 100644
index 000000000..1c05bc04d
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/InsertGroup.java
@@ -0,0 +1,10 @@
+package com.webank.wedatasphere.exchangis.common.validator.groups;
+
+import javax.validation.groups.Default;
+
+/**
+ * Insert group for validator
+ */
+public interface InsertGroup extends Default {
+
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/UpdateGroup.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/UpdateGroup.java
new file mode 100644
index 000000000..ce05e3f7e
--- /dev/null
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/common/validator/groups/UpdateGroup.java
@@ -0,0 +1,9 @@
+package com.webank.wedatasphere.exchangis.common.validator.groups;
+
+import javax.validation.groups.Default;
+
+/**
+ * Update group for validator
+ */
+public interface UpdateGroup extends Default {
+}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobEntity.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobEntity.java
deleted file mode 100644
index e28dee307..000000000
--- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobEntity.java
+++ /dev/null
@@ -1,221 +0,0 @@
-package com.webank.wedatasphere.exchangis.dao.domain;
-
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-import com.baomidou.mybatisplus.annotation.IdType;
-import com.baomidou.mybatisplus.annotation.TableField;
-import com.baomidou.mybatisplus.annotation.TableId;
-import com.baomidou.mybatisplus.annotation.TableName;
-
-@TableName("exchangis_job_info")
-public class ExchangisJobEntity {
-
- @TableId(type = IdType.AUTO)
- private Long id;
-
- @TableField("project_id")
- private Long projectId;
-
- @TableField("dss_project_id")
- private Long dssProjectId;
-
- @TableField("node_id")
- private String nodeId;
-
- @TableField("node_name")
- private String nodeName;
-
- private String jobName;
-
- private String jobType;
-
- private String engineType;
-
- private String jobLabels;
-
- private String jobDesc;
-
- private String content;
-
- private String alarmUser;
-
- private Integer alarmLevel;
-
- private String proxyUser;
-
- private String executeNode;
-
- private String syncType;
-
- private String jobParams;
-
- private Date createTime;
-
- private String createUser;
-
- private Date modifyTime;
-
- private String modifyUser;
-
- public Long getId() {
- return id;
- }
-
- public void setId(Long id) {
- this.id = id;
- }
-
- public Long getProjectId() {
- return projectId;
- }
-
- public void setProjectId(Long projectId) {
- this.projectId = projectId;
- }
-
- public Long getDssProjectId() {
- return dssProjectId;
- }
-
- public void setDssProjectId(Long dssProjectId) { this.dssProjectId = dssProjectId; }
-
- public String getNodeId() { return nodeId; }
-
- public void setNodeId(String nodeId) { this.nodeId = nodeId; }
-
- public String getNodeName() { return nodeName; }
-
- public void setNodeName(String nodeName) { this.nodeName = nodeName; }
-
- public String getJobName() {
- return jobName;
- }
-
- public void setJobName(String jobName) {
- this.jobName = jobName;
- }
-
- public String getJobType() {
- return jobType;
- }
-
- public void setJobType(String jobType) {
- this.jobType = jobType;
- }
-
- public String getEngineType() {
- return engineType;
- }
-
- public void setEngineType(String engineType) {
- this.engineType = engineType;
- }
-
- public String getJobLabels() {
- return jobLabels;
- }
-
- public void setJobLabels(String jobLabels) {
- this.jobLabels = jobLabels;
- }
-
- public String getJobDesc() {
- return jobDesc;
- }
-
- public void setJobDesc(String jobDesc) {
- this.jobDesc = jobDesc;
- }
-
- public String getContent() {
- return content;
- }
-
- public void setContent(String content) {
- this.content = content;
- }
-
- public String getAlarmUser() {
- return alarmUser;
- }
-
- public void setAlarmUser(String alarmUser) {
- this.alarmUser = alarmUser;
- }
-
- public Integer getAlarmLevel() {
- return alarmLevel;
- }
-
- public void setAlarmLevel(Integer alarmLevel) {
- this.alarmLevel = alarmLevel;
- }
-
- public String getProxyUser() {
- return proxyUser;
- }
-
- public void setProxyUser(String proxyUser) {
- this.proxyUser = proxyUser;
- }
-
- public String getExecuteNode() {
- return executeNode;
- }
-
- public void setExecuteNode(String executeNode) {
- this.executeNode = executeNode;
- }
-
- public String getSyncType() {
- return syncType;
- }
-
- public void setSyncType(String syncType) {
- this.syncType = syncType;
- }
-
- public String getJobParams() {
- return jobParams;
- }
-
- public void setJobParams(String jobParams) {
- this.jobParams = jobParams;
- }
-
- public Date getCreateTime() {
- return createTime;
- }
-
- public void setCreateTime(Date createTime) {
- this.createTime = createTime;
- }
-
- public String getCreateUser() {
- return createUser;
- }
-
- public void setCreateUser(String createUser) {
- this.createUser = createUser;
- }
-
- public Date getModifyTime() {
- return modifyTime;
- }
-
- public void setModifyTime(Date modifyTime) {
- this.modifyTime = modifyTime;
- }
-
- public String getModifyUser() {
- return modifyUser;
- }
-
- public void setModifyUser(String modifyUser) {
- this.modifyUser = modifyUser;
- }
-
-}
\ No newline at end of file
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java
index b35fe0934..40ce6e138 100644
--- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/domain/ExchangisJobParamConfig.java
@@ -64,7 +64,7 @@ public class ExchangisJobParamConfig {
private Boolean advanced;
/**
- * store url exa. http://127.0.0.1/api/v1/exchangis/xxx
+ * store url exa. http://127.0.0.1/api/v1/dss/exchangis/main/xxx
*/
private String source;
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/hook/MapperHook.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/hook/MapperHook.java
index 89a21334f..ba0de2947 100644
--- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/hook/MapperHook.java
+++ b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/hook/MapperHook.java
@@ -1,6 +1,5 @@
package com.webank.wedatasphere.exchangis.dao.hook;
-import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobInfoMapper;
import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -8,23 +7,13 @@
@Component
public class MapperHook {
- private ExchangisJobInfoMapper exchangisJobInfoMapper;
private ExchangisJobParamConfigMapper exchangisJobParamConfigMapper;
@Autowired
- public MapperHook(ExchangisJobInfoMapper exchangisJobInfoMapper, ExchangisJobParamConfigMapper exchangisJobParamConfigMapper) {
- this.exchangisJobInfoMapper = exchangisJobInfoMapper;
+ public MapperHook(ExchangisJobParamConfigMapper exchangisJobParamConfigMapper) {
this.exchangisJobParamConfigMapper = exchangisJobParamConfigMapper;
}
- public ExchangisJobInfoMapper getExchangisJobInfoMapper() {
- return exchangisJobInfoMapper;
- }
-
- public void setExchangisJobInfoMapper(ExchangisJobInfoMapper exchangisJobInfoMapper) {
- this.exchangisJobInfoMapper = exchangisJobInfoMapper;
- }
-
public ExchangisJobParamConfigMapper getExchangisJobParamConfigMapper() {
return exchangisJobParamConfigMapper;
}
diff --git a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobInfoMapper.java b/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobInfoMapper.java
deleted file mode 100644
index 0dc563635..000000000
--- a/exchangis-dao/src/main/java/com/webank/wedatasphere/exchangis/dao/mapper/ExchangisJobInfoMapper.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.webank.wedatasphere.exchangis.dao.mapper;
-
-import com.baomidou.mybatisplus.core.mapper.BaseMapper;
-import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobEntity;
-import org.apache.ibatis.annotations.Mapper;
-
-@Mapper
-public interface ExchangisJobInfoMapper extends BaseMapper {
-}
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/MetaColumn.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/MetaColumn.java
new file mode 100644
index 000000000..297a0333c
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/domain/MetaColumn.java
@@ -0,0 +1,69 @@
+package com.webank.wedatasphere.exchangis.datasource.core.domain;
+
+/**
+ * Meta column
+ */
+public class MetaColumn {
+
+ /**
+ * Column index
+ */
+ private int index = -1;
+
+ /**
+ * Is primary key
+ */
+ private boolean primaryKey;
+
+ /**
+ * Name
+ */
+ private String name;
+
+ /**
+ * Type symbol
+ */
+ private String type;
+
+ public MetaColumn(){
+
+ }
+
+ public MetaColumn(int index, String name, String type, boolean primaryKey){
+ this.index = index;
+ this.name = name;
+ this.type = type;
+ this.primaryKey = primaryKey;
+ }
+ public int getIndex() {
+ return index;
+ }
+
+ public void setIndex(int index) {
+ this.index = index;
+ }
+
+ public boolean isPrimaryKey() {
+ return primaryKey;
+ }
+
+ public void setPrimaryKey(boolean primaryKey) {
+ this.primaryKey = primaryKey;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+}
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceException.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceException.java
index e755366b5..2486a3bb6 100644
--- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceException.java
+++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/exception/ExchangisDataSourceException.java
@@ -2,6 +2,8 @@
import org.apache.linkis.common.exception.ErrorException;
+import org.apache.linkis.common.exception.ExceptionLevel;
+import org.apache.linkis.common.exception.LinkisRuntimeException;
public class ExchangisDataSourceException extends ErrorException {
@@ -17,4 +19,16 @@ public ExchangisDataSourceException(int errCode, String desc, String ip, int por
super(errCode, desc, ip, port, serviceKind);
}
+ public static class Runtime extends LinkisRuntimeException {
+
+ public Runtime(int errCode, String desc, Throwable t) {
+ super(errCode, desc);
+ super.initCause(t);
+ }
+
+ @Override
+ public ExceptionLevel getLevel() {
+ return ExceptionLevel.ERROR;
+ }
+ }
}
\ No newline at end of file
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java
index 58d380cea..04b7d78f3 100644
--- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java
+++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/service/MetadataInfoService.java
@@ -1,5 +1,6 @@
package com.webank.wedatasphere.exchangis.datasource.core.service;
+import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn;
import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException;
import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient;
@@ -45,4 +46,15 @@ Map getTableProps(ServiceRpcClient> rpcClient, String userName
* @throws ExchangisDataSourceException
*/
List getPartitionKeys(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException;
+
+ /**
+ * Get columns
+ * @param userName userName
+ * @param dataSourceId data source id
+ * @param database database
+ * @param table table
+ * @return
+ * @throws ExchangisDataSourceException
+ */
+ List getColumns(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException;
}
diff --git a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java
index 68d7b81ac..a59822cec 100644
--- a/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java
+++ b/exchangis-datasource/exchangis-datasource-core/src/main/java/com/webank/wedatasphere/exchangis/datasource/core/utils/Json.java
@@ -70,7 +70,7 @@ public static T fromJson(String json, Class> tClass, Class>... parameter
} catch (Exception e) {
logger.warn("Fail to process method 'fromJson(" +
(json.length() > 5 ? json.substring(0, 5) + "..." : json) + ": " + json.getClass() +
- ", " + tClass.getSimpleName() + ": "+ Class.class + ", ...: " + Class.class + ")");
+ ", " + tClass.getSimpleName() + ": "+ Class.class + ", ...: " + Class.class + ")", e);
return null;
}
}
diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisDataSourceInfoService.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisDataSourceInfoService.java
index cb0d658a0..e11bff3b6 100644
--- a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisDataSourceInfoService.java
+++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisDataSourceInfoService.java
@@ -1,3 +1,20 @@
+/*
+ Copyright 2022 WeBank
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+
+
package com.webank.wedatasphere.exchangis.datasource.linkis.service;
import com.webank.wedatasphere.exchangis.datasource.core.service.DataSourceInfoService;
diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java
index ecf9bd872..610e063ba 100644
--- a/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java
+++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/java/com/webank/wedatasphere/exchangis/datasource/linkis/service/LinkisMetadataInfoService.java
@@ -1,22 +1,29 @@
package com.webank.wedatasphere.exchangis.datasource.linkis.service;
+import com.webank.wedatasphere.exchangis.datasource.core.domain.MetaColumn;
import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException;
import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisServiceRpcException;
import com.webank.wedatasphere.exchangis.datasource.core.service.MetadataInfoService;
import com.webank.wedatasphere.exchangis.datasource.core.service.rpc.ServiceRpcClient;
import com.webank.wedatasphere.exchangis.datasource.linkis.ExchangisLinkisRemoteClient;
+//import com.webank.wedatasphere.exchangis.datasource.linkis.partition.MetadataGetPartitionsResult;
import com.webank.wedatasphere.exchangis.datasource.linkis.request.MetadataGetPartitionPropsAction;
import com.webank.wedatasphere.exchangis.datasource.linkis.response.MetadataGetPartitionPropsResult;
import com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc.LinkisDataSourceServiceOperation;
import com.webank.wedatasphere.exchangis.datasource.linkis.service.rpc.LinkisDataSourceServiceRpcDispatcher;
import org.apache.linkis.datasource.client.impl.LinkisMetaDataRemoteClient;
+import org.apache.linkis.datasource.client.request.MetadataGetColumnsAction;
import org.apache.linkis.datasource.client.request.MetadataGetPartitionsAction;
import org.apache.linkis.datasource.client.request.MetadataGetTablePropsAction;
+import org.apache.linkis.datasource.client.response.MetadataGetColumnsResult;
import org.apache.linkis.datasource.client.response.MetadataGetPartitionsResult;
import org.apache.linkis.datasource.client.response.MetadataGetTablePropsResult;
+import org.apache.linkis.metadatamanager.common.domain.MetaColumnInfo;
+import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import static com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceExceptionCode.*;
@@ -73,10 +80,23 @@ public Map getTableProps(ServiceRpcClient> rpcClient, String u
@Override
public List getPartitionKeys(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException {
MetadataGetPartitionsResult result = dispatch(getDefaultRemoteClient(), new LinkisDataSourceServiceOperation(() -> MetadataGetPartitionsAction.builder()
- .setDataSourceId(String.valueOf(dataSourceId)).setDatabase(database).setTable(table)
+ .setDataSourceId(dataSourceId).setDatabase(database).setTable(table)
.setUser(userName).setSystem(LINKIS_RPC_CLIENT_SYSTEM.getValue()).build()), CLIENT_METADATA_GET_PARTITION.getCode(), "getPartitionKeys");
return result.getPartitionInfo().getPartKeys();
}
+ @Override
+ public List getColumns(String userName, Long dataSourceId, String database, String table) throws ExchangisDataSourceException {
+ MetadataGetColumnsResult result = dispatch(getDefaultRemoteClient(), new LinkisDataSourceServiceOperation(() -> MetadataGetColumnsAction.builder()
+ .setSystem(LINKIS_RPC_CLIENT_SYSTEM.getValue())
+ .setDataSourceId(dataSourceId).setDatabase(database).setTable(table)
+ .setUser(userName).build()),CLIENT_METADATA_GET_PARTITION.getCode(), "getColumns");
+ List columnInfoList = result.getAllColumns();
+ List columns = new ArrayList<>();
+ Optional.ofNullable(columnInfoList).ifPresent(infoList -> infoList.forEach(info ->
+ columns.add(new MetaColumn(info.getIndex(), info.getName(), info.getType(), info.isPrimaryKey()))));
+ return columns;
+ }
+
}
diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/ParamsTestConnectAction.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/ParamsTestConnectAction.scala
new file mode 100644
index 000000000..7461f63fb
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/request/ParamsTestConnectAction.scala
@@ -0,0 +1,39 @@
+package com.webank.wedatasphere.exchangis.datasource.linkis.request
+
+import org.apache.linkis.datasource.client.config.DatasourceClientConfig.DATA_SOURCE_SERVICE_MODULE
+import org.apache.linkis.datasource.client.request.DataSourceAction
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+import org.apache.linkis.httpclient.request.POSTAction
+
+import java.util
+import scala.collection.JavaConverters.mapAsScalaMapConverter
+
+/**
+ * Connect test for the data source params
+ */
+class ParamsTestConnectAction extends POSTAction with DataSourceAction{
+
+ private var user: String = _
+
+ override def getRequestPayload: String = DWSHttpClient.jacksonJson.writeValueAsString(getRequestPayloads)
+
+ override def suffixURLs: Array[String] = Array(DATA_SOURCE_SERVICE_MODULE.getValue, "op", "connect", "json")
+
+ override def setUser(user: String): Unit = this.user = user
+
+ override def getUser: String = this.user
+
+ /**
+ *
+ * @param dataSource data source map
+ * @param user user
+ */
+ def this(dataSource: util.Map[String, Any], user: String){
+ this()
+ dataSource.asScala.foreach{
+ case (key, value) =>
+ this.addRequestPayload(key, value)
+ }
+ this.user = user
+ }
+}
diff --git a/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/ParamsTestConnectResult.scala b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/ParamsTestConnectResult.scala
new file mode 100644
index 000000000..1dc447082
--- /dev/null
+++ b/exchangis-datasource/exchangis-datasource-linkis/src/main/scala/com/webank/wedatasphere/exchangis/datasource/linkis/response/ParamsTestConnectResult.scala
@@ -0,0 +1,12 @@
+package com.webank.wedatasphere.exchangis.datasource.linkis.response
+
+import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult
+import org.apache.linkis.httpclient.dws.response.DWSResult
+
+import scala.beans.BeanProperty
+
+@DWSHttpMessageResult("/api/rest_j/v\\d+/data-source-manager/op/connect/json")
+class ParamsTestConnectResult extends DWSResult{
+ @BeanProperty var ok: Boolean = _
+
+}
diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java
index e64ba041b..00e6b5c2d 100644
--- a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java
+++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRenderRestfulApi.java
@@ -20,7 +20,7 @@
* Expose the ui interface to front-end rendering
*/
@RestController
-@RequestMapping(value = "exchangis/datasources/render", produces = {"application/json;charset=utf-8"})
+@RequestMapping(value = "dss/exchangis/main/datasources/render", produces = {"application/json;charset=utf-8"})
public class ExchangisDataSourceRenderRestfulApi {
private static final Logger LOG = LoggerFactory.getLogger(ExchangisDataSourceRenderRestfulApi.class);
@@ -52,11 +52,8 @@ public Message partition(@PathVariable("elementType") String type,
LOG.error(uiMessage + ", reason: " + e.getMessage(), e);
result = Message.error(uiMessage);
}
- result.setMethod("/api/rest_j/v1/exchangis/datasources/render/partition/element/" + type);
+ result.setMethod("/api/rest_j/v1/dss/exchangis/main/datasources/render/partition/element/" + type);
return result;
}
- public static void main(String[] args){
- ElementUI.Type.valueOf("map".toUpperCase(Locale.ROOT));
- }
}
diff --git a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java
index 6ac152306..26803afb6 100644
--- a/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java
+++ b/exchangis-datasource/exchangis-datasource-server/src/main/java/com/webank/wedatasphere/exchangis/datasource/server/restful/api/ExchangisDataSourceRestfulApi.java
@@ -3,27 +3,36 @@
import com.webank.wedatasphere.exchangis.datasource.core.exception.ExchangisDataSourceException;
import com.webank.wedatasphere.exchangis.datasource.core.ui.ElementUI;
import com.webank.wedatasphere.exchangis.datasource.service.ExchangisDataSourceService;
+import com.webank.wedatasphere.exchangis.datasource.vo.DataSourceCreateVO;
import com.webank.wedatasphere.exchangis.datasource.vo.DataSourceQueryVO;
import com.webank.wedatasphere.exchangis.datasource.vo.FieldMappingVO;
import org.apache.linkis.server.Message;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.validation.BindingResult;
+import org.springframework.validation.FieldError;
+import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
+import javax.validation.Valid;
import javax.ws.rs.QueryParam;
import java.util.List;
import java.util.Map;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
@RestController
-@RequestMapping(value = "exchangis/datasources", produces = {"application/json;charset=utf-8"})
+@RequestMapping(value = "dss/exchangis/main/datasources", produces = {"application/json;charset=utf-8"})
public class ExchangisDataSourceRestfulApi {
private static final Logger LOG = LoggerFactory.getLogger(ExchangisDataSourceRestfulApi.class);
private final ExchangisDataSourceService exchangisDataSourceService;
+ private static Pattern p = Pattern.compile("(?<=\\[)[^]]+");
+
@Autowired
public ExchangisDataSourceRestfulApi(ExchangisDataSourceService exchangisDataSourceService) {
this.exchangisDataSourceService = exchangisDataSourceService;
@@ -32,13 +41,39 @@ public ExchangisDataSourceRestfulApi(ExchangisDataSourceService exchangisDataSou
// list all datasource types
@RequestMapping( value = "/type", method = RequestMethod.GET)
public Message listDataSourceTypes(HttpServletRequest request) throws Exception {
- return this.exchangisDataSourceService.listDataSources(request);
+ Message message = null;
+ try{
+ message = exchangisDataSourceService.listDataSources(request);
+ } catch (ExchangisDataSourceException e) {
+ String errorMessage = "Error occur while list datasource type";
+ LOG.error(errorMessage, e);
+
+ String errorNote = e.getMessage();
+ Matcher matcher = p.matcher(errorNote);
+ if (matcher.find()) {
+ message = Message.error(matcher.group());
+ }
+ else{
+ message = Message.error("Getting datasource type list fail (获取数据源类型列表失败)");
+ }
+ }
+ return message;
+
}
// query paged datasource
@RequestMapping( value = "/query", method = {RequestMethod.GET,RequestMethod.POST})
public Message create(HttpServletRequest request, @RequestBody DataSourceQueryVO vo) throws Exception {
- return this.exchangisDataSourceService.queryDataSources(request, vo);
+ Message message = null;
+ try{
+ message = exchangisDataSourceService.queryDataSources(request, vo);
+ } catch (ExchangisDataSourceException e) {
+ String errorMessage = "Error occur while query datasource";
+ LOG.error(errorMessage, e);
+ message = Message.error("查询数据源失败");
+ }
+ return message;
+
}
// list all datasources
@@ -51,7 +86,24 @@ public Message listAllDataSources(
@RequestParam(value = "page", required = false) Integer page,
@RequestParam(value = "size", required = false) Integer size
) throws Exception {
- return this.exchangisDataSourceService.listAllDataSources(request, typeName, typeId, page, size);
+ Message message = null;
+ try{
+ message = exchangisDataSourceService.listAllDataSources(request, typeName, typeId, page, size);
+ } catch (ExchangisDataSourceException e) {
+ String errorMessage = "Error occur while getting datasource list";
+ LOG.error(errorMessage, e);
+
+ String errorNote = e.getMessage();
+ Matcher matcher = p.matcher(errorNote);
+ if (matcher.find()) {
+ message = Message.error(matcher.group());
+ }
+ else{
+ message = Message.error("Getting datasource list fail (获取数据源列表失败)");
+ }
+ }
+ return message;
+
}
// get datasource key define
@@ -60,59 +112,181 @@ public Message getDataSourceKeyDefine(
HttpServletRequest request,
@PathVariable("dataSourceTypeId") Long dataSourceTypeId
) throws Exception {
- return this.exchangisDataSourceService.getDataSourceKeyDefine(request, dataSourceTypeId);
+ Message message = null;
+ try{
+ message = exchangisDataSourceService.getDataSourceKeyDefine(request, dataSourceTypeId);
+ } catch (ExchangisDataSourceException e) {
+ String errorMessage = "Error occur while getting datasource key define";
+ LOG.error(errorMessage, e);
+ message = Message.error("获取数据源主键定义失败");
+ }
+ return message;
+
}
// get datasource version list
@RequestMapping( value = "/{id}/versions", method = RequestMethod.GET)
public Message getDataSourceVersionsById(HttpServletRequest request, @PathVariable("id") Long id) throws Exception {
- return this.exchangisDataSourceService.getDataSourceVersionsById(request, id);
- }
-
- // create datasource
- @RequestMapping( value = "", method = RequestMethod.POST)
- public Message create(HttpServletRequest request, /*@PathParam("type") String type, */@RequestBody Map json) throws Exception {
Message message = null;
try{
- message = exchangisDataSourceService.create(request, json);
+ message = exchangisDataSourceService.getDataSourceVersionsById(request, id);
} catch (ExchangisDataSourceException e) {
- String errorMessage = "Error occur while create datasource";
+ String errorMessage = "Error occur while getting datasource version";
LOG.error(errorMessage, e);
- message = Message.error("已存在同名任务");
+
+ String errorNote = e.getMessage();
+ Matcher matcher = p.matcher(errorNote);
+ if (matcher.find()) {
+ message = Message.error(matcher.group());
+ }
+ else{
+ message = Message.error("Getting datasource version fail (获取数据源版本失败)");
+ }
+ }
+ return message;
+
+ }
+
+ // create datasource
+ @RequestMapping( value = "", method = RequestMethod.POST)
+ public Message create(/*@PathParam("type") String type, */@Valid @RequestBody DataSourceCreateVO dataSourceCreateVO, BindingResult bindingResult, HttpServletRequest request ) throws Exception {
+ Message message = new Message();
+ LOG.info("dataSourceName: " + dataSourceCreateVO.getDataSourceName() + "dataSourceDesc: " + dataSourceCreateVO.getDataSourceDesc() + "label: " + dataSourceCreateVO.getLabels());
+ if(bindingResult.hasErrors()){
+ List fieldErrors = bindingResult.getFieldErrors();
+ for(int i=0;i json) throws Exception {
- return this.exchangisDataSourceService.updateDataSource(request, /*type, */id, json);
+ public Message update(HttpServletRequest request,/* @PathParam("type") String type, */@PathVariable("id") Long id, @Valid @RequestBody DataSourceCreateVO dataSourceCreateVO, BindingResult bindingResult) throws Exception {
+ Message message = new Message();
+
+ LOG.info("dataSourceName: " + dataSourceCreateVO.getDataSourceName() + "dataSourceDesc: " + dataSourceCreateVO.getDataSourceDesc() + "label: " + dataSourceCreateVO.getLabels());
+ if(bindingResult.hasErrors()){
+ List fieldErrors = bindingResult.getFieldErrors();
+ for(int i=0;i> jobSettingsUI = this.exchangisDataSourceService.getJobEngineSettingsUI(engineType);
return Message.ok().data("ui", jobSettingsUI);
}
diff --git a/exchangis-datasource/exchangis-datasource-service/pom.xml b/exchangis-datasource/exchangis-datasource-service/pom.xml
index 68a6191fe..0c692d791 100644
--- a/exchangis-datasource/exchangis-datasource-service/pom.xml
+++ b/exchangis-datasource/exchangis-datasource-service/pom.xml
@@ -24,6 +24,12 @@
1.0.0-RC1
+
+
+ com.webank.wedatasphere.exchangis
+ exchangis-job-common
+ 1.0.0-RC1
+
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/CreateDataSourceSuccessResultDTO.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/CreateDataSourceSuccessResultDTO.java
index fa0210be1..1398ecd32 100644
--- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/CreateDataSourceSuccessResultDTO.java
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/dto/CreateDataSourceSuccessResultDTO.java
@@ -15,15 +15,15 @@ public void setData(InsertIdDTO data) {
}
public static class InsertIdDTO {
- @JsonProperty(value = "insert_id")
- private Long id;
+ @JsonProperty(value = "insertId")
+ private Long insertId;
public Long getId() {
- return id;
+ return insertId;
}
- public void setId(Long id) {
- this.id = id;
+ public void setId(Long insertId) {
+ this.insertId = insertId;
}
}
}
diff --git a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java
index 4c519c9cf..75faf4f12 100644
--- a/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java
+++ b/exchangis-datasource/exchangis-datasource-service/src/main/java/com/webank/wedatasphere/exchangis/datasource/service/AbstractDataSourceService.java
@@ -5,25 +5,23 @@
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
-import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobEntity;
import com.webank.wedatasphere.exchangis.dao.domain.ExchangisJobParamConfig;
-import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobInfoMapper;
import com.webank.wedatasphere.exchangis.dao.mapper.ExchangisJobParamConfigMapper;
import com.webank.wedatasphere.exchangis.datasource.core.ExchangisDataSource;
import com.webank.wedatasphere.exchangis.datasource.core.context.ExchangisDataSourceContext;
import com.webank.wedatasphere.exchangis.datasource.core.ui.*;
import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.DefaultDataSourceUIViewer;
import com.webank.wedatasphere.exchangis.datasource.core.ui.viewer.ExchangisDataSourceUIViewer;
+import com.webank.wedatasphere.exchangis.datasource.core.utils.Json;
import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobDataSourcesContent;
import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobInfoContent;
import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobParamsContent;
import com.webank.wedatasphere.exchangis.datasource.core.vo.ExchangisJobTransformsContent;
import com.webank.wedatasphere.exchangis.datasource.dto.GetDataSourceInfoResultDTO;
+import com.webank.wedatasphere.exchangis.job.domain.ExchangisJobEntity;
import org.apache.commons.lang.StringUtils;
import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient;
import org.apache.linkis.datasource.client.request.GetInfoByDataSourceIdAction;
-import org.apache.linkis.datasourcemanager.common.exception.JsonErrorException;
-import org.apache.linkis.datasourcemanager.common.util.json.Json;
import org.apache.linkis.httpclient.response.Result;
import org.apache.linkis.server.security.SecurityFilter;
import org.slf4j.Logger;
@@ -37,15 +35,13 @@ public class AbstractDataSourceService {
protected final ObjectMapper mapper = new ObjectMapper();
protected final ExchangisDataSourceContext context;
protected final ExchangisJobParamConfigMapper exchangisJobParamConfigMapper;
- protected final ExchangisJobInfoMapper exchangisJobInfoMapper;
private final static Logger LOG = LoggerFactory.getLogger(AbstractDataSourceService.class);
- public AbstractDataSourceService(ExchangisDataSourceContext context, ExchangisJobParamConfigMapper exchangisJobParamConfigMapper, ExchangisJobInfoMapper exchangisJobInfoMapper) {
+ public AbstractDataSourceService(ExchangisDataSourceContext context, ExchangisJobParamConfigMapper exchangisJobParamConfigMapper) {
this.context = context;
this.exchangisJobParamConfigMapper = exchangisJobParamConfigMapper;
- this.exchangisJobInfoMapper = exchangisJobInfoMapper;
}
protected List parseJobContent(String content) {
@@ -99,12 +95,9 @@ private ExchangisDataSourceIdsUI buildDataSourceIdsUI(HttpServletRequest request
Result execute = dsClient.execute(action);
String responseBody = execute.getResponseBody();
GetDataSourceInfoResultDTO dsInfo = null;
- try {
- dsInfo = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class);
- source.setDs(dsInfo.getData().getInfo().getDataSourceName());
- } catch (JsonErrorException e) {
- //TODO throws Exception
- }
+ dsInfo = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class);
+ assert dsInfo != null;
+ source.setDs(dsInfo.getData().getInfo().getDataSourceName());
});
});
source.setDb(split[2]);
@@ -129,12 +122,9 @@ private ExchangisDataSourceIdsUI buildDataSourceIdsUI(HttpServletRequest request
Result execute = dsClient.execute(action);
String responseBody = execute.getResponseBody();
GetDataSourceInfoResultDTO dsInfo = null;
- try {
- dsInfo = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class);
- sink.setDs(dsInfo.getData().getInfo().getDataSourceName());
- } catch (JsonErrorException e) {
- //TODO throw Exception
- }
+ dsInfo = Json.fromJson(responseBody, GetDataSourceInfoResultDTO.class);
+ assert dsInfo != null;
+ sink.setDs(dsInfo.getData().getInfo().getDataSourceName());
});
});
@@ -293,7 +283,8 @@ private ElementUI> fillElementUIValue(ExchangisJobParamConfig config, Object v
case MAP:
Map mapElement = null;
try {
- mapElement = Json.fromJson(String.valueOf(value), Map.class);
+ mapElement = Json.fromJson(Json.toJson(value, null),
+ Map.class, String.class, Object.class);
} catch (Exception e) {
LOG.info("Exception happened while parse json"+ "Config value: " + value + "message: " + e.getMessage(), e);
}
@@ -359,4 +350,5 @@ private MapElementUI fillMapElementUIValue(ExchangisJobParamConfig config, Map getJobDataSourceUIs(HttpServletRequest request, Long jobId) {
if (Objects.isNull(jobId)) {
return null;
}
- ExchangisJobEntity job = this.exchangisJobInfoMapper.selectById(jobId);
+ ExchangisJobEntity job;
+ try {
+ job = this.jobOpenService.getJobById(jobId, false);
+ } catch (ExchangisJobException e) {
+ throw new ExchangisDataSourceException
+ .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e);
+ }
if (Objects.isNull(job)) {
return null;
}
- List jobInfoContents = this.parseJobContent(job.getContent());
+ List jobInfoContents = this.parseJobContent(job.getJobContent());
List uis = new ArrayList<>();
for (ExchangisJobInfoContent cnt : jobInfoContents) {
cnt.setEngine(job.getEngineType());
@@ -121,7 +132,7 @@ public Message listDataSources(HttpServletRequest request) throws Exception {
List dtos = new ArrayList<>();
String userName = SecurityFilter.getLoginUsername(request);
- LOGGER.info("listDataSources userName:" + userName);
+ LOGGER.info("listDataSources userName: {}", userName);
// 通过 datasourcemanager 获取的数据源类型和context中的数据源通过 type 和 name 比较
// 以 exchangis 中注册了的数据源集合为准
@@ -168,14 +179,15 @@ public Message listDataSources(HttpServletRequest request) throws Exception {
}
@Transactional
- public Message create(HttpServletRequest request, /*String type, */Map json) throws Exception {
- DataSourceCreateVO vo;
+ public Message create(HttpServletRequest request, /*String type, */DataSourceCreateVO vo) throws Exception {
+ //DataSourceCreateVO vo;
+ Map json;
try {
- vo = mapper.readValue(mapper.writeValueAsString(json), DataSourceCreateVO.class);
+ json = mapper.readValue(mapper.writeValueAsString(vo), Map.class);
+ json.put("labels",json.get("label"));
} catch (JsonProcessingException e) {
throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARSE_JSON_ERROR.getCode(), e.getMessage());
}
-
String comment = vo.getComment();
String createSystem = vo.getCreateSystem();
if (Objects.isNull(comment)) {
@@ -192,21 +204,10 @@ public Message create(HttpServletRequest request, /*String type, */Map connectParams = vo.getConnectParams();
-// if (!Objects.isNull(connectParams)) {
-// // 如果是 hive 类型,需要处理成连接字符串 TODO
-// Object host = connectParams.get("host");
-// Object port = connectParams.get("port");
-// if (!Objects.isNull(host) && !Objects.isNull(port)) {
-// String uris = "thrift://" + connectParams.get("host") + ":" + connectParams.get("port");
-// connectParams.put("uris", uris);
-// }
-// json.put("parameter", mapper.writeValueAsString(connectParams));
-// }
LOGGER.info("create datasource json as follows");
Set> entries = json.entrySet();
for (Map.Entry entry : entries) {
@@ -215,11 +216,6 @@ public Message create(HttpServletRequest request, /*String type, */Map json) throws Exception {
- DataSourceUpdateVO vo;
+ public Message updateDataSource(HttpServletRequest request,/* String type,*/ Long id, DataSourceCreateVO vo) throws Exception {
+
+ Map json;
try {
- vo = mapper.readValue(mapper.writeValueAsString(json), DataSourceUpdateVO.class);
+ json = mapper.readValue(mapper.writeValueAsString(vo), Map.class);
+ json.put("labels",json.get("label"));
} catch (JsonProcessingException e) {
- throw new ExchangisDataSourceException(30401, e.getMessage());
+ throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARSE_JSON_ERROR.getCode(), e.getMessage());
}
-
String comment = vo.getComment();
String createSystem = vo.getCreateSystem();
if (Objects.isNull(comment)) {
@@ -293,31 +290,13 @@ public Message updateDataSource(HttpServletRequest request,/* String type,*/ Lon
throw new ExchangisDataSourceException(30401, "exchangis.datasource.null");
}
-// Map connectParams = vo.getConnectParams();
-// if (!Objects.isNull(connectParams)) {
-// // 如果是 hive 类型,需要处理成连接字符串 TODO
-// Object host = connectParams.get("host");
-// Object port = connectParams.get("port");
-// if (!Objects.isNull(host) && !Objects.isNull(port)) {
-// String uris = "thrift://" + connectParams.get("host") + ":" + connectParams.get("port");
-// connectParams.put("uris", uris);
-// }
-// json.put("parameter", mapper.writeValueAsString(connectParams));
-// }
-
LinkisDataSourceRemoteClient client = exchangisDataSource.getDataSourceRemoteClient();
// UpdateDataSourceResult updateDataSourceResult;
String responseBody;
try {
-// updateDataSourceResult = client.updateDataSource(UpdateDataSourceAction.builder()
-// .setUser(user)
-// .setDataSourceId(id+"")
-// .addRequestPayloads(json)
-// .build()
-// );
Result execute = client.execute(UpdateDataSourceAction.builder()
.setUser(user)
- .setDataSourceId(id + "")
+ .setDataSourceId(Long.parseLong(id + ""))
.addRequestPayloads(json)
.build()
);
@@ -341,7 +320,7 @@ public Message updateDataSource(HttpServletRequest request,/* String type,*/ Lon
try {
updateDataSourceParameterResult = client.updateDataSourceParameter(
UpdateDataSourceParameterAction.builder()
- .setDataSourceId(id + "")
+ .setDataSourceId(Long.parseLong(id + ""))
.setUser(user)
.addRequestPayloads(json)
.build()
@@ -383,7 +362,7 @@ public Message deleteDataSource(HttpServletRequest request, /*String type,*/ Lon
// );
Result execute = dataSourceRemoteClient.execute(
- new DeleteDataSourceAction.Builder().setUser(user).setResourceId(id + "").builder()
+ new DeleteDataSourceAction.Builder().setUser(user).setDataSourceId(Long.parseLong(id + "")).builder()
);
responseBody = execute.getResponseBody();
@@ -465,12 +444,19 @@ public Message getJobDataSourceParamsUI(Long jobId) {
return null;
}
- ExchangisJobEntity job = this.exchangisJobInfoMapper.selectById(jobId);
+ ExchangisJobEntity job;
+ try {
+ job = this.jobOpenService.getJobById(jobId, false);
+ } catch (ExchangisJobException e) {
+ throw new ExchangisDataSourceException
+ .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e);
+ }
+
if (Objects.isNull(job)) {
return null;
}
- List jobInfoContents = this.parseJobContent(job.getContent());
+ List jobInfoContents = this.parseJobContent(job.getJobContent());
List uis = new ArrayList<>();
for (ExchangisJobInfoContent cnt : jobInfoContents) {
uis.add(this.buildDataSourceParamsUI(cnt));
@@ -484,12 +470,18 @@ public Message getJobDataSourceTransformsUI(Long jobId) {
return null;
}
- ExchangisJobEntity job = this.exchangisJobInfoMapper.selectById(jobId);
+ ExchangisJobEntity job;
+ try {
+ job = this.jobOpenService.getJobById(jobId, false);
+ } catch (ExchangisJobException e) {
+ throw new ExchangisDataSourceException
+ .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e);
+ }
if (Objects.isNull(job)) {
return null;
}
- String jobContent = job.getContent();
+ String jobContent = job.getJobContent();
ExchangisJobInfoContent content;
// 转换 content
if (Strings.isNullOrEmpty(jobContent)) {
@@ -513,12 +505,18 @@ public Message getJobDataSourceSettingsUI(Long jobId, String jobName) throws Exc
return null;
}
- ExchangisJobEntity job = this.exchangisJobInfoMapper.selectById(jobId);
+ ExchangisJobEntity job;
+ try {
+ job = this.jobOpenService.getJobById(jobId, false);
+ } catch (ExchangisJobException e) {
+ throw new ExchangisDataSourceException
+ .Runtime(CONTEXT_GET_DATASOURCE_NULL.getCode(), "Fail to get job entity (获得任务信息失败)", e);
+ }
if (Objects.isNull(job)) {
return null;
}
- List contents = this.parseJobContent(job.getContent());
+ List contents = this.parseJobContent(job.getJobContent());
for (ExchangisJobInfoContent content : contents) {
if (content.getSubJobName().equalsIgnoreCase(jobName)) {
@@ -636,7 +634,7 @@ public Message queryDataSources(HttpServletRequest request, DataSourceQueryVO vo
Message message = Message.ok();
message.data("list", dataSources);
- message.data("total", result.getTotalPage() * pageSize);
+ message.data("total", result.getTotalPage());
return message;
//return Message.ok().data("list", dataSources);
}
@@ -858,8 +856,8 @@ private MetadataGetColumnsResultDTO getDatasourceColumns(String username, Long i
throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
}
} catch (JsonErrorException e) {
- throw new ExchangisDataSourceException(CLIENT_METADATA_GET_COLUMNS_ERROR.getCode(),
- "Fail to deserialize the columns resultSet", e);
+ throw new ExchangisDataSourceException(CLIENT_METADATA_GET_COLUMNS_ERROR.getCode(),
+ "Fail to deserialize the columns resultSet", e);
}
return result;
@@ -923,9 +921,9 @@ public Message getDataSourceVersionsById(HttpServletRequest request, Long id) th
} catch (Exception e) {
if (e instanceof ErrorException) {
ErrorException ee = (ErrorException) e;
- throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_GET_DATASOURCE_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+ throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
} else {
- throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_GET_DATASOURCE_ERROR.getCode(), e.getMessage());
+ throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_ERROR.getCode(), e.getMessage());
}
}
// if (Objects.isNull(result)) {
@@ -948,18 +946,18 @@ public Message getDataSourceVersionsById(HttpServletRequest request, Long id) th
GetDataSourceVersionsResult versionsResult;
try {
versionsResult = linkisDataSourceRemoteClient.getDataSourceVersions(
- new GetDataSourceVersionsAction.Builder().setUser(userName).setResourceId(id + "").build()
+ new GetDataSourceVersionsAction.Builder().setUser(userName).setDataSourceId(Long.parseLong(id + "")).build()
);
} catch (Exception e) {
if (e instanceof ErrorException) {
ErrorException ee = (ErrorException) e;
- throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+ throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
} else {
- throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), e.getMessage());
+ throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), e.getMessage());
}
}
if (Objects.isNull(versionsResult)) {
- throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), "datasource version response body null or empty");
+ throw new ExchangisDataSourceException(CLIENT_GET_DATASOURCE_VERSION_ERROR.getCode(), "datasource version response body null or empty");
}
if (versionsResult.getStatus() != 0) {
@@ -1004,7 +1002,44 @@ public Message testConnect(HttpServletRequest request, Long id, Long version) th
DataSourceTestConnectResult result;
try {
result = linkisDataSourceRemoteClient.getDataSourceTestConnect(
- new DataSourceTestConnectAction.Builder().setUser(userName).setDataSourceId(id + "").setVersion(version + "").build()
+ new DataSourceTestConnectAction.Builder().setUser(userName).setDataSourceId(Long.parseLong(id + "")).setVersion(version + "").build()
+ );
+ } catch (Exception e) {
+ if (e instanceof ErrorException) {
+ ErrorException ee = (ErrorException) e;
+ throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), e.getMessage(), ee.getIp(), ee.getPort(), ee.getServiceKind());
+ } else {
+ throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), e.getMessage());
+ }
+ }
+
+ if (Objects.isNull(result)) {
+ throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.CLIENT_DATASOURCE_TEST_CONNECTION_ERROR.getCode(), "datasource test connection response body null or empty");
+ }
+
+ if (result.getStatus() != 0) {
+ throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
+ }
+
+ return Message.ok();
+ }
+
+ public Message testConnectByVo(HttpServletRequest request, DataSourceCreateVO vo) throws ErrorException {
+ LinkisDataSourceRemoteClient linkisDataSourceRemoteClient = ExchangisLinkisRemoteClient.getLinkisDataSourceRemoteClient();
+ String userName = SecurityFilter.getLoginUsername(request);
+ LOGGER.info("testConnect userName:" + userName);
+
+ Map json;
+ try {
+ json = mapper.readValue(mapper.writeValueAsString(vo), Map.class);
+ json.put("labels",json.get("label"));
+ } catch (JsonProcessingException e) {
+ throw new ExchangisDataSourceException(ExchangisDataSourceExceptionCode.PARSE_JSON_ERROR.getCode(), e.getMessage());
+ }
+ ParamsTestConnectResult result;
+ try {
+ result = (ParamsTestConnectResult) linkisDataSourceRemoteClient.execute(
+ new ParamsTestConnectAction(json, userName)
);
} catch (Exception e) {
if (e instanceof ErrorException) {
@@ -1034,7 +1069,7 @@ public Message publishDataSource(HttpServletRequest request, Long id, Long versi
PublishDataSourceVersionResult result;
try {
result = linkisDataSourceRemoteClient.publishDataSourceVersion(
- new PublishDataSourceVersionAction.Builder().setUser(userName).setDataSourceId(id + "").setVersion(version + "").build()
+ new PublishDataSourceVersionAction.Builder().setUser(userName).setDataSourceId(Long.parseLong(id + "")).setVersion(Long.parseLong(version + "")).build()
);
} catch (Exception e) {
if (e instanceof ErrorException) {
@@ -1076,7 +1111,7 @@ public Message expireDataSource(HttpServletRequest request, Long id) throws Erro
// );
Result execute = linkisDataSourceRemoteClient.execute(
- new ExpireDataSourceAction.Builder().setUser(userName).setDataSourceId(id + "").build()
+ new ExpireDataSourceAction.Builder().setUser(userName).setDataSourceId(Long.parseLong(id + "")).build()
);
responseBody = execute.getResponseBody();
} catch (Exception e) {
@@ -1155,7 +1190,7 @@ public Message getDataSourceKeyDefine(HttpServletRequest request, Long dataSourc
throw new ExchangisDataSourceException(result.getStatus(), result.getMessage());
}
- return Message.ok().data("list", Objects.isNull(result.getKey_define()) ? null : result.getKey_define());
+ return Message.ok().data("list", Objects.isNull(result.getKeyDefine()) ? null : result.getKeyDefine());
}
public void checkDSSupportDegree(String engine, String sourceDsType, String sinkDsType) throws ExchangisDataSourceException {
@@ -1184,6 +1219,14 @@ private void checkDataXDSSupportDegree(String sourceDsType, String sinkDsType) t
}
+ /**
+ * TODO: the mapping function is defined by the rule of Hive directly, we should abstract to support all the types
+ * @param request
+ * @param vo
+ * @return
+ * @throws Exception
+ */
+ @SuppressWarnings("unchecked")
public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, FieldMappingVO vo) throws Exception {
this.checkDSSupportDegree(vo.getEngine(), vo.getSourceTypeId(), vo.getSinkTypeId());
@@ -1209,39 +1252,23 @@ public Message queryDataSourceDBTableFieldsMapping(HttpServletRequest request, F
field.setFieldEditable(!"HIVE".equals(vo.getSinkTypeId()));
}
message.data("sinkFields", sinkFields);
-
-
// field mapping deduction
List