diff --git a/README-ZH.md b/README-ZH.md
index 216b6af862..bcfcfb709b 100644
--- a/README-ZH.md
+++ b/README-ZH.md
@@ -157,7 +157,7 @@ DSS主要特点:
## 快速安装使用
-点我进入[快速安装使用](docs/zh_CN/ch2/DSS快速安装使用文档.md)
+点我进入[快速安装使用](docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md)
## 架构
@@ -167,7 +167,7 @@ DSS主要特点:
#### 1. 安装编译文档
-[快速安装使用文档](docs/zh_CN/ch2/DSS快速安装使用文档.md)
+[快速安装使用文档](docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md)
[**DSS安装常见问题列表**](docs/zh_CN/ch1/DSS安装常见问题列表.md)
@@ -203,4 +203,4 @@ DSS主要特点:
## License
-DSS is under the Apache 2.0 license. See the [License](LICENSE) file for details.
\ No newline at end of file
+DSS is under the Apache 2.0 license. See the [License](LICENSE) file for details.
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 4ec1c8b647..3aa0e2d9a4 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -22,7 +22,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
@@ -103,7 +103,52 @@
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-core</artifactId>
-            <version>2.9.6</version>
+            <version>2.10.0</version>
+        </dependency>
+        <dependency>
+            <groupId>net.databinder.dispatch</groupId>
+            <artifactId>dispatch-core_2.11</artifactId>
+            <version>0.11.2</version>
+        </dependency>
+        <dependency>
+            <groupId>net.databinder.dispatch</groupId>
+            <artifactId>dispatch-json4s-jackson_2.11</artifactId>
+            <version>0.11.2</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.htrace</groupId>
+            <artifactId>htrace-core</artifactId>
+            <version>3.1.0-incubating</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-math3</artifactId>
+            <version>3.1.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>4.5.4</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpcore</artifactId>
+            <version>4.4.7</version>
+        </dependency>
+        <dependency>
+            <groupId>com.ning</groupId>
+            <artifactId>async-http-client</artifactId>
+            <version>1.8.10</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-beanutils</groupId>
+            <artifactId>commons-beanutils</artifactId>
+            <version>1.7.0</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-beanutils</groupId>
+            <artifactId>commons-beanutils-core</artifactId>
+            <version>1.8.0</version>
+        </dependency>
diff --git a/bin/checkEnv.sh b/bin/checkEnv.sh
index bdf48659ae..d51bd5ca21 100644
--- a/bin/checkEnv.sh
+++ b/bin/checkEnv.sh
@@ -1,3 +1,4 @@
+#!/bin/sh
#
# Copyright 2019 WeBank
#
@@ -13,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-#!/bin/sh
say() {
printf 'check command fail \n %s\n' "$1"
}
@@ -32,7 +32,7 @@ need_cmd() {
err "need '$1' (your linux command not found)"
fi
}
-echo "<-----start to check linux cmd:yum java mysql unzip expect telnet sed tar---->"
+echo "<-----start to check used cmd---->"
need_cmd yum
need_cmd java
need_cmd mysql
@@ -42,4 +42,4 @@ need_cmd telnet
need_cmd tar
need_cmd sed
need_cmd dos2unix
-echo "<-----end to check linux cmd:yum java mysql unzip expect telnet sed tar------>"
+echo "<-----end to check used cmd---->"
diff --git a/bin/checkMicro.sh b/bin/checkMicro.sh
deleted file mode 100644
index 3f8ff95fa9..0000000000
--- a/bin/checkMicro.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#!/bin/sh
-source ~/.bash_profile
-
-shellDir=`dirname $0`
-workDir=`cd ${shellDir}/..;pwd`
-
-##load config
-source ${workDir}/conf/config.sh
-source ${workDir}/conf/db.sh
-
-MICRO_SERVICE_NAME=$1
-MICRO_SERVICE_IP=$2
-MICRO_SERVICE_PORT=$3
-echo "<--------------------------------------------------------------------------->"
-echo "Start to Check if your microservice:$MICRO_SERVICE_NAME is normal via telnet"
-echo ""
-if [ ! -d $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME ];then
- echo "$MICRO_SERVICE_NAME is not installed,the check steps will be skipped"
- exit 0
-fi
-
-result=`echo -e "\n" | telnet $MICRO_SERVICE_IP $MICRO_SERVICE_PORT 2>/dev/null | grep Connected | wc -l`
-if [ $result -eq 1 ]; then
- echo "$MICRO_SERVICE_NAME is ok."
-else
- echo "<--------------------------------------------------------------------------->"
- echo "ERROR your $MICRO_SERVICE_NAME microservice is not start successful !!! ERROR logs as follows :"
- echo "PLEAESE CHECK DETAIL LOG,LOCATION:$DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
- echo '<------------------------------------------------------------->'
- tail -n 50 $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/*.out
- echo '<-------------------------------------------------------------->'
- echo "PLEAESE CHECK DETAIL LOG,LOCATION:$DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
- exit 1
-fi
-
diff --git a/bin/checkServices.sh b/bin/checkServices.sh
new file mode 100644
index 0000000000..72df04be43
--- /dev/null
+++ b/bin/checkServices.sh
@@ -0,0 +1,91 @@
+#!/bin/sh
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
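+# Usage: checkServices.sh <service-name> <service-ip> <service-port>
+# Verifies that a single DSS microservice is reachable on its port.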
+source ~/.bash_profile
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
+##load config
+export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${workDir}/conf/config.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${workDir}/conf/config.sh"}
+source ${LINKIS_DSS_CONF_FILE}
+source ${DISTRIBUTION}
+
+MICRO_SERVICE_NAME=$1
+MICRO_SERVICE_IP=$2
+MICRO_SERVICE_PORT=$3
+
+local_host="`hostname --fqdn`"
+
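+# First global-scope IPv4 address reported by "ip addr".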
+ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
+
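+# isLocal: returns 0 when the given host refers to this machine
+# (127.0.0.1, localhost, the local FQDN or the primary IP), 1 otherwise.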
+function isLocal(){
+ if [ "$1" == "127.0.0.1" ];then
+ return 0
+ elif [ $1 == "localhost" ]; then
+ return 0
+ elif [ $1 == $local_host ]; then
+ return 0
+ elif [ $1 == $ipaddr ]; then
+ return 0
+ fi
+ return 1
+}
+
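+# executeCMD: runs the command locally via eval when the target host is
+# this machine, otherwise remotely over ssh on $SSH_PORT.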
+function executeCMD(){
+ isLocal $1
+ flag=$?
+ echo "Is local "$flag
+ if [ $flag == "0" ];then
+ eval $2
+ else
+ ssh -p $SSH_PORT $1 $2
+ fi
+
+}
+
+#echo "<--------------------------------------------------------------------------->"
+#echo "Start to Check if your microservice:$MICRO_SERVICE_NAME is normal via telnet"
+#echo ""
+#if ! executeCMD $SERVER_IP "test -e $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME"; then
+# echo "$MICRO_SERVICE_NAME is not installed,the check steps will be skipped"
+# exit 0
+#fi
+echo "==========================================================="
+echo $MICRO_SERVICE_NAME
+echo $MICRO_SERVICE_IP
+echo $MICRO_SERVICE_PORT
+echo "==========================================================="
+
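+# For visualis-server, probe the machine's real IP instead of 127.0.0.1.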
+if [ $MICRO_SERVICE_NAME == "visualis-server" ] && [ $MICRO_SERVICE_IP == "127.0.0.1" ]; then
+ MICRO_SERVICE_IP=$ipaddr
+fi
+
+result=`echo -e "\n" | telnet $MICRO_SERVICE_IP $MICRO_SERVICE_PORT 2>/dev/null | grep Connected | wc -l`
+if [ $result -eq 1 ]; then
+ echo "$MICRO_SERVICE_NAME is ok."
+else
+ echo "<--------------------------------------------------------------------------->"
+    echo "ERROR: your $MICRO_SERVICE_NAME microservice did not start successfully!!! ERROR logs as follows:"
+    echo "PLEASE CHECK THE DETAILED LOG, LOCATION: $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
+ echo '<------------------------------------------------------------->'
+ executeCMD $MICRO_SERVICE_IP "tail -n 50 $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/*.out"
+ echo '<-------------------------------------------------------------->'
+    echo "PLEASE CHECK THE DETAILED LOG, LOCATION: $DSS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
+ exit 1
+fi
+
diff --git a/bin/install.sh b/bin/install.sh
index 5961dfdbd8..7a0b46db6d 100644
--- a/bin/install.sh
+++ b/bin/install.sh
@@ -15,6 +15,7 @@
#
#!/bin/sh
#Actively load user env
+
source ~/.bash_profile
shellDir=`dirname $0`
@@ -45,17 +46,13 @@ fi
function isSuccess(){
if [ $? -ne 0 ]; then
- echo "ERROR to " + $1
+        echo "Failed to $1"
exit 1
else
- echo "SUCESS to" + $1
+        echo "Succeeded to $1"
fi
}
-#check env
-sh ${workDir}/bin/checkEnv.sh
-isSuccess "check env"
-
function checkJava(){
java -version
isSuccess "execute java --version"
@@ -72,46 +69,108 @@ else
fi
}
+
+say() {
+ printf 'check command fail \n %s\n' "$1"
+}
+
+err() {
+ say "$1" >&2
+ exit 1
+}
+
+check_cmd() {
+ command -v "$1" > /dev/null 2>&1
+}
+
+need_cmd() {
+ if ! check_cmd "$1"; then
+ err "need '$1' (command not found)"
+ fi
+}
+
+#check env
+sh ${workDir}/bin/checkEnv.sh
+isSuccess "check env"
+
##load config
echo "step1:load config"
-source ${workDir}/conf/config.sh
-source ${workDir}/conf/db.sh
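+# The config and DB-config paths can be overridden through the
+# DSS_CONFIG_PATH and DSS_DB_CONFIG_PATH environment variables;
+# they default to conf/config.sh and conf/db.sh.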
+export DSS_CONFIG_PATH=${DSS_CONFIG_PATH:-"${workDir}/conf/config.sh"}
+export DSS_DB_CONFIG_PATH=${DSS_DB_CONFIG_PATH:-"${workDir}/conf/db.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${workDir}/conf/config.sh"}
+source ${DSS_CONFIG_PATH}
+source ${DSS_DB_CONFIG_PATH}
+source ${DISTRIBUTION}
isSuccess "load config"
-local_host="`hostname -i`"
+local_host="`hostname --fqdn`"
+ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
+
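+# isLocal: returns 0 when the given host refers to this machine, 1 otherwise.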
+function isLocal(){
+ if [ "$1" == "127.0.0.1" ];then
+ return 0
+ elif [ $1 == "localhost" ]; then
+ return 0
+ elif [ $1 == $local_host ]; then
+ return 0
+ elif [ $1 == $ipaddr ]; then
+ return 0
+ fi
+ return 1
+}
+
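+# executeCMD: eval locally when the target host is this machine,
+# otherwise run the command over ssh on $SSH_PORT.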
+function executeCMD(){
+ isLocal $1
+ flag=$?
+ if [ $flag == "0" ];then
+ echo "Is local execution:$2"
+ eval $2
+ else
+ echo "Is remote execution:$2"
+ ssh -p $SSH_PORT $1 $2
+ fi
+}
-##env check
-echo "Please enter the mode selection such as: 1"
-echo " 1: lite"
-echo " 2: sample"
-echo " 3: Standard"
-echo ""
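+# copyFile: cp -r when the target host is this machine, otherwise
+# scp -r over $SSH_PORT.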
+function copyFile(){
+ isLocal $1
+ flag=$?
+ src=$2
+ dest=$3
+ if [ $flag == "0" ];then
+ echo "Is local cp "
+ eval "cp -r $src $dest"
+ else
+ echo "Is remote cp "
+ scp -r -P $SSH_PORT $src $1:$dest
+ fi
+}
-INSTALL_MODE=1
+##install mode choice
+if [ "$INSTALL_MODE" == "" ];then
+ echo "Please enter the mode selection such as: 1"
+ echo " 1: Lite"
+ echo " 2: Simple"
+ echo " 3: Standard"
+ echo ""
+ read -p "Please input the choice:" idx
+ INSTALL_MODE=$idx
+fi
-read -p "Please input the choice:" idx
-if [[ '1' = "$idx" ]];then
- INSTALL_MODE=1
+if [[ '1' = "$INSTALL_MODE" ]];then
echo "You chose lite installation mode"
- #check for Java
checkJava
- #check for mysql
SERVER_NAME=MYSQL
EXTERNAL_SERVER_IP=$MYSQL_HOST
EXTERNAL_SERVER_PORT=$MYSQL_PORT
checkExternalServer
-elif [[ '2' = "$idx" ]];then
- INSTALL_MODE=2
+elif [[ '2' = "$INSTALL_MODE" ]];then
echo "You chose sample installation mode"
- #check for Java
checkJava
- #check for mysql
SERVER_NAME=MYSQL
EXTERNAL_SERVER_IP=$MYSQL_HOST
EXTERNAL_SERVER_PORT=$MYSQL_PORT
-
-elif [[ '3' = "$idx" ]];then
- INSTALL_MODE=3
+ checkExternalServer
+elif [[ '3' = "$INSTALL_MODE" ]];then
echo "You chose Standard installation mode"
#check for Java
checkJava
@@ -124,13 +183,16 @@ elif [[ '3' = "$idx" ]];then
SERVER_NAME=Qualitis
EXTERNAL_SERVER_IP=$QUALITIS_ADRESS_IP
EXTERNAL_SERVER_PORT=$QUALITIS_ADRESS_PORT
+ if [[ $IGNORECHECK = "" ]];then
checkExternalServer
+ fi
#check azkaban serivice
SERVER_NAME=AZKABAN
EXTERNAL_SERVER_IP=$AZKABAN_ADRESS_IP
EXTERNAL_SERVER_PORT=$AZKABAN_ADRESS_PORT
+ if [[ $IGNORECHECK = "" ]];then
checkExternalServer
-
+ fi
else
echo "no choice,exit!"
exit 1
@@ -156,40 +218,97 @@ else
exit 1
fi
+echo "create hdfs directory and local directory"
+if [ "$WORKSPACE_USER_ROOT_PATH" != "" ]
+then
+ localRootDir=$WORKSPACE_USER_ROOT_PATH
+ if [[ $WORKSPACE_USER_ROOT_PATH == file://* ]];then
+ localRootDir=${WORKSPACE_USER_ROOT_PATH#file://}
+ mkdir -p $localRootDir/$deployUser
+ sudo chmod -R 775 $localRootDir/$deployUser
+ elif [[ $WORKSPACE_USER_ROOT_PATH == hdfs://* ]];then
+ localRootDir=${WORKSPACE_USER_ROOT_PATH#hdfs://}
+ hdfs dfs -mkdir -p $localRootDir/$deployUser
+ hdfs dfs -chmod -R 775 $localRootDir/$deployUser
+ else
+     echo "unsupported filesystem type: $WORKSPACE_USER_ROOT_PATH"
+  fi
+  isSuccess "create $WORKSPACE_USER_ROOT_PATH directory"
+fi
+
+
+if [ "$RESULT_SET_ROOT_PATH" != "" ]
+then
+ localRootDir=$RESULT_SET_ROOT_PATH
+ if [[ $RESULT_SET_ROOT_PATH == file://* ]];then
+ localRootDir=${RESULT_SET_ROOT_PATH#file://}
+ mkdir -p $localRootDir/$deployUser
+ sudo chmod -R 775 $localRootDir/$deployUser
+ elif [[ $RESULT_SET_ROOT_PATH == hdfs://* ]];then
+ localRootDir=${RESULT_SET_ROOT_PATH#hdfs://}
+ hdfs dfs -mkdir -p $localRootDir/$deployUser
+ hdfs dfs -chmod -R 775 $localRootDir/$deployUser
+ else
+     echo "unsupported filesystem type: $RESULT_SET_ROOT_PATH"
+  fi
+  isSuccess "create $RESULT_SET_ROOT_PATH directory"
+fi
+
+
+if [ "$WDS_SCHEDULER_PATH" != "" ]
+then
+ localRootDir=$WDS_SCHEDULER_PATH
+ if [[ $WDS_SCHEDULER_PATH == file://* ]];then
+ localRootDir=${WDS_SCHEDULER_PATH#file://}
+ mkdir -p $localRootDir
+ sudo chmod -R 775 $localRootDir
+ elif [[ $WDS_SCHEDULER_PATH == hdfs://* ]];then
+ localRootDir=${WDS_SCHEDULER_PATH#hdfs://}
+ hdfs dfs -mkdir -p $localRootDir
+ hdfs dfs -chmod -R 775 $localRootDir
+ else
+     echo "unsupported filesystem type: $WDS_SCHEDULER_PATH"
+  fi
+  isSuccess "create $WDS_SCHEDULER_PATH directory"
+fi
+
+
##init db
if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/dss_ddl.sql"
- isSuccess "source dss_ddl.sql"
- LOCAL_IP="`hostname -i`"
- if [ $GATEWAY_INSTALL_IP == "127.0.0.1" ];then
- echo "GATEWAY_INSTALL_IP is equals 127.0.0.1 ,we will change it to ip address"
- GATEWAY_INSTALL_IP_2=$LOCAL_IP
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/dss_ddl.sql"
+ isSuccess "source dss_ddl.sql"
+ LOCAL_IP=$ipaddr
+ if [ $GATEWAY_INSTALL_IP == "127.0.0.1" ];then
+        echo "GATEWAY_INSTALL_IP equals 127.0.0.1, changing it to the real IP address"
+ GATEWAY_INSTALL_IP_2=$LOCAL_IP
else
- GATEWAY_INSTALL_IP_2=$GATEWAY_INSTALL_IP
+ GATEWAY_INSTALL_IP_2=$GATEWAY_INSTALL_IP
fi
#echo $GATEWAY_INSTALL_IP_2
sed -i "s/GATEWAY_INSTALL_IP_2/$GATEWAY_INSTALL_IP_2/g" ${workDir}/db/dss_dml.sql
sed -i "s/GATEWAY_PORT/$GATEWAY_PORT/g" ${workDir}/db/dss_dml.sql
mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/dss_dml.sql"
- isSuccess "source dss_dml.sql"
+ isSuccess "source dss_dml.sql"
- if [ '2' = "$INSTALL_MODE" ]||[ '3' = "$INSTALL_MODE" ];then
- echo "visualis support "
- if [ $VISUALIS_NGINX_IP == "127.0.0.1" ]||[ $VISUALIS_NGINX_IP == "0.0.0.0" ];then
- echo "VISUALIS_NGINX_IP is equals $VISUALIS_NGINX_IP ,we will change it to ip address"
- VISUALIS_NGINX_IP_2=$LOCAL_IP
+ if [[ '2' = "$INSTALL_MODE" ]] || [[ '3' = "$INSTALL_MODE" ]];then
+      echo "visualis supported, the visualis database will be initialized!"
+ if [ $VISUALIS_NGINX_IP == "127.0.0.1" ]||[ $VISUALIS_NGINX_IP == "0.0.0.0" ];then
+        echo "VISUALIS_NGINX_IP equals $VISUALIS_NGINX_IP, changing it to the real IP address"
+ VISUALIS_NGINX_IP_2=$LOCAL_IP
else
- VISUALIS_NGINX_IP_2=$VISUALIS_NGINX_IP
+ VISUALIS_NGINX_IP_2=$VISUALIS_NGINX_IP
fi
#echo $VISUALIS_NGINX_IP_2
sed -i "s/VISUALIS_NGINX_IP_2/$VISUALIS_NGINX_IP_2/g" ${workDir}/db/visualis.sql
sed -i "s/VISUALIS_NGINX_PORT/$VISUALIS_NGINX_PORT/g" ${workDir}/db/visualis.sql
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/visualis.sql"
- isSuccess "source visualis.sql"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/visualis.sql"
+ isSuccess "source visualis.sql"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/davinci.sql"
+ isSuccess "source davinci.sql"
fi
- if [[ '3' = "$INSTALL_MODE" ]];then
- echo "start to update azkaban and qualitis table info "
+ if [[ '3' = "$INSTALL_MODE" ]];then
+        echo "azkaban and qualitis supported, the azkaban and qualitis databases will be initialized!"
#azkaban
if [ $AZKABAN_ADRESS_IP == "127.0.0.1" ];then
echo "AZKABAN_ADRESS_IP is equals 127.0.0.1 ,we will change it to ip address"
@@ -217,28 +336,9 @@ if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then
fi
fi
-## davinci db init
-echo "Do you want to clear davinci table information in the database ? If you have not installed davinci environment,you must input '2',if you have davinci installed,choice 1."
-echo " 1: Do not execute table-building statements"
-echo "WARN:"
-echo " 2: Dangerous! Clear all data and rebuild the tables."
-echo ""
-DAVINCI_INSTALL_MODE=1
-read -p "Please input the choice:" idx
-if [[ '2' = "$idx" ]];then
- DAVINCI_INSTALL_MODE=2
- echo "You chose rebuild davinci's table !!! start rebuild all tables"
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/davinci.sql"
- isSuccess "source davinci.sql"
- echo ""
-elif [[ '1' = "$idx" ]];then
- DAVINCI_INSTALL_MODE=1
- echo "You chose not execute table-building statements"
- echo ""
-else
- echo "no choice,exit!"
- exit 1
-fi
+##Escape the special symbol '#' in passwords, since later sed commands use '#' as their delimiter
+HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'})
+MYSQL_PASSWORD=$(echo ${MYSQL_PASSWORD//'#'/'\#'})
###linkis Eurkea info
SERVER_IP=$EUREKA_INSTALL_IP
@@ -260,24 +360,30 @@ then
SERVER_IP=$local_host
fi
-if ! ssh -p $SSH_PORT $SERVER_IP test -e $SERVER_HOME; then
- ssh -p $SSH_PORT $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME"; then
+ executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
isSuccess "create the dir of $SERVERNAME"
fi
echo "$SERVERNAME-step2:copy install package"
-scp -P $SSH_PORT ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_IP:$SERVER_HOME
+copyFile $SERVER_IP ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_HOME
+
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME/lib"; then
+ copyFile $SERVER_IP ${workDir}/lib $SERVER_HOME
+fi
+
+#copyFile $SERVER_IP ${workDir}/lib $SERVER_HOME
isSuccess "copy ${SERVERNAME}.zip"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
-ssh -p $SSH_PORT $SERVER_IP "cd $workDir/;scp -r lib/* $SERVER_HOME/$SERVERNAME/lib"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;scp -r lib/* $SERVER_HOME/$SERVERNAME/lib"
isSuccess "unzip ${SERVERNAME}.zip"
echo "$SERVERNAME-step3:subsitution conf"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/application.yml
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#port:.*#port: $SERVER_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#defaultZone:.*#defaultZone: $EUREKA_URL#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#hostname:.*#hostname: $SERVER_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#port:.*#port: $SERVER_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#defaultZone:.*#defaultZone: $EUREKA_URL#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#hostname:.*#hostname: $SERVER_IP#g\" $SERVER_CONF_PATH"
isSuccess "subsitution conf of $SERVERNAME"
}
##function end
@@ -291,16 +397,16 @@ then
SERVER_IP=$local_host
fi
-if ! ssh -p $SSH_PORT $SERVER_IP test -e $SERVER_HOME; then
- ssh -p $SSH_PORT $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME"; then
+ executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME"
isSuccess "create the dir of $SERVERNAME"
fi
echo "$SERVERNAME-step2:copy install package"
-scp -P $SSH_PORT ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_IP:$SERVER_HOME
+copyFile $SERVER_IP ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_HOME
isSuccess "copy ${SERVERNAME}.zip"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak"
+executeCMD $SERVER_IP "cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null"
isSuccess "unzip ${SERVERNAME}.zip"
}
##function end
@@ -315,15 +421,15 @@ then
SERVER_IP=$local_host
fi
-if ! ssh -p $SSH_PORT $SERVER_IP test -e $SERVER_HOME/$APPJOINTPARENT; then
- ssh -p $SSH_PORT $SERVER_IP "sudo mkdir -p $SERVER_HOME/$APPJOINTPARENT;sudo chown -R $deployUser:$deployUser $SERVER_HOME/$APPJOINTPARENT"
+if ! executeCMD $SERVER_IP "test -e $SERVER_HOME/$APPJOINTPARENT"; then
+ executeCMD $SERVER_IP "sudo mkdir -p $SERVER_HOME/$APPJOINTPARENT;sudo chown -R $deployUser:$deployUser $SERVER_HOME/$APPJOINTPARENT"
isSuccess "create the dir of $SERVER_HOME/$APPJOINTPARENT;"
fi
echo "$APPJOINTNAME-step2:copy install package"
-scp -P $SSH_PORT $workDir/share/appjoints/$APPJOINTNAME/*.zip $SERVER_IP:$SERVER_HOME/$APPJOINTPARENT
+copyFile $SERVER_IP $workDir/share/appjoints/$APPJOINTNAME/*.zip $SERVER_HOME/$APPJOINTPARENT
isSuccess "copy ${APPJOINTNAME}.zip"
-ssh -p $SSH_PORT $SERVER_IP "cd $SERVER_HOME/$APPJOINTPARENT/;unzip -o dss-*-appjoint.zip > /dev/null;rm -rf dss-*-appjoint.zip"
+executeCMD $SERVER_IP "cd $SERVER_HOME/$APPJOINTPARENT/;unzip -o dss-*-appjoint.zip > /dev/null;rm -rf dss-*-appjoint.zip"
isSuccess "install ${APPJOINTNAME}.zip"
}
##function end
@@ -339,18 +445,19 @@ installPackage
###update Dss-Server linkis.properties
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.azkaban.address.*#wds.dss.appjoint.scheduler.azkaban.address=http://${AZKABAN_ADRESS_IP}:${AZKABAN_ADRESS_PORT}#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.project.store.dir.*#wds.dss.appjoint.scheduler.project.store.dir=$WDS_SCHEDULER_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.azkaban.address.*#wds.dss.appjoint.scheduler.azkaban.address=http://${AZKABAN_ADRESS_IP}:${AZKABAN_ADRESS_PORT}#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.appjoint.scheduler.project.store.dir.*#wds.dss.appjoint.scheduler.project.store.dir=$WDS_SCHEDULER_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "echo "$deployUser=$deployUser" >> $SERVER_HOME/$SERVERNAME/conf/token.properties"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
-if [ '2' = "$INSTALL_MODE" ]||[ '3' = "$INSTALL_MODE" ];then
+if [[ '2' = "$INSTALL_MODE" ]]||[[ '3' = "$INSTALL_MODE" ]];then
##Flow execution Install
PACKAGE_DIR=dss/dss-flow-execution-entrance
SERVERNAME=dss-flow-execution-entrance
@@ -362,9 +469,9 @@ installPackage
###Update flow execution linkis.properties
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}#g\" $SERVER_CONF_PATH"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
@@ -379,8 +486,8 @@ installPackage
###Update appjoint entrance linkis.properties
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
@@ -396,22 +503,25 @@ installVisualis
echo "$SERVERNAME-step4:update linkis.properties"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
if [ $VISUALIS_NGINX_IP == "127.0.0.1" ]||[ $VISUALIS_NGINX_IP == "0.0.0.0" ]; then
- VISUALIS_NGINX_IP=$local_host
+ VISUALIS_NGINX_IP=$ipaddr
+fi
+if [ $VISUALIS_SERVER_INSTALL_IP == "127.0.0.1" ]||[ $VISUALIS_SERVER_INSTALL_IP == "0.0.0.0" ]; then
+ VISUALIS_SERVER_INSTALL_IP=$ipaddr
fi
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.ip.*#wds.dss.visualis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.port.*#wds.dss.visualis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.entrance.config.logPath.*#wds.linkis.entrance.config.logPath=$WORKSPACE_USER_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.ip.*#wds.dss.visualis.gateway.ip=$GATEWAY_INSTALL_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#wds.dss.visualis.gateway.port.*#wds.dss.visualis.gateway.port=$GATEWAY_PORT#g\" $SERVER_CONF_PATH"
SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/application.yml
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#address: 127.0.0.1#address: $VISUALIS_SERVER_INSTALL_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#port: 9007#port: $VISUALIS_SERVER_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#url: http://0.0.0.0:0000/dss/visualis#url: http://$VISUALIS_NGINX_IP:$VISUALIS_NGINX_PORT/dss/visualis#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#address: 0.0.0.0#address: $VISUALIS_NGINX_IP#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#port: 0000#port: $VISUALIS_NGINX_PORT#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#defaultZone: http://127.0.0.1:20303/eureka/#defaultZone: http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#url: jdbc:mysql://127.0.0.1:3306/xxx?characterEncoding=UTF-8#url: jdbc:mysql://$MYSQL_HOST:$MYSQL_PORT/$MYSQL_DB?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#username: xxx#username: $MYSQL_USER#g\" $SERVER_CONF_PATH"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#password: xxx#password: $MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#address: 127.0.0.1#address: $VISUALIS_SERVER_INSTALL_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#port: 9007#port: $VISUALIS_SERVER_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#url: http://0.0.0.0:0000/dss/visualis#url: http://$VISUALIS_NGINX_IP:$VISUALIS_NGINX_PORT/dss/visualis#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#address: 0.0.0.0#address: $VISUALIS_NGINX_IP#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#port: 0000#port: $VISUALIS_NGINX_PORT#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#defaultZone: http://127.0.0.1:20303/eureka/#defaultZone: http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#url: jdbc:mysql://127.0.0.1:3306/xxx?characterEncoding=UTF-8#url: jdbc:mysql://$MYSQL_HOST:$MYSQL_PORT/$MYSQL_DB?characterEncoding=UTF-8#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#username: xxx#username: $MYSQL_USER#g\" $SERVER_CONF_PATH"
+executeCMD $SERVER_IP "sed -i \"s#password: xxx#password: $MYSQL_PASSWORD#g\" $SERVER_CONF_PATH"
isSuccess "subsitution linkis.properties of $SERVERNAME"
echo "<----------------$SERVERNAME:end------------------->"
echo ""
@@ -423,9 +533,9 @@ APPJOINTNAME=datachecker
installAppjoints
echo "$APPJOINTNAME:subsitution conf"
APPJOINTNAME_CONF_PATH_PATENT=$SERVER_HOME/$APPJOINTPARENT/$APPJOINTNAME/appjoint.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.url.*#job.datachecker.jdo.option.url=$HIVE_META_URL#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.username.*#job.datachecker.jdo.option.username=$HIVE_META_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.password.*#job.datachecker.jdo.option.password=$HIVE_META_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.url.*#job.datachecker.jdo.option.url=$HIVE_META_URL#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.username.*#job.datachecker.jdo.option.username=$HIVE_META_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#job.datachecker.jdo.option.password.*#job.datachecker.jdo.option.password=$HIVE_META_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
isSuccess "subsitution conf of datachecker"
echo "<----------------datachecker appjoint install end------------------->"
echo ""
@@ -436,9 +546,9 @@ APPJOINTNAME=eventchecker
installAppjoints
echo "$APPJOINTNAME:subsitution conf"
APPJOINTNAME_CONF_PATH_PATENT=$SERVER_HOME/$APPJOINTPARENT/$APPJOINTNAME/appjoint.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.url.*#msg.eventchecker.jdo.option.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.username.*#msg.eventchecker.jdo.option.username=$MYSQL_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.password.*#msg.eventchecker.jdo.option.password=$MYSQL_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.url.*#msg.eventchecker.jdo.option.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.username.*#msg.eventchecker.jdo.option.username=$MYSQL_USER#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#msg.eventchecker.jdo.option.password.*#msg.eventchecker.jdo.option.password=$MYSQL_PASSWORD#g\" $APPJOINTNAME_CONF_PATH_PATENT"
isSuccess "subsitution conf of eventchecker"
echo "<----------------$APPJOINTNAME:end------------------->"
echo ""
@@ -459,7 +569,7 @@ APPJOINTNAME=qualitis
#qualitis appjoint install
installAppjoints
APPJOINTNAME_CONF_PATH_PATENT=$SERVER_HOME/$APPJOINTPARENT/$APPJOINTNAME/appjoint.properties
-ssh -p $SSH_PORT $SERVER_IP "sed -i \"s#baseUrl=http://127.0.0.1:8090#baseUrl=http://$QUALITIS_ADRESS_IP:$QUALITIS_ADRESS_PORT#g\" $APPJOINTNAME_CONF_PATH_PATENT"
+executeCMD $SERVER_IP "sed -i \"s#baseUrl=http://127.0.0.1:8090#baseUrl=http://$QUALITIS_ADRESS_IP:$QUALITIS_ADRESS_PORT#g\" $APPJOINTNAME_CONF_PATH_PATENT"
isSuccess "subsitution conf of qualitis"
echo "<----------------$APPJOINTNAME:end------------------->"
echo ""
diff --git a/bin/start-all.sh b/bin/start-all.sh
index 8f5b04e2ca..98a07f6bdc 100644
--- a/bin/start-all.sh
+++ b/bin/start-all.sh
@@ -15,21 +15,35 @@
# limitations under the License.
#
-
-
# Start all dss applications
info="We will start all dss applications, it will take some time, please wait"
echo ${info}
#Actively load user env
+source /etc/profile
source ~/.bash_profile
-workDir=`dirname "${BASH_SOURCE-$0}"`
-workDir=`cd "$workDir"; pwd`
+shellDir=`dirname $0`
+
+workDir=`cd ${shellDir}/..;pwd`
+CONF_DIR="${workDir}"/conf
+
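+# The config file location can be overridden through the LINKIS_DSS_CONF_FILE
+# and DISTRIBUTION environment variables; both default to conf/config.sh.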
+export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${CONF_DIR}/config.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${CONF_DIR}/config.sh"}
+source $LINKIS_DSS_CONF_FILE
+source ${DISTRIBUTION}
+function isSuccess(){
+if [ $? -ne 0 ]; then
+    echo "ERROR: $1"
+ exit 1
+else
+    echo "INFO: $1"
+fi
+}
+local_host="`hostname --fqdn`"
-CONF_DIR="${workDir}"/../conf
-CONF_FILE=${CONF_DIR}/config.sh
+ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
function isLocal(){
if [ "$1" == "127.0.0.1" ];then
@@ -56,28 +70,14 @@ function executeCMD(){
}
-function isSuccess(){
-if [ $? -ne 0 ]; then
- echo "ERROR: " + $1
- exit 1
-else
- echo "INFO:" + $1
-fi
-}
-
-sudo yum -y install dos2unix
-
-
-local_host="`hostname --fqdn`"
-
#if there is no LINKIS_INSTALL_HOME,we need to source config again
if [ -z ${DSS_INSTALL_HOME} ];then
echo "Warning: DSS_INSTALL_HOME does not exist, we will source config"
- if [ ! -f "${CONF_FILE}" ];then
+ if [ ! -f "${LINKIS_DSS_CONF_FILE}" ];then
echo "Error: can not find config file, start applications failed"
exit 1
else
- source ${CONF_FILE}
+ source ${LINKIS_DSS_CONF_FILE}
fi
fi
@@ -85,19 +85,29 @@ function startApp(){
echo "<-------------------------------->"
echo "Begin to start $SERVER_NAME"
SERVER_BIN=${DSS_INSTALL_HOME}/${SERVER_NAME}/bin
-SERVER_START_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1;sh start-${SERVER_NAME}.sh > /dev/null 2>&1 &"
+#echo $SERVER_BIN
+SERVER_LOCAL_START_CMD="dos2unix ${SERVER_BIN}/* > /dev/null 2>&1; dos2unix ${SERVER_BIN}/../conf/* > /dev/null 2>&1;sh ${SERVER_BIN}/start-${SERVER_NAME}.sh > /dev/null 2>&1 &"
+SERVER_REMOTE_START_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1; sh start-${SERVER_NAME}.sh > /dev/null 2>&1"
+
+if test -z "$SERVER_IP"
+then
+ SERVER_IP=$local_host
+fi
-if [ ! -d $SERVER_BIN ];then
+if ! executeCMD $SERVER_IP "test -e $SERVER_BIN"; then
echo "<-------------------------------->"
echo "$SERVER_NAME is not installed,the start steps will be skipped"
echo "<-------------------------------->"
return
fi
-if [ -n "${SERVER_IP}" ];then
- ssh ${SERVER_IP} "${SERVER_START_CMD}"
+isLocal $SERVER_IP
+flag=$?
+echo "Is local "$flag
+if [ $flag == "0" ];then
+ eval $SERVER_LOCAL_START_CMD
else
- ssh ${local_host} "${SERVER_START_CMD}"
+ ssh -p $SSH_PORT $SERVER_IP $SERVER_REMOTE_START_CMD
fi
isSuccess "End to start $SERVER_NAME"
echo "<-------------------------------->"
@@ -119,6 +129,7 @@ SERVER_NAME=linkis-appjoint-entrance
SERVER_IP=$APPJOINT_ENTRANCE_INSTALL_IP
startApp
+#visualis-server
SERVER_NAME=visualis-server
SERVER_IP=$VISUALIS_SERVER_INSTALL_IP
startApp
@@ -126,34 +137,53 @@ startApp
echo ""
echo "Start to check all dss microservice"
echo ""
+
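+# checkServer: delegates to bin/checkServices.sh to verify that the
+# microservice named in $SERVER_NAME answers on $SERVER_IP:$SERVER_PORT.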
+function checkServer(){
+echo "<-------------------------------->"
+echo "Begin to check $SERVER_NAME"
+if test -z "$SERVER_IP"
+then
+ SERVER_IP=$local_host
+fi
+
+SERVER_BIN=${SERVER_HOME}/${SERVER_NAME}/bin
+
+if ! executeCMD $SERVER_IP "test -e ${DSS_INSTALL_HOME}/${SERVER_NAME}"; then
+    echo "$SERVER_NAME is not installed, the checkServer steps will be skipped"
+ return
+fi
+
+sh $workDir/bin/checkServices.sh $SERVER_NAME $SERVER_IP $SERVER_PORT
+isSuccess "start $SERVER_NAME "
+sleep 3
+echo "<-------------------------------->"
+}
+
#check dss-server
-MICRO_SERVICE_NAME=dss-server
-MICRO_SERVICE_IP=$DSS_SERVER_INSTALL_IP
-MICRO_SERVICE_PORT=$DSS_SERVER_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+SERVER_NAME=dss-server
+SERVER_IP=$DSS_SERVER_INSTALL_IP
+SERVER_PORT=$DSS_SERVER_PORT
+checkServer
#check dss-flow-execution-entrance
-MICRO_SERVICE_NAME=dss-flow-execution-entrance
-MICRO_SERVICE_IP=$FLOW_EXECUTION_INSTALL_IP
-MICRO_SERVICE_PORT=$FLOW_EXECUTION_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+SERVER_NAME=dss-flow-execution-entrance
+SERVER_IP=$FLOW_EXECUTION_INSTALL_IP
+SERVER_PORT=$FLOW_EXECUTION_PORT
+checkServer
#check linkis-appjoint-entrance
-MICRO_SERVICE_NAME=linkis-appjoint-entrance
-MICRO_SERVICE_IP=$APPJOINT_ENTRANCE_INSTALL_IP
-MICRO_SERVICE_PORT=$APPJOINT_ENTRANCE_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+SERVER_NAME=linkis-appjoint-entrance
+SERVER_IP=$APPJOINT_ENTRANCE_INSTALL_IP
+SERVER_PORT=$APPJOINT_ENTRANCE_PORT
+checkServer
#check visualis-server
-sleep 10 #for visualis-server
-MICRO_SERVICE_NAME=visualis-server
-MICRO_SERVICE_IP=$VISUALIS_SERVER_INSTALL_IP
-MICRO_SERVICE_PORT=$VISUALIS_SERVER_PORT
-sh $workDir/checkMicro.sh $MICRO_SERVICE_NAME $MICRO_SERVICE_IP $MICRO_SERVICE_PORT
-isSuccess "$MICRO_SERVICE_NAME start"
+sleep 10 #the visualis service needs more time to register
+SERVER_NAME=visualis-server
+SERVER_IP=$VISUALIS_SERVER_INSTALL_IP
+SERVER_PORT=$VISUALIS_SERVER_PORT
+checkServer
+echo "DSS started successfully"
diff --git a/bin/stop-all.sh b/bin/stop-all.sh
index 82252c7b55..838b9babc9 100644
--- a/bin/stop-all.sh
+++ b/bin/stop-all.sh
@@ -29,7 +29,12 @@ workDir=`cd "$workDir"; pwd`
CONF_DIR="${workDir}"/../conf
-CONF_FILE=${CONF_DIR}/config.sh
+export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${CONF_DIR}/config.sh"}
+export DISTRIBUTION=${DISTRIBUTION:-"${CONF_DIR}/config.sh"}
+source ${DISTRIBUTION}
+
+local_host="`hostname --fqdn`"
+ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
function isSuccess(){
if [ $? -ne 0 ]; then
@@ -40,18 +45,40 @@ else
fi
}
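+# isLocal: returns 0 when the given host refers to this machine, 1 otherwise.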
+function isLocal(){
+ if [ "$1" == "127.0.0.1" ];then
+ return 0
+ elif [ $1 == "localhost" ]; then
+ return 0
+ elif [ $1 == $local_host ]; then
+ return 0
+ elif [ $1 == $ipaddr ]; then
+ return 0
+ fi
+ return 1
+}
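+# executeCMD: eval locally or run over ssh, as in start-all.sh.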
+function executeCMD(){
+ isLocal $1
+ flag=$?
+ echo "Is local "$flag
+ if [ $flag == "0" ];then
+ eval $2
+ else
+ ssh -p $SSH_PORT $1 $2
+ fi
+
+}
-local_host="`hostname --fqdn`"
#if there is no LINKIS_INSTALL_HOME,we need to source config again
if [ -z ${DSS_INSTALL_HOME} ];then
echo "Warning: DSS_INSTALL_HOME does not exist, we will source config"
- if [ ! -f "${CONF_FILE}" ];then
+ if [ ! -f "${LINKIS_DSS_CONF_FILE}" ];then
echo "Error: can not find config file, stop applications failed"
exit 1
else
- source ${CONF_FILE}
+ source ${LINKIS_DSS_CONF_FILE}
fi
fi
@@ -59,18 +86,26 @@ function stopAPP(){
echo "<-------------------------------->"
echo "Begin to stop $SERVER_NAME"
SERVER_BIN=${DSS_INSTALL_HOME}/${SERVER_NAME}/bin
-SERVER_STOP_CMD="source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1; sh stop-${SERVER_NAME}.sh"
-if [ ! -d ${DSS_INSTALL_HOME}/${SERVER_NAME} ];then
+SERVER_LOCAL_STOP_CMD="sh ${SERVER_BIN}/stop-${SERVER_NAME}.sh"
+SERVER_REMOTE_STOP_CMD="source /etc/profile;source ~/.bash_profile;cd ${SERVER_BIN}; sh stop-${SERVER_NAME}.sh "
+if test -z "$SERVER_IP"
+then
+ SERVER_IP=$local_host
+fi
+
+if ! executeCMD $SERVER_IP "test -e ${DSS_INSTALL_HOME}/${SERVER_NAME}"; then
echo "$SERVER_NAME is not installed,the stop steps will be skipped"
return
fi
-if [ -n "${SERVER_IP}" ];then
- ssh -p $SSH_PORT ${SERVER_IP} "${SERVER_STOP_CMD}"
+isLocal $SERVER_IP
+flag=$?
+echo "Is local "$flag
+if [ $flag == "0" ];then
+ eval $SERVER_LOCAL_STOP_CMD
else
- ssh -p $SSH_PORT ${local_host} "${SERVER_STOP_CMD}"
+ ssh -p $SSH_PORT $SERVER_IP $SERVER_REMOTE_STOP_CMD
fi
-isSuccess "End to stop $SERVER_NAME"
echo "<-------------------------------->"
sleep 3
}
@@ -89,7 +124,10 @@ stopAPP
SERVER_NAME=linkis-appjoint-entrance
SERVER_IP=$APPJOINT_ENTRANCE_INSTALL_IP
stopAPP
+
#visualis-server
SERVER_NAME=visualis-server
SERVER_IP=$VISUALIS_SERVER_INSTALL_IP
stopAPP
+
+echo "stop-all shell script executed completely"
diff --git a/conf/config.sh b/conf/config.sh
index 2d0172d23a..c6d910aca9 100644
--- a/conf/config.sh
+++ b/conf/config.sh
@@ -1,8 +1,13 @@
+#!/bin/sh
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
### deploy user
deployUser=hadoop
### The install home path of DSS,Must provided
-DSS_INSTALL_HOME=/appcom/Install/DSS
+DSS_INSTALL_HOME=$workDir
### Specifies the user workspace, which is used to store the user's script files and log files.
### Generally local directory
@@ -66,10 +71,10 @@ VISUALIS_NGINX_PORT=8088
#azkaban address for check
AZKABAN_ADRESS_IP=127.0.0.1
-AZKABAN_ADRESS_PORT=8091
+AZKABAN_ADRESS_PORT=8081
#qualitis.address for check
QUALITIS_ADRESS_IP=127.0.0.1
QUALITIS_ADRESS_PORT=8090
-DSS_VERSION=0.7.0
\ No newline at end of file
+DSS_VERSION=0.9.0
diff --git a/datachecker-appjoint/pom.xml b/datachecker-appjoint/pom.xml
index c2d804486c..9d36a02ae5 100644
--- a/datachecker-appjoint/pom.xml
+++ b/datachecker-appjoint/pom.xml
@@ -22,7 +22,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
diff --git a/datachecker-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/schedulis/jobtype/connector/DataCheckerDao.java b/datachecker-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/schedulis/jobtype/connector/DataCheckerDao.java
index d3caeac010..d84a0d4bc6 100644
--- a/datachecker-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/schedulis/jobtype/connector/DataCheckerDao.java
+++ b/datachecker-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/schedulis/jobtype/connector/DataCheckerDao.java
@@ -158,8 +158,9 @@ private long getTotalCount(Map proObjectMap, Connection conn, L
private PreparedStatement getStatement(Connection conn, String dataObject) throws SQLException {
String dataScape = dataObject.contains("{") ? "Partition" : "Table";
- String dbName = dataObject.split("\\.")[0];
- String tableName = dataObject.split("\\.")[1];
+        String[] dataObjectArray = dataObject.split("\\.");
+        if (dataObjectArray.length < 2) {
+            throw new SQLException("Incorrect input format for dataObject " + dataObject);
+        }
+        String dbName = dataObjectArray[0];
+        String tableName = dataObjectArray[1];
if(dataScape.equals("Partition")) {
Pattern pattern = Pattern.compile("\\{([^\\}]+)\\}");
Matcher matcher = pattern.matcher(dataObject);
@@ -174,11 +175,13 @@ private PreparedStatement getStatement(Connection conn, String dataObject) throw
pstmt.setString(2, tableName);
pstmt.setString(3, partitionName);
return pstmt;
- } else {
+        } else if (dataObjectArray.length == 2) {
PreparedStatement pstmt = conn.prepareCall(SQL_SOURCE_TYPE_JOB_TABLE);
pstmt.setString(1, dbName);
pstmt.setString(2, tableName);
return pstmt;
+        } else {
+            throw new SQLException("Incorrect input format for dataObject " + dataObject);
}
}
diff --git a/db/azkaban.sql b/db/azkaban.sql
index 489ded7bb1..7f18b33087 100644
--- a/db/azkaban.sql
+++ b/db/azkaban.sql
@@ -1,3 +1,4 @@
INSERT INTO `dss_application` (`id`, `name`, `url`, `is_user_need_init`, `level`, `user_init_url`, `exists_project_service`, `project_url`, `enhance_json`, `if_iframe`, `homepage_url`, `redirect_url`) VALUES (NULL, 'schedulis', NULL, '0', '1', NULL, '0', NULL, NULL, '1', NULL, NULL);
-UPDATE `dss_application` SET url = 'http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT', project_url = 'http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT/manager?project=${projectName}',homepage_url = 'http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT/homepage' WHERE `name` in
- ('schedulis');
+UPDATE `dss_application` SET url = 'http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT', project_url = 'http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT/manager?project=${projectName}',homepage_url = 'http://AZKABAN_ADRESS_IP_2:AZKABAN_ADRESS_PORT/homepage' WHERE `name` in ('schedulis');
+SELECT @schedulis_id:=id FROM `dss_application` WHERE `name` = 'schedulis';
+INSERT INTO `dss_workflow_node` VALUES (NULL, NULL, 'linkis.shell.sh', @schedulis_id, 1, 1, 0, 1, NULL);
diff --git a/db/dss_ddl.sql b/db/dss_ddl.sql
index 5c016e0b1b..cdaf8fb1a7 100644
--- a/db/dss_ddl.sql
+++ b/db/dss_ddl.sql
@@ -154,6 +154,7 @@ CREATE TABLE `dss_project` (
`name` varchar(200) COLLATE utf8_bin DEFAULT NULL,
`source` varchar(50) COLLATE utf8_bin DEFAULT NULL COMMENT 'Source of the dss_project',
`description` text COLLATE utf8_bin,
+ `workspace_id` bigint(20) DEFAULT 1,
`user_id` bigint(20) DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
`create_by` bigint(20) DEFAULT NULL,
@@ -294,3 +295,156 @@ CREATE TABLE `event_status` (
`msg_id` int(11) NOT NULL COMMENT '消息的最大消费id',
PRIMARY KEY (`receiver`,`topic`,`msg_name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COMMENT='消息消费状态表';
+
+
+-- ----------------------------
+-- Table structure for dss_workspace
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_workspace`;
+CREATE TABLE `dss_workspace` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(255) DEFAULT NULL,
+ `label` varchar(255) DEFAULT NULL,
+ `description` varchar(255) DEFAULT NULL,
+ `department` varchar(255) DEFAULT NULL,
+ `product` varchar(255) DEFAULT NULL,
+ `source` varchar(255) DEFAULT NULL,
+ `create_by` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `last_update_user` varchar(30) DEFAULT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name` (`name`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for dss_onestop_menu
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_onestop_menu`;
+CREATE TABLE `dss_onestop_menu` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(64) DEFAULT NULL,
+ `title_en` varchar(64) DEFAULT NULL,
+ `title_cn` varchar(64) DEFAULT NULL,
+ `description` varchar(255) DEFAULT NULL,
+ `is_active` tinyint(1) DEFAULT 1,
+ `icon` varchar(255) DEFAULT NULL,
+ `order` int(2) DEFAULT NULL,
+ `create_by` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `last_update_user` varchar(30) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for dss_onestop_menu_application
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_onestop_menu_application`;
+CREATE TABLE `dss_onestop_menu_application` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `application_id` int(20) DEFAULT NULL,
+ `onestop_menu_id` int(20) NOT NULL,
+ `title_en` varchar(64) DEFAULT NULL,
+ `title_cn` varchar(64) DEFAULT NULL,
+ `desc_en` varchar(255) DEFAULT NULL,
+ `desc_cn` varchar(255) DEFAULT NULL,
+ `labels_en` varchar(255) DEFAULT NULL,
+ `labels_cn` varchar(255) DEFAULT NULL,
+ `is_active` tinyint(1) DEFAULT NULL,
+ `access_button_en` varchar(64) DEFAULT NULL,
+ `access_button_cn` varchar(64) DEFAULT NULL,
+ `manual_button_en` varchar(64) DEFAULT NULL,
+ `manual_button_cn` varchar(64) DEFAULT NULL,
+ `manual_button_url` varchar(255) DEFAULT NULL,
+ `icon` varchar(255) DEFAULT NULL,
+ `order` int(2) DEFAULT NULL,
+ `create_by` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `last_update_user` varchar(30) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for dss_onestop_user_favorites
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_onestop_user_favorites`;
+CREATE TABLE `dss_onestop_user_favorites` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `username` varchar(64) DEFAULT NULL,
+ `workspace_id` bigint(20) DEFAULT 1,
+ `menu_application_id` int(20) DEFAULT NULL,
+ `order` int(2) DEFAULT NULL,
+ `create_by` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `last_update_user` varchar(30) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for dss_homepage_demo_menu
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_homepage_demo_menu`;
+CREATE TABLE `dss_homepage_demo_menu` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(64) DEFAULT NULL,
+ `title_en` varchar(64) DEFAULT NULL,
+ `title_cn` varchar(64) DEFAULT NULL,
+ `description` varchar(255) DEFAULT NULL,
+ `is_active` tinyint(1) DEFAULT 1,
+ `icon` varchar(255) DEFAULT NULL,
+ `order` int(2) DEFAULT NULL,
+ `create_by` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `last_update_user` varchar(30) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for dss_homepage_demo_instance
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_homepage_demo_instance`;
+CREATE TABLE `dss_homepage_demo_instance` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `menu_id` int(20) DEFAULT NULL,
+ `name` varchar(64) DEFAULT NULL,
+ `url` varchar(128) DEFAULT NULL,
+ `title_en` varchar(64) DEFAULT NULL,
+ `title_cn` varchar(64) DEFAULT NULL,
+ `description` varchar(255) DEFAULT NULL,
+ `is_active` tinyint(1) DEFAULT 1,
+ `icon` varchar(255) DEFAULT NULL,
+ `order` int(2) DEFAULT NULL,
+ `click_num` int(11) DEFAULT 0,
+ `create_by` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `last_update_user` varchar(30) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+-- ----------------------------
+-- Table structure for dss_homepage_video
+-- ----------------------------
+DROP TABLE IF EXISTS `dss_homepage_video`;
+CREATE TABLE `dss_homepage_video` (
+ `id` int(20) NOT NULL AUTO_INCREMENT,
+ `name` varchar(64) DEFAULT NULL,
+ `url` varchar(128) DEFAULT NULL,
+ `title_en` varchar(64) DEFAULT NULL,
+ `title_cn` varchar(64) DEFAULT NULL,
+ `description` varchar(255) DEFAULT NULL,
+ `is_active` tinyint(1) DEFAULT 1,
+ `icon` varchar(255) DEFAULT NULL,
+ `order` int(2) DEFAULT NULL,
+ `play_num` int(11) DEFAULT 0,
+ `create_by` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `last_update_user` varchar(30) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
diff --git a/db/dss_dml.sql b/db/dss_dml.sql
index f9c2b2a24b..78bf09bd45 100644
--- a/db/dss_dml.sql
+++ b/db/dss_dml.sql
@@ -9,6 +9,7 @@ INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `s
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.spark.sql', @linkis_appid, '1', '1', '0', '1', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.spark.scala', @linkis_appid, '1', '1', '0', '1', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.hive.hql', @linkis_appid, '1', '1', '0', '1', NULL);
+INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.jdbc.jdbc', @linkis_appid, '1', '1', '0', '1', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.control.empty', @linkis_appid, '1', '1', '0', '0', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.appjoint.sendemail', @linkis_appid, '1', '1', '0', '0', NULL);
INSERT INTO `dss_workflow_node` (`id`, `icon`, `node_type`, `application_id`, `submit_to_scheduler`, `enable_copy`, `should_creation_before_node`, `support_jump`, `jump_url`) VALUES (NULL, NULL, 'linkis.appjoint.eventchecker.eventsender', @linkis_appid, '1', '1', '0', '0', NULL);
@@ -47,3 +48,61 @@ insert into `linkis_config_key_tree` VALUES(NULL,@key_id3,@tree_id1);
insert into `linkis_config_key_tree` VALUES(NULL,@key_id4,@tree_id1);
insert into `linkis_config_key_tree` VALUES(NULL,@key_id5,@tree_id1);
insert into `linkis_config_key_tree` VALUES(NULL,@key_id6,@tree_id2);
+
+#-----------------------jdbc-------------------
+
+select @application_id:=id from `linkis_application` where `name` = 'nodeexecution';
+INSERT INTO `linkis_application` (`id`, `name`, `chinese_name`, `description`) SELECT NULL,'nodeexecution',`chinese_name`,`description` FROM linkis_application WHERE @application_id IS NULL LIMIT 1 ;
+select @jdbc_id:=id from `linkis_application` where `name` = 'jdbc';
+
+INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (NULL, 'jdbc.url', '格式:', 'jdbc连接地址', @application_id, NULL , 'None', NULL , '0', '0', '1');
+INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (NULL, 'jdbc.username', NULL , 'jdbc连接用户名', @application_id, NULL, 'None', NULL , '0', '0', '1');
+INSERT INTO `linkis_config_key` (`id`, `key`, `description`, `name`, `application_id`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`) VALUES (NULL, 'jdbc.password', NULL , 'jdbc连接密码', @application_id, NULL , 'None', NULL , '0', '0', '1');
+
+select @key_id1:=id from `linkis_config_key` where `application_id` = @application_id and `key` = 'jdbc.url';
+select @key_id2:=id from `linkis_config_key` where `application_id` = @application_id and `key` = 'jdbc.username';
+select @key_id3:=id from `linkis_config_key` where `application_id` = @application_id and `key` = 'jdbc.password';
+
+SELECT @tree_id1:=t.id from linkis_config_tree t LEFT JOIN linkis_application a on t.application_id = a.id WHERE t.`name` = 'jdbc连接设置' and a.`name` = 'jdbc';
+
+insert into `linkis_config_key_tree` VALUES(NULL,@key_id1,@tree_id1);
+insert into `linkis_config_key_tree` VALUES(NULL,@key_id2,@tree_id1);
+insert into `linkis_config_key_tree` VALUES(NULL,@key_id3,@tree_id1);
+
+INSERT INTO dss_workspace (id, name, label, description, department, product, source, create_by, create_time, last_update_time, last_update_user) VALUES (1, 'default', 'default', 'default user workspace', NULL, NULL, 'create by user', 'root', NULL, NULL, 'root');
+
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 1, '工作流编辑执行', 'https://github.com/WeBankFinTech/DataSphereStudio', 'workflow edit execution', '工作流编辑执行', '工作流编辑执行', 1, NULL, 1, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 1, '工作流串联可视化', 'https://github.com/WeBankFinTech/DataSphereStudio', 'workflow series visualization', '工作流串联可视化', '工作流串联可视化', 1, NULL, 2, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 1, '工作流调度执行跑批', 'https://github.com/WeBankFinTech/DataSphereStudio', 'workflow scheduling execution run batch', '工作流调度执行跑批', '工作流调度执行跑批', 1, NULL, 3, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 2, '某业务日常运营报表', 'https://github.com/WeBankFinTech/DataSphereStudio', 'business daily operation report', '某业务日常运营报表', '某业务日常运营报表', 1, NULL, 1, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 2, '某业务机器学习建模预测', 'https://github.com/WeBankFinTech/DataSphereStudio', 'business machine learning modeling prediction', '某业务机器学习建模预测', '某业务机器学习建模预测', 1, NULL, 2, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 2, '某业务导出营销用户列表', 'https://github.com/WeBankFinTech/DataSphereStudio', 'business export marketing user list', '某业务导出营销用户列表', '某业务导出营销用户列表', 1, NULL, 3, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 3, '数据大屏体验', 'https://github.com/WeBankFinTech/DataSphereStudio', 'data big screen experience', '数据大屏体验', '数据大屏体验', 1, NULL, 1, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 3, '数据仪表盘体验', 'https://github.com/WeBankFinTech/DataSphereStudio', 'data dashboard experience', '数据仪表盘体验', '数据仪表盘体验', 1, NULL, 2, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_instance (id, menu_id, name, url, title_en, title_cn, description, is_active, icon, `order`, click_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, 3, '可视化挂件快速体验', 'https://github.com/WeBankFinTech/DataSphereStudio', 'visual widgets quick experience', '可视化挂件快速体验', '可视化挂件快速体验', 1, NULL, 3, 0, NULL, NULL, NULL, NULL);
+
+INSERT INTO dss_homepage_demo_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (1, 'workflow', 'workflow', '工作流', '工作流', 1, NULL, 1, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (2, 'application', 'application', '应用场景', '应用场景', 1, NULL, 2, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_demo_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (3, 'visualization', 'visualization', '可视化', '可视化', 1, NULL, 3, NULL, NULL, NULL, NULL);
+
+INSERT INTO dss_homepage_video (id, name, url, title_en, title_cn, description, is_active, icon, `order`, play_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, '10秒教你搭建工作流', 'https://sandbox.webank.com/wds/dss/videos/1.mp4', '10 sec how to build workflow', '10秒教你搭建工作流', '10秒教你搭建工作流', 1, NULL, 1, 0, NULL, NULL, NULL, NULL);
+INSERT INTO dss_homepage_video (id, name, url, title_en, title_cn, description, is_active, icon, `order`, play_num, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, '10秒教你发邮件', 'https://sandbox.webank.com/wds/dss/videos/1.mp4', '10 sec how to send email', '10秒教你发邮件', '10秒教你发邮件', 1, NULL, 2, 0, NULL, NULL, NULL, NULL);
+
+INSERT INTO dss_onestop_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (1, '应用开发', 'application development', '应用开发', '应用开发描述', 1, NULL, NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (2, '数据分析', 'data analysis', '数据分析', '数据分析描述', 1, NULL, NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (3, '生产运维', 'production operation', '生产运维', '生产运维描述', 1, NULL, NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (4, '数据质量', 'data quality', '数据质量', '数据质量描述', 1, NULL, NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu (id, name, title_en, title_cn, description, is_active, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (5, '管理员功能', 'administrator function', '管理员功能', '管理员功能描述', 0, NULL, NULL, NULL, NULL, NULL, NULL);
+
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 1, 'workflow development', '工作流开发', 'Workflow development is a data application development tool created by WeDataSphere with Linkis as the kernel.', '工作流开发是微众银行微数域(WeDataSphere)打造的数据应用开发工具,以任意桥(Linkis)做为内核,将满足从数据交换、脱敏清洗、分析挖掘、质量检测、可视化展现、定时调度到数据输出等数据应用开发全流程场景需求。', 'workflow, data warehouse development', '工作流,数仓开发', 1, 'enter workflow development', '进入工作流开发', 'user manual', '用户手册', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-workflow|rgb(102, 102, 255)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 1, 'StreamSQL development', 'StreamSQL开发', 'Real-time application development is a streaming solution jointly built by WeDataSphere, Boss big data team and China Telecom ctcloud Big data team.', '实时应用开发是微众银行微数域(WeDataSphere)、Boss直聘大数据团队 和 中国电信天翼云大数据团队 社区联合共建的流式解决方案,以 Linkis 做为内核,基于 Flink Engine 构建的批流统一的 Flink SQL,助力实时化转型。', 'streaming, realtime', '流式,实时', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-scriptis|rgb(102, 102, 255)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 1, 'Data service development', '数据服务开发', 'Data service is a unified API service jointly built by WeDataSphere and Ihome Big data Team. With Linkis and DataSphere Studio as the kernel.', '数据服务是微众银行微数域(WeDataSphere)与 艾佳生活大数据团队 社区联合共建的统一API服务,以 Linkis 和 DataSphere Studio 做为内核,提供快速将 Scriptis 脚本生成数据API的能力,协助企业统一管理对内对外的API服务。', 'API, data service', 'API,数据服务', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-scriptis|rgb(102, 102, 255)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 2, 'Scriptis', 'Scriptis', 'Scriptis is a one-stop interactive data exploration analysis tool built by WeDataSphere, uses Linkis as the kernel.', 'Scriptis是微众银行微数域(WeDataSphere)打造的一站式交互式数据探索分析工具,以任意桥(Linkis)做为内核,提供多种计算存储引擎(如Spark、Hive、TiSpark等)、Hive数据库管理功能、资源(如Yarn资源、服务器资源)管理、应用管理和各种用户资源(如UDF、变量等)管理的能力。', 'scripts development,IDE', '脚本开发,IDE', 1, 'enter Scriptis', '进入Scriptis', 'user manual', '用户手册', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-scriptis|rgb(102, 102, 255)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 2, 'Visualis', 'Visualis', 'Visualis is a data visualization BI tool based on Davinci, with Linkis as the kernel, it supports the analysis mode of data development exploration.', 'Visualis是基于宜信开源项目Davinci开发的数据可视化BI工具,以任意桥(Linkis)做为内核,支持拖拽式报表定义、图表联动、钻取、全局筛选、多维分析、实时查询等数据开发探索的分析模式,并做了水印、数据质量校验等金融级增强。', 'visualization, statement', '可视化,报表', 1, 'enter Visualis', '进入Visualis', 'user manual', '用户手册', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-visualis|rgb(0, 153, 255)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 3, 'Schedulis', 'Schedulis', 'Description for Schedulis.', 'Schedulis描述', 'scheduling, workflow', '调度,工作流', 1, 'enter Schedulis', '进入Schedulis', 'user manual', '用户手册', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-schedule|rgb(102, 102, 204)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 3, 'Application operation center', '应用运维中心', 'Description for Application operation center.', '应用运维中心描述', 'production, operation', '生产,运维', 0, 'enter application operation center', '进入应用运维中心', 'user manual', '用户手册', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-scriptis|rgb(102, 102, 255)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 4, 'Qualitis', 'Qualitis', 'Qualitis is a financial and one-stop data quality management platform that provides data quality model definition, visualization and monitoring of data quality results', 'Qualitis是一套金融级、一站式的数据质量管理平台,提供了数据质量模型定义,数据质量结果可视化、可监控等功能,并用一整套统一的流程来定义和检测数据集的质量并及时报告问题。', 'product, operations', '生产,运维', 1, 'enter Qualitis', '进入Qualitis', 'user manual', '用户手册', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-qualitis|rgb(51, 153, 153)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 4, 'Exchangis', 'Exchangis', 'Exchangis is a lightweight, high scalability, data exchange platform, support for structured and unstructured data transmission between heterogeneous data sources.', 'Exchangis是一个轻量级的、高扩展性的数据交换平台,支持对结构化及无结构化的异构数据源之间的数据传输,在应用层上具有数据权限管控、节点服务高可用和多租户资源隔离等业务特性,而在数据层上又具有传输架构多样化、模块插件化和组件低耦合等架构特点。', 'production, operations', '生产,运维', 1, 'enter Exchangis', '进入Exchangis', 'user manual', '用户手册', 'https://github.com/WeBankFinTech/DataSphereStudio', 'fi-exchange|rgb(102, 102, 255)', NULL, NULL, NULL, NULL, NULL);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 5, 'Workspace management', '工作空间管理', NULL, NULL, NULL, NULL, 1, 'workspace management', '工作空间管理', null, null, null, 'fi-scriptis|rgb(102, 102, 255)', null, null, null, null, null);
+INSERT INTO dss_onestop_menu_application (id, application_id, onestop_menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user) VALUES (NULL, NULL, 5, 'User resources management', '用户资源管理', NULL, NULL, NULL, NULL, 1, 'user resource management', '用户资源管理', null, null, null, 'fi-scriptis|rgb(102, 102, 255)', null, null, null, null, null);
+
diff --git a/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md b/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md
index d3dde44ae7..598ff158b8 100644
--- a/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md
+++ b/docs/en_US/ch1/DataSphereStudio_Compile_Manual.md
@@ -6,8 +6,8 @@
```xml
- <dss.version>0.7.0</dss.version>
- <linkis.version>0.9.1</linkis.version>
+ <dss.version>0.9.0</dss.version>
+ <linkis.version>0.9.4</linkis.version>
 <scala.version>2.11.8</scala.version>
 <jdk.compile.version>1.8</jdk.compile.version>
 <maven.version>3.3.3</maven.version>
diff --git a/docs/en_US/ch2/Azkaban_LinkisJobType_Deployment_Manual.md b/docs/en_US/ch2/Azkaban_LinkisJobType_Deployment_Manual.md
index d70a87eedc..33cb3808cb 100644
--- a/docs/en_US/ch2/Azkaban_LinkisJobType_Deployment_Manual.md
+++ b/docs/en_US/ch2/Azkaban_LinkisJobType_Deployment_Manual.md
@@ -2,7 +2,7 @@
## 1. Ready work
-1.Click [release](https://github.com/WeBankFinTech/DataSphereStudio/releases/download/0.7.0/linkis-jobtype-0.7.0.zip) to select the corresponding installation package to download:
+1.Click [release](https://github.com/WeBankFinTech/DataSphereStudio/releases/download/0.8.0/linkis-jobtype-0.8.0.zip) to select the corresponding installation package to download:
- linkis-jobtype-$version.zip
diff --git a/docs/en_US/ch2/DSS Quick Installation Guide.md b/docs/en_US/ch2/DSS Quick Installation Guide.md
index f8837393dc..bc36886351 100644
--- a/docs/en_US/ch2/DSS Quick Installation Guide.md
+++ b/docs/en_US/ch2/DSS Quick Installation Guide.md
@@ -17,7 +17,7 @@ DSS also implements the integration of many external systems, such as [Qualitis]
DSS environment configuration can be divided into three steps, including basic software installation, backend environment configuration, and frontend environment configuration. The details are as below:
### 2.1 Frontend and backend basic software installation
-Linkis standard version (above 0.9.1). How to install [Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
+Linkis standard version (above 0.9.4). How to install [Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
JDK (above 1.8.0_141). How to install [JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -103,7 +103,7 @@ dss_port="8088"
linkis_url="http://127.0.0.1:9001"
# dss ip address
-dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
```
The environment is ready, click me to enter **[4. Installation and use](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/en_US/ch2/DSS%20Quick%20Installation%20Guide.md#four-installation-and-use)**
@@ -111,7 +111,7 @@ The environment is ready, click me to enter ****[4. Installation and use](https:
## Three Standard DSS environment configuration preparation
The standard DSS environment preparation is also divided into three parts: front-end and back-end basic software installation, back-end environment preparation, and front-end environment preparation. The details are as follows:
### 3.1 Front-end and back-end basic software installation
-Linkis standard version (above 0.9.1), [How to install Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
+Linkis standard version (above 0.9.4), [How to install Linkis](https://github.com/WeBankFinTech/Linkis/blob/master/docs/en_US/ch1/deploy.md)
JDK (above 1.8.0_141), How to install [JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -219,7 +219,7 @@ dss_port="8088"
linkis_url="http://127.0.0.1:9001"
# dss ip address
-dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
```
The environment is ready, click me to enter **[Four Installation and use](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/en_US/ch2/DSS%20Quick%20Installation%20Guide.md#four-installation-and-use)**
diff --git a/docs/en_US/ch2/DSS_0.9.0_upgrade_notes.md b/docs/en_US/ch2/DSS_0.9.0_upgrade_notes.md
new file mode 100644
index 0000000000..cee4ec591e
--- /dev/null
+++ b/docs/en_US/ch2/DSS_0.9.0_upgrade_notes.md
@@ -0,0 +1,16 @@
+# DSS 0.9.0 upgrade notes
+
+In DSS 0.9.0, the concept “workspace” is added. If you upgrade from DSS 0.7 or DSS 0.8 to DSS 0.9.0, the following adjustments need to be made after completing platform deployment: the field `application_id` of table `dss_onestop_menu_application` is NULL by default, and it is a foreign key referencing field `id` of table `dss_application`. The field `application_id` therefore needs to be filled with values chosen from field `id` of table `dss_application`, according to the actual situation of the business system, so as to connect the workspace with each application.
+E.g:
+```
+-- Update application_id corresponding to workflow application
+UPDATE dss_onestop_menu_application SET application_id = 2 WHERE id = 1;
+-- Update application_id corresponding to Scriptis application
+UPDATE dss_onestop_menu_application SET application_id = 1 WHERE id = 4;
+```
+In addition, for users who have deployed DSS 0.8.0 or below, the following adjustment is also required:
+Field `workspace_id` is added to table `dss_project` as a foreign key referencing field `id` of table `dss_workspace`, so the following command needs to be executed:
+```
+ALTER TABLE dss_project ADD workspace_id bigint(20) DEFAULT 1;
+```
+By default, all original projects belong to the default workspace (workspace_id=1). Users may add more workspaces according to the actual situation and adjust the workspace of original projects as needed.
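+
+For example, a minimal sketch of adding a new workspace and moving an existing project into it (the column list follows the default `dss_workspace` data above; the workspace id, names and project id below are illustrative placeholders):
+
+```
+-- Create an additional workspace, choosing its id explicitly
+INSERT INTO dss_workspace (id, name, label, description, department, product, source, create_by, create_time, last_update_time, last_update_user)
+VALUES (2, 'analytics', 'analytics', 'workspace for the analytics team', NULL, NULL, 'create by user', 'root', NULL, NULL, 'root');
+-- Move an existing project into the new workspace
+UPDATE dss_project SET workspace_id = 2 WHERE id = 1;
+```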
diff --git "a/docs/zh_CN/ch1/DSS\345\256\211\350\243\205\345\270\270\350\247\201\351\227\256\351\242\230\345\210\227\350\241\250.md" "b/docs/zh_CN/ch1/DSS\345\256\211\350\243\205\345\270\270\350\247\201\351\227\256\351\242\230\345\210\227\350\241\250.md"
index 0b9e779828..151c25c6ab 100644
--- "a/docs/zh_CN/ch1/DSS\345\256\211\350\243\205\345\270\270\350\247\201\351\227\256\351\242\230\345\210\227\350\241\250.md"
+++ "b/docs/zh_CN/ch1/DSS\345\256\211\350\243\205\345\270\270\350\247\201\351\227\256\351\242\230\345\210\227\350\241\250.md"
@@ -3,17 +3,20 @@
**本文档汇总DSS安装过程中所有问题列表及解决方式,为社区用户安装DSS提供参考。**
-#### (1) 创建工程提示用户token为空
+#### (1) 创建工程失败:add scheduler project用户token为空
```
-sudo vi dss-server/conf/token.properties
-```
-
-添加用户
+{"method":null,"status":1,"message":"error code(错误码): 90002, error message(错误信息): add scheduler project failederrCode: 90019 ,desc: errCode: 90020 ,desc: 用户token为空 ,ip: dss.com ,port: 9004 ,serviceKind: dss-server ,ip: dss.com ,port: 9004 ,serviceKind: dss-server.","data":{"errorMsg":{"serviceKind":"dss-server","level":2,"port":9004,"errCode":90002,"ip":"dss.com","desc":"add scheduler project failederrCode: 90019 ,desc: errCode: 90020 ,desc: 用户token为空 ,ip: dss.com ,port: 9004 ,serviceKind: dss-server ,ip: dss.com ,port: 9004 ,serviceKind: dss-server"}}}
```
-xxx=xxx
-```
+
+确保dss-server的token.properties中添加了此用户,并保持与 azkaban 的 azkaban-users.xml用户一致
+以hadoop用户为例:
+1、在dss-server的token.properties添加
+hadoop=hadoop
+2、在azkaban 的 azkaban-users.xml 文件添加:
+`<user username="hadoop" password="hadoop" roles="admin"/>`
+
- <dss.version>0.7.0</dss.version>
- <linkis.version>0.9.1</linkis.version>
+ <dss.version>0.9.0</dss.version>
+ <linkis.version>0.9.4</linkis.version>
 <scala.version>2.11.8</scala.version>
 <jdk.compile.version>1.8</jdk.compile.version>
 <maven.version>3.3.3</maven.version>
diff --git a/docs/zh_CN/ch2/Azkaban_LinkisJobType_Deployment_Manual.md b/docs/zh_CN/ch2/Azkaban_LinkisJobType_Deployment_Manual.md
index 846dc7aedf..adc075174e 100644
--- a/docs/zh_CN/ch2/Azkaban_LinkisJobType_Deployment_Manual.md
+++ b/docs/zh_CN/ch2/Azkaban_LinkisJobType_Deployment_Manual.md
@@ -22,19 +22,19 @@ cd linkis/bin/
LINKIS_GATEWAY_URL=http://127.0.0.1:9001 ## linkis的GateWay地址
##Linkis gateway token defaultWS-AUTH
-LINKIS_GATEWAY_TOKEN=WS-AUTH ## Linkis的代理Token,该参数可以用默认的
+LINKIS_GATEWAY_TOKEN=WS-AUTH ## Linkis的代理Token,该参数可以用默认值
##Azkaban executor host
-AZKABAN_EXECUTOR_HOST=127.0.0.1 ## AZKABAN执行器机器IP
+AZKABAN_EXECUTOR_HOST=127.0.0.1 ## 如果Azkaban是单机安装,则该IP就是机器IP;如果是分布式安装,则为Azkaban执行器机器IP
### SSH Port
SSH_PORT=22 ## SSH端口
##Azkaban executor dir
-AZKABAN_EXECUTOR_DIR=/tmp/Install/AzkabanInstall/executor ## 执行器的安装目录,最后不需要带上/
+AZKABAN_EXECUTOR_DIR=/tmp/Install/AzkabanInstall/executor ## 如果Azkaban是单机安装,则该目录是Azkaban的安装目录;如果是分布式安装,则为执行器的安装目录。注意:最后不需要带上/
##Azkaban executor plugin reload url
-AZKABAN_EXECUTOR_URL=http://127.0.0.1:12321/executor?action=reloadJobTypePlugins ##这里只需要修改IP和端口即可
+AZKABAN_EXECUTOR_URL=http://$AZKABAN_EXECUTOR_HOST:12321/executor?action=reloadJobTypePlugins ##这里只需要修改IP和端口即可,该地址为Azkaban重载插件的地址。
```
## 3. 执行安装脚本
```
diff --git "a/docs/zh_CN/ch2/DSS_0.9.0_\345\215\207\347\272\247\350\257\264\346\230\216.md" "b/docs/zh_CN/ch2/DSS_0.9.0_\345\215\207\347\272\247\350\257\264\346\230\216.md"
new file mode 100644
index 0000000000..619959583c
--- /dev/null
+++ "b/docs/zh_CN/ch2/DSS_0.9.0_\345\215\207\347\272\247\350\257\264\346\230\216.md"
@@ -0,0 +1,16 @@
+# DSS-0.9.0升级说明
+
+本次DSS-0.9.0版本新增用户工作空间(workspace)概念,如果您是从 DSS0.7 或 DSS0.8 升级到 DSS0.9.0,在完成平台部署后,需对数据库表作如下调整:
+dss_onestop_menu_application表中的application_id字段默认为空,该字段与dss_application表的id字段关联,需根据用户业务系统的实际情况与dss_application表进行关联,将用户工作空间与各应用打通。例如:
+```
+-- 更新workflow应用对应的application_id
+UPDATE dss_onestop_menu_application SET application_id = 2 WHERE id = 1;
+-- 更新Scriptis应用对应的application_id
+UPDATE dss_onestop_menu_application SET application_id = 1 WHERE id = 4;
+```
+此外,对于已部署DSS-0.8.0及以下版本的用户,还需做如下调整:
+dss_project表新增workspace_id字段,该字段与dss_workspace表的id字段关联,需在数据库执行如下命令:
+```
+ALTER TABLE dss_project ADD workspace_id bigint(20) DEFAULT 1;
+```
+默认情况下,所有原有项目都将归属默认工作空间(workspace_id=1),用户可根据实际情况新增用户空间,并调整原有项目的所属工作空间。
\ No newline at end of file
diff --git a/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md b/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md
new file mode 100644
index 0000000000..60e5735fb4
--- /dev/null
+++ b/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md
@@ -0,0 +1,490 @@
+# DataSphere Studio快速安装使用文档
+
+由于DataSphere Studio依赖于[Linkis](https://github.com/WeBankFinTech/Linkis),本文档提供了以下两种部署方式供您选择:
+
+1. DSS & Linkis 一键部署
+
+ 该模式适合于DSS和Linkis都没有安装的情况。
+
+ 进入[DSS & Linkis安装环境准备](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md#%E4%B8%80dss--linkis%E5%AE%89%E8%A3%85%E7%8E%AF%E5%A2%83%E5%87%86%E5%A4%87)
+
+2. DSS 一键部署
+
+ 该模式适合于Linkis已经安装,需要安装DSS的情况。
+
+ 进入[DSS快速安装文档](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8%E6%96%87%E6%A1%A3.md)
+
+ **请根据实际情况,选择合理安装方式**。
+
+## 一、DSS & Linkis安装环境准备
+
+**根据安装难度,我们提供了以下两种环境准备方式,请根据需要选择:**
+
+1. **精简版**
+
+ 没有任何安装难度,适合于调研和学习,10分钟即可部署起来。
+
+ 支持的功能有:
+
+- 数据开发IDE - Scriptis,仅支持:执行Python和JDBC脚本
+- Linkis管理台
+
+**进入[DSS & Linkis精简版环境准备](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md#%E4%BA%8Cdss--linkis%E7%B2%BE%E7%AE%80%E7%89%88%E7%8E%AF%E5%A2%83%E5%87%86%E5%A4%87)**
+
+2. **标准版**:
+
+ 有一定的安装难度,体现在Hadoop、Hive和Spark版本不同时,可能需要重新编译,可能会出现包冲突问题。
+
+适合于试用和生产使用,2~3小时即可部署起来。
+
+ 支持的功能有:
+
+- 数据开发IDE - Scriptis
+
+- 工作流实时执行
+
+- 信号功能和邮件功能
+
+- 数据可视化 - Visualis
+
+- 数据质量 - Qualitis(**单机版**)
+
+- 工作流定时调度 - Azkaban(**单机版**)
+
+- Linkis管理台
+
+**进入[DSS & Linkis标准版环境准备](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md#%E4%B8%89dss--linkis%E6%A0%87%E5%87%86%E7%89%88%E7%8E%AF%E5%A2%83%E5%87%86%E5%A4%87)**
+
+----
+
+## 二、DSS & Linkis精简版环境准备
+
+### a. 基础软件安装
+
+ 下面的软件必装:
+
+- MySQL (5.5+),[如何安装MySQL](https://www.runoob.com/mysql/mysql-install.html)
+- JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
+- Python(2.x和3.x都支持),[如何安装Python](https://www.runoob.com/python/python-install.html)
+- Nginx,[如何安装Nginx](https://www.tecmint.com/install-nginx-on-centos-7/)
+
+### b. 创建用户
+
+ 例如: **部署用户是hadoop账号**
+
+1. 在部署机器上创建部署用户,用于安装
+
+```bash
+ sudo useradd hadoop
+```
+
+2. 因为Linkis的服务是以 sudo -u ${linux-user} 方式来切换引擎,从而执行作业,所以部署用户需要有 sudo 权限,而且是免密的。
+
+```bash
+ vi /etc/sudoers
+```
+
+ hadoop ALL=(ALL) NOPASSWD: ALL
+
+3. **如果您的Python想拥有画图功能,则还需在安装节点,安装画图模块**。命令如下:
+
+```bash
+ python -m pip install matplotlib
+```
+
+### c. 安装包准备
+
+**如果您想使用DSS & Linkis全家桶一键部署安装包(1.3GB)([点我进入下载页面](https://github.com/WeBankFinTech/DataSphereStudio/issues/90)),直接解压即可,以下步骤可忽略。**
+
+下列步骤为用户自行编译或者去各个组件release页面下载安装包:
+1. 下载安装包
+- [wedatasphere-linkis-x.x.x-dist.tar.gz](https://github.com/WeBankFinTech/Linkis/releases)
+- [wedatasphere-dss-x.x.x-dist.tar.gz](https://github.com/WeBankFinTech/DataSphereStudio/releases)
+- [wedatasphere-dss-web-x.x.x-dist.zip](https://github.com/WeBankFinTech/DataSphereStudio/releases)
+- [linkis-jobtype-x.x.x.zip](https://github.com/WeBankFinTech/DataSphereStudio/releases)
+- azkaban-solo-server-x.x.x.tar.gz
+- [wedatasphere-qualitis-x.x.x.zip](https://github.com/WeBankFinTech/Qualitis/releases)
+
+2. 下载DSS&LINKIS[一键部署脚本](https://share.weiyun.com/58yxh3n),并解压,再将上述所下载的安装包放置于该目录下,目录层级如下:
+
+```text
+├── dss_linkis # 一键部署主目录
+ ├── backup # 用于兼容Linkis老版本的安装启动脚本
+ ├── bin # 用于一键安装启动DSS+Linkis
+ ├── conf # 一键部署的配置文件
+ ├── azkaban-solo-server-x.x.x.tar.gz #azkaban安装包
+ ├── linkis-jobtype-x.x.x.zip #linkis jobtype安装包
+ ├── wedatasphere-dss-x.x.x-dist.tar.gz # DSS后台安装包
+ ├── wedatasphere-dss-web-x.x.x-dist.zip # DSS前端安装包
+ ├── wedatasphere-linkis-x.x.x-dist.tar.gz # Linkis安装包
+ ├── wedatasphere-qualitis-x.x.x.zip # Qualitis安装包
+```
+**注意事项:**
+1. Azkaban: 社区没有提供单独的release安装包,用户需要自行编译后,将安装包放置于安装目录下。
+2. DSS: 用户自行编译的DSS安装包会缺失visualis-server部分,因此visualis-server也需要用户自行编译:从[visualis项目](https://github.com/WeBankFinTech/Visualis)编译打包后,放置于wedatasphere-dss-x.x.x-dist.tar.gz的share/visualis-server目录下,否则dss安装时可能报找不到visualis安装包。
+
+### d. 修改配置
+
+将conf目录下的config.sh.lite.template,复制为config.sh
+
+```shell
+ cp conf/config.sh.lite.template conf/config.sh
+```
+
+**精简版可以不修改任何配置参数**,当然您也可以按需修改相关配置参数。
+
+```
+ vi conf/config.sh
+
+ SSH_PORT=22 #ssh默认端口
+ deployUser="`whoami`" #默认获取当前用户为部署用户
+ WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/ ##工作空间路径,默认为本地路径,尽量提前创建并授予写权限
+ RESULT_SET_ROOT_PATH=file:///tmp/linkis ##结果集路径,默认为本地路径,尽量提前创建并授予写权限
+ DSS_NGINX_IP=127.0.0.1 #DSS Nginx访问IP
+ DSS_WEB_PORT=8088 #DSS Web页面访问端口
+
+```
+
+```properties
+ # 说明:通常情况下,精简版,上述参数默认情况均可不做修改,即可直接安装使用
+
+```
+
+### e. 修改数据库配置
+
+```bash
+ vi conf/db.sh
+```
+
+```properties
+ # 设置数据库的连接信息
+ MYSQL_HOST=
+ MYSQL_PORT=
+ MYSQL_DB=
+ MYSQL_USER=
+ MYSQL_PASSWORD=
+```
+
+
+```properties
+ # 说明:此为必须配置参数,并确保可以从本机进行访问,验证方式:
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD
+```
+
+精简版配置修改完毕,进入[安装和使用](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md#%E5%9B%9B%E5%AE%89%E8%A3%85%E5%92%8C%E4%BD%BF%E7%94%A8)
+
+## 三、DSS & Linkis标准版环境准备
+
+### a. 基础软件安装
+
+ 下面的软件必装:
+
+- MySQL (5.5+),[如何安装MySQL](https://www.runoob.com/mysql/mysql-install.html)
+
+- JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
+
+- Python(2.x和3.x都支持),[如何安装Python](https://www.runoob.com/python/python-install.html)
+
+- Nginx,[如何安装Nginx](https://www.tecmint.com/install-nginx-on-centos-7/)
+
+ 下面的服务必须可从本机访问:
+
+- Hadoop(**2.7.2,Hadoop其他版本需自行编译Linkis**)
+
+- Hive(**1.2.1,Hive其他版本需自行编译Linkis**)
+
+- Spark(**支持2.0以上所有版本**)
+
+### b. 创建用户
+
+ 例如: **部署用户是hadoop账号**
+
+1. 在所有需要部署的机器上创建部署用户,用于安装
+
+```bash
+ sudo useradd hadoop
+```
+
+2. 因为Linkis的服务是以 sudo -u ${linux-user} 方式来切换引擎,从而执行作业,所以部署用户需要有 sudo 权限,而且是免密的。
+
+```bash
+ vi /etc/sudoers
+```
+
+```properties
+ hadoop ALL=(ALL) NOPASSWD: ALL
+```
+
+3. 确保部署 DSS 和 Linkis 的服务器可正常访问Hadoop、Hive和Spark。
+
+ **部署DSS 和 Linkis 的服务器,不要求必须安装Hadoop,但要求hdfs命令必须可用,如:hdfs dfs -ls /**。
+
+ **如果想使用Linkis的Spark,部署 Linkis 的服务器,要求spark-sql命令必须可以正常启动一个spark application**。
+
+ **在每台安装节点设置如下的全局环境变量**,以便Linkis能正常读取Hadoop、Hive和Spark的配置文件,具备访问Hadoop、Hive和Spark的能力。
+
+ 修改安装用户hadoop的.bashrc,命令如下:
+
+```bash
+ vim /home/hadoop/.bashrc
+```
+
+ 下方为环境变量示例:
+
+```bash
+ #JDK
+ export JAVA_HOME=/nemo/jdk1.8.0_141
+ #HADOOP
+ export HADOOP_CONF_DIR=/appcom/config/hadoop-config
+ #Hive
+ export HIVE_CONF_DIR=/appcom/config/hive-config
+ #Spark
+ export SPARK_HOME=/appcom/Install/spark
+ export SPARK_CONF_DIR=/appcom/config/spark-config
+ export PYSPARK_ALLOW_INSECURE_GATEWAY=1 # Pyspark必须加的参数
+```
+
+4. **如果您的Pyspark想拥有画图功能,则还需在所有安装节点,安装画图模块**。命令如下:
+
+```bash
+ python -m pip install matplotlib
+```
+
+### c. 安装包准备
+
+**如果您想使用DSS & Linkis全家桶一键部署安装包(1.3GB)([点我进入下载页面](https://github.com/WeBankFinTech/DataSphereStudio/issues/90)),直接解压即可,以下步骤可忽略。**
+
+下列步骤为用户自行编译或者去各个组件release页面下载安装包:
+1. 下载安装包
+- [wedatasphere-linkis-x.x.x-dist.tar.gz](https://github.com/WeBankFinTech/Linkis/releases)
+- [wedatasphere-dss-x.x.x-dist.tar.gz](https://github.com/WeBankFinTech/DataSphereStudio/releases)
+- [wedatasphere-dss-web-x.x.x-dist.zip](https://github.com/WeBankFinTech/DataSphereStudio/releases)
+- [linkis-jobtype-x.x.x.zip](https://github.com/WeBankFinTech/DataSphereStudio/releases)
+- azkaban-solo-server-x.x.x.tar.gz
+- [wedatasphere-qualitis-x.x.x.zip](https://github.com/WeBankFinTech/Qualitis/releases)
+
+2. 下载DSS&LINKIS[一键部署脚本](https://share.weiyun.com/58yxh3n),并解压,再将上述所下载的安装包放置于该目录下,目录层级如下:
+
+```text
+├── dss_linkis # 一键部署主目录
+ ├── backup # 用于兼容Linkis老版本的安装启动脚本
+ ├── bin # 用于一键安装启动DSS+Linkis
+ ├── conf # 一键部署的配置文件
+ ├── azkaban-solo-server-x.x.x.tar.gz #azkaban安装包
+ ├── linkis-jobtype-x.x.x.zip #linkis jobtype安装包
+ ├── wedatasphere-dss-x.x.x-dist.tar.gz # DSS后台安装包
+ ├── wedatasphere-dss-web-x.x.x-dist.zip # DSS前端安装包
+ ├── wedatasphere-linkis-x.x.x-dist.tar.gz # Linkis安装包
+ ├── wedatasphere-qualitis-x.x.x.zip # Qualitis安装包
+```
+**注意事项:**
+1. Azkaban: 社区没有提供单独的release安装包,用户需要自行编译后,将安装包放置于安装目录下。
+2. DSS: 用户自行编译的DSS安装包会缺失visualis-server部分,因此visualis-server也需要用户自行编译:从[visualis项目](https://github.com/WeBankFinTech/Visualis)编译打包后,放置于wedatasphere-dss-x.x.x-dist.tar.gz的share/visualis-server目录下,否则安装时可能报找不到visualis安装包。
+
+### d. 修改配置
+
+将conf目录下的config.sh.stand.template,复制为config.sh
+
+```shell
+ cp conf/config.sh.stand.template conf/config.sh
+```
+
+您可以按需修改相关配置参数:
+
+```
+ vi conf/config.sh
+```
+
+参数说明如下:
+```properties
+ WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/ ##本地工作空间路径,尽量提前创建并授予写权限
+ HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis ##hdfs工作空间路径,尽量提前创建并授予写权限
+ RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##结果集路径,默认为hdfs路径,尽量提前创建并授予写权限
+ WDS_SCHEDULER_PATH=file:///appcom/tmp/wds/scheduler ##DSS工程转换为azkaban工程后的存储路径
+ #DSS Web,注意distribution.sh中VISUALIS_NGINX的IP和端口必须和此处保持一致
+ DSS_NGINX_IP=127.0.0.1 #DSS Nginx访问IP
+ DSS_WEB_PORT=8088 #DSS Web页面访问端口
+ ##hive metastore的地址
+ HIVE_META_URL=jdbc:mysql://127.0.0.1:3306/metastore?useUnicode=true
+ HIVE_META_USER=xxx
+ HIVE_META_PASSWORD=xxx
+ ###hadoop配置文件目录
+ HADOOP_CONF_DIR=/appcom/config/hadoop-config
+ ###hive配置文件目录
+ HIVE_CONF_DIR=/appcom/config/hive-config
+ ###spark配置文件目录
+ SPARK_CONF_DIR=/appcom/config/spark-config
+ ###azkaban服务端IP地址及端口,单机版安装时请勿修改
+ AZKABAN_ADRESS_IP=127.0.0.1
+ AZKABAN_ADRESS_PORT=8081
+ ####Qualitis服务端IP地址及端口,单机版安装时请勿修改
+ QUALITIS_ADRESS_IP=127.0.0.1
+ QUALITIS_ADRESS_PORT=8090
+
+```
+
+### e. 使用分布式模式
+
+ 如果您打算将DSS和Linkis都部署在同一台服务器上, 本步骤可以跳过。
+
+ 如果您打算将 DSS 和 Linkis 部署在多台服务器上,首先,您需要为这些服务器配置ssh免密登陆。
+
+ [如何配置SSH免密登陆](https://www.jianshu.com/p/0922095f69f3)
+
+ 同时,您还需要修改分布式部署模式下的distribution.sh配置文件,使分布式部署生效。
+
+```shell script
+ vi conf/distribution.sh
+```
+
+```text
+说明:LINKIS和DSS的微服务IP地址和端口,可配置成远程地址。例如您想把LINKIS和DSS安装在不同的机器上,那么只需把linkis各项微服务的IP地址修改成与DSS不同的IP即可。
+```
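+
+A hypothetical excerpt of a distributed-mode distribution.sh is sketched below. The variable names are illustrative assumptions (only VISUALIS_NGINX is referenced by config.sh above); consult the actual file for the real keys:
+
+```properties
+# 示意配置,变量名仅为假设,请以实际的distribution.sh为准
+GATEWAY_INSTALL_IP=192.168.1.10
+VISUALIS_NGINX_IP=192.168.1.11
+```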
+
+### f. 修改数据库配置
+
+```bash
+ vi conf/db.sh
+```
+
+```properties
+ # 设置数据库的连接信息
+ MYSQL_HOST=
+ MYSQL_PORT=
+ MYSQL_DB=
+ MYSQL_USER=
+ MYSQL_PASSWORD=
+```
+
+```properties
+ # 说明:此为必须配置参数,并确保可以从本机进行访问,验证方式:
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD
+```
+
+标准版配置修改完毕,进入[安装和使用](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS_LINKIS_Quick_Install.md#%E5%9B%9B%E5%AE%89%E8%A3%85%E5%92%8C%E4%BD%BF%E7%94%A8)
+
+----
+
+
+## 四、安装和使用
+
+### 1. 执行安装脚本:
+
+```bash
+ sh bin/install.sh
+```
+注意:安装脚本有两处是相对路径,为了正确安装,请按照以上命令执行。
+
+### 2. 安装步骤
+
+- 该安装脚本会检查各项集成环境命令,如果没有请按照提示进行安装,以下命令为必须项:
+_yum java mysql unzip expect telnet tar sed dos2unix nginx_
+
+- 安装过程如果有很多cp命令提示您是否覆盖安装,说明您的系统配置了别名。输入alias查看,如果存在cp、mv、rm的别名,可以先去掉,就不会再有大量提示。
+
+- install.sh脚本会询问您安装模式。
+安装模式分为精简版、标准版,请根据您准备的环境情况,选择合适的安装模式。
+
+- install.sh脚本会询问您是否需要初始化数据库并导入元数据,linkis和dss 均会询问。
+
+ **第一次安装**必须选是。
+
+### 3. 是否安装成功:
+
+ 通过查看控制台打印的日志信息查看是否安装成功。
+
+ 如果有错误信息,可以查看具体报错原因。
+
+ 您也可以通过查看我们的[安装常见问题](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch1/DSS%E5%AE%89%E8%A3%85%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98%E5%88%97%E8%A1%A8.md),获取问题的解答。
+
+### 4. 启动服务
+
+#### (1) 启动服务:
+
+ 在安装目录执行以下命令,启动所有服务:
+
+```shell script
+ sh bin/start-all.sh > start.log 2>start_error.log
+```
+
+ 如果启动产生了错误信息,可以查看具体报错原因。启动后,各项微服务都会进行**通信检测**,如果有异常则可以帮助用户定位异常日志和原因。
+
+ 您可以通过查看我们的[启动常见问题](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch1/DSS%E5%AE%89%E8%A3%85%E5%B8%B8%E8%A7%81%E9%97%AE%E9%A2%98%E5%88%97%E8%A1%A8.md),获取问题的解答。
+
+#### (2) 查看是否启动成功
+
+ 可以在Eureka界面查看 Linkis & DSS 后台各微服务的启动情况。Eureka主页的访问地址,**启动日志会打印**。如下图,如您的Eureka主页出现以下微服务,则表示服务都启动成功,可以正常对外提供服务了:
+
+ ![Eureka](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/images/zh_CN/chapter2/quickInstallUse/quickInstall.png)
+
+#### (3) 谷歌浏览器访问:
+
+请使用**谷歌浏览器**访问以下前端地址:
+
+`http://DSS_NGINX_IP:DSS_WEB_PORT` **启动日志会打印此访问地址**。登陆时管理员的用户名和密码均为部署用户名,如部署用户为hadoop,则管理员的用户名/密码为:hadoop/hadoop。
+
+如果您想支持更多用户登录,详见 [Linkis LDAP](https://github.com/WeBankFinTech/Linkis/wiki/%E9%83%A8%E7%BD%B2%E5%92%8C%E7%BC%96%E8%AF%91%E9%97%AE%E9%A2%98%E6%80%BB%E7%BB%93)
+
+如何快速使用DSS, 点我进入 [DSS快速使用文档](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch3/DSS_User_Manual.md)
+
+【DSS用户手册】提供了更加详尽的使用方法,点我进入 [DSS用户手册](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch3/DSS_User_Manual.md)
+#### (4) 停止服务:
+ 在安装目录执行以下命令,停止所有服务:sh bin/stop-all.sh
+
+**注意**
+1. 如果用户想启动和停止**单个应用**,可修改启动脚本注释掉其他应用的启动和停止命令即可。
+
+2. 如果用户想启动和停止**单个微服务**,则可进入该微服务安装目录下执行sh bin/start-微服务名称.sh或sh bin/stop-微服务名称.sh
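+
+For instance, a hedged sketch for a single microservice, following the sh bin/start-微服务名称.sh pattern above (dss-server is one such microservice; substitute the service you need):
+
+```bash
+cd dss-server                # enter the microservice's install directory
+sh bin/start-dss-server.sh   # start this microservice only
+sh bin/stop-dss-server.sh    # stop this microservice only
+```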
+
+## 五、云资源
+
+**我们提供了DSS + Linkis + Qualitis + Visualis + Azkaban【全家桶一键部署安装包】,由于安装包过大(1.3GB),Github下载缓慢,请通过以下方式获取**:
+
+**Baidu cloud:**
+
+- 百度云链接:https://pan.baidu.com/s/1hmxuJtyY72D5X_dZoQIE5g
+
+- Password: p82h
+
+**Tencent Cloud:**
+
+- 腾讯云链接:https://share.weiyun.com/5vpLr9t
+
+- Password: upqgib
+
+**DSS&Linkis 一键部署脚本**
+
+- URL:https://share.weiyun.com/5Qvl07X
+
+**以下为Linkis安装包资源:**
+
+- 腾讯云链接:https://share.weiyun.com/5Gjz0zU
+
+- 密码:9vctqg
+
+- 百度云链接:https://pan.baidu.com/s/1uuogWgLE9r8EcGROkRNeKg
+
+- 密码:pwbz
+
+**以下为DSS安装包资源:**
+
+- 腾讯云链接:https://share.weiyun.com/5n2GD0h
+
+- 密码:p8f4ug
+
+- 百度云链接:https://pan.baidu.com/s/18H8P75Y-cSEsW-doVRyAJQ
+
+- 密码:pnnj
+
+**附Qualitis及Azkaban单机版安装包资源:**
+
+- 腾讯云链接:https://share.weiyun.com/5fBPVIV
+
+- 密码:cwnhgw
+
+- 百度云链接:https://pan.baidu.com/s/1DYvm_KTljQpbdk6ZPx6K9g
+
+- 密码:3lnk
diff --git "a/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md" "b/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md"
index d3d96b212a..a8fda0a9b5 100644
--- "a/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md"
+++ "b/docs/zh_CN/ch2/DSS\345\277\253\351\200\237\345\256\211\350\243\205\344\275\277\347\224\250\346\226\207\346\241\243.md"
@@ -32,7 +32,7 @@
## 二、精简版DSS环境配置准备
DSS环境配置准备分为三部分,前后端基础软件安装、后端环境配置准备和前端环境配置准备,详细介绍如下:
### 2.1 前后端基础软件安装
-Linkis简单版(0.9.1及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
+Linkis精简版(0.9.4及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -78,7 +78,7 @@ Nginx,[如何安装Nginx](https://www.tecmint.com/install-nginx-on-centos-7/)
deployUser=hadoop #指定部署用户
- DSS_INSTALL_HOME=/appcom/Install/DSS #指定DSS的安装目录
+ DSS_INSTALL_HOME=$workDir #默认为上一级目录
WORKSPACE_USER_ROOT_PATH=file:///tmp/Linkis #指定用户根目录,存储用户的脚本文件和日志文件等,是用户的工作空间。
@@ -132,7 +132,7 @@ dss_port="8088"
linkis_url="http://127.0.0.1:9001"
# dss ip address
-dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
```
环境准备完毕,点我进入 [五、安装和使用](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8%E6%96%87%E6%A1%A3.md#%E4%BA%94%E5%AE%89%E8%A3%85%E5%92%8C%E4%BD%BF%E7%94%A8)
@@ -143,7 +143,7 @@ dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/
## 三、简单版DSS环境配置准备
DSS环境配置准备分为三部分,前后端基础软件安装、后端环境配置准备和前端环境配置准备,详细介绍如下:
### 3.1 前后端基础软件安装
-Linkis简单版(0.9.1及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
+Linkis简单版(0.9.4及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -189,7 +189,7 @@ Nginx,[如何安装Nginx](https://www.tecmint.com/install-nginx-on-centos-7/)
deployUser=hadoop #指定部署用户
- DSS_INSTALL_HOME=/appcom/Install/DSS #指定DSS的安装目录
+ DSS_INSTALL_HOME=$workDir #默认为上一级目录
WORKSPACE_USER_ROOT_PATH=file:///tmp/Linkis #指定用户根目录,存储用户的脚本文件和日志文件等,是用户的工作空间。
@@ -243,7 +243,7 @@ dss_port="8088"
linkis_url="http://127.0.0.1:9001"
# dss ip address
-dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
```
环境准备完毕,点我进入 [五、安装和使用](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8%E6%96%87%E6%A1%A3.md#%E4%BA%94%E5%AE%89%E8%A3%85%E5%92%8C%E4%BD%BF%E7%94%A8)
@@ -251,7 +251,7 @@ dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/
## 四、标准版DSS环境配置准备
标准版DSS环境准备也分为三部分,前后端基础软件安装、后端环境准备和前端环境准备,详细介绍如下:
### 4.1 前后端基础软件安装
-Linkis简单版(0.9.1及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
+Linkis标准版(0.9.4及以上),[如何安装Linkis](https://github.com/WeBankFinTech/Linkis/wiki/%E5%A6%82%E4%BD%95%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8Linkis)
JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
@@ -302,7 +302,7 @@ Azkaban [如何安装Azkaban](https://github.com/azkaban/azkaban)
deployUser=hadoop #指定部署用户
- DSS_INSTALL_HOME=/appcom/Install/DSS #指定DSS的安装目录
+ DSS_INSTALL_HOME=$workDir #默认为上一级目录
WORKSPACE_USER_ROOT_PATH=file:///tmp/Linkis #指定用户根目录,存储用户的脚本文件和日志文件等,是用户的工作空间。
@@ -365,7 +365,7 @@ dss_port="8088"
linkis_url="http://127.0.0.1:9001"
# dss ip address
-dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}'|awk 'NR==1')
```
环境准备完毕,点我进入 [五、安装和使用](https://github.com/WeBankFinTech/DataSphereStudio/blob/master/docs/zh_CN/ch2/DSS%E5%BF%AB%E9%80%9F%E5%AE%89%E8%A3%85%E4%BD%BF%E7%94%A8%E6%96%87%E6%A1%A3.md#%E4%BA%94%E5%AE%89%E8%A3%85%E5%92%8C%E4%BD%BF%E7%94%A8)
@@ -386,14 +386,12 @@ dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/
安装模式就是简单模式或标准模式,请根据您准备的环境情况,选择合适的安装模式,精简版、简单模式和标准模式都会检查mysql服务,标准模式还会检测Qualitis服务和Azkaban外部server服务,如果检测失败会直接退出安装。
+- 安装过程如果有很多cp命令提示您是否覆盖安装,说明您的系统配置了别名。输入alias查看,如果存在cp、mv、rm的别名,可以先去掉,就不会再有大量提示。
+
- install.sh脚本会询问您是否需要初始化数据库并导入元数据。
因为担心用户重复执行install.sh脚本,把数据库中的用户数据清空,所以在install.sh执行时,会询问用户是否需要初始化数据库并导入元数据。
**第一次安装**必须选是。
-- install.sh脚本会询问您是否需要初始化使用[davinci](https://github.com/edp963/davinci)所依赖的库表,如果您没有安装过davinci,则需要进行初始化建表,如果您已经安装了davinci,则无需再次初始化。
- 因为担心用户会mysql中已安装好的davinci数据清空,所以在install.sh执行时,会询问用户是否需要初始化。
- **第一次安装**必须选是。
-
### c. 是否安装成功:
diff --git a/docs/zh_CN/ch3/DSS_User_Manual.md b/docs/zh_CN/ch3/DSS_User_Manual.md
index f366e5de31..5d91c0d02e 100644
--- a/docs/zh_CN/ch3/DSS_User_Manual.md
+++ b/docs/zh_CN/ch3/DSS_User_Manual.md
@@ -44,7 +44,17 @@ __注意:__ 如果要支持多用户登录,DSS的用户登录依赖Linkis,
3. 工程复制:以工程的最新版本为源工程,复制出新工程,初始版本工作流内容为源工程最新版本的工作流。注意:**工程名是唯一,不可重复**
## 3工作流——workflow
-### 3.1 工作流编排
+### 3.1 工作流spark节点
+ spark节点分别支持sql、pyspark、scala三种方式执行spark任务,使用时只需将节点拖拽至工作台后编写代码即可。
+### 3.2 工作流hive节点
+ hive节点支持sql方式执行hive任务,使用时只需将节点拖拽至工作台后编写hivesql代码即可。
+### 3.3 工作流python节点
+ python节点支持执行python任务,使用时只需将节点拖拽至工作台后编写python代码即可。
+### 3.4 工作流shell节点
+ shell节点支持执行shell命令或者脚本运行,使用时只需将节点拖拽至工作台后编写shell命令即可。
+### 3.5 工作流jdbc节点
+ jdbc节点支持以jdbc方式运行sql命令,使用时只需将节点拖拽至工作台后编写sql即可,**注意需要提前在linkis console管理台配置jdbc连接信息。**
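+
+As a quick illustration, a minimal statement that could be pasted into a sql/hive/jdbc node after dragging it onto the canvas (the table name below is a hypothetical placeholder):
+
+```
+select count(*) from default.demo_table;
+```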
+### 3.6 工作流编排
当点击一个对应的工程后,即可进入工程首页,在工程首页可以做工作流的编排。
1. 首先需要创建工作流
![workflow01](/images/zh_CN/chapter3/manual/workflow01.png)
@@ -53,7 +63,7 @@ __注意:__ 如果要支持多用户登录,DSS的用户登录依赖Linkis,
3. 节点支持右键功能包括,删除、依赖选择、复制等基本功能,同时数据开发节点还支持脚本关联
![workflow03](/images/zh_CN/chapter3/manual/workflow03.png)
-### 3.2 工作流节点打开
+### 3.7 工作流节点打开
节点支持双击打开:
1. 数据开发节点:点开后即可进入Scriptis进行脚本编辑
![workflow04](/images/zh_CN/chapter3/manual/workflow04.png)
@@ -64,14 +74,14 @@ __注意:__ 如果要支持多用户登录,DSS的用户登录依赖Linkis,
4. 数据可视化节点:跳转到对应的可视化编辑页面
![workflow07](/images/zh_CN/chapter3/manual/workflow07.png)
-### 3.3 层级切换
+### 3.8 层级切换
1. 支持多层级切换:支持快速工程切换、支持在工作流页面切换工作流、支持在单个工作流中切换节点
![workflow08](/images/zh_CN/chapter3/manual/workflow08.png)
2. 右上角支持多组件快速切换,在切换后进入的组件的内容都只与该工程相关,让用户更加清晰地去定义工程和业务的内容:
![functions](/images/zh_CN/chapter3/manual/functions.png)
-### 3.4 参数和资源设置
+### 3.9 参数和资源设置
1. 工作流上下文信息设置,支持工作流参数、变量、代理用户等
@@ -86,26 +96,26 @@ open("flow://test.txt", encoding="utf-8") #工作流级资源文件使用flow:/
open("node://test.txt", encoding="utf-8") #节点级资源文件使用node://开头
```
-### 3.5 工作流实时执行
+### 3.10 工作流实时执行
1. 除了功能节点中的subflow会跳过执行,连接节点会作为空节点运行,其他都支持实时执行
![workflow11](/images/zh_CN/chapter3/manual/workflow11.png)
2. 用户编辑好工作流后点击执行就可以将工作流进行运行,您将看到实时的工作流运行起来可以看到现在运行节点的时间,同时可以右键节点打开节点的管理台去展示该节点的进度,运行结果,运行日志等。支持任务停止等功能
![workflow12](/images/zh_CN/chapter3/manual/workflow12.png)
-### 3.6 工作流调度执行
+### 3.11 工作流调度执行
1. DSS的工程支持发布调度,默认支持发布到Azkaban,同样DSS的调度部分做了深层次的抽象可以做到对其他的调度系统快速支持。发布前会对工作流进行解析,以确保工作流是可以调度运行的:
![workflow13](/images/zh_CN/chapter3/manual/workflow13.png)
2. 发布后即可到调度系统中进行查看,比如去Azkaban页面上进行查看:
![workflow14](/images/zh_CN/chapter3/manual/workflow14.png)
3. DSS如何对接调度系统可以参考:[]()
-### 3.7 工作流版本
+### 3.12 工作流版本
1. 工作流创建完成后,具有初始版本,版本号为v000001,直接点击工作流图标时,默认打开工作流的最新版本
2. 可以查看工作流的版本,方便您进行历史版本查看:
![workflow15](/images/zh_CN/chapter3/manual/workflow15.png)
-### 3.8 工作流布局修改
+### 3.13 工作流布局修改
1. 工作流格式化:当工作流节点过多,界面太乱时。可以点击节点编辑页的右上方第四个“格式化”按钮。快速美化节点界面:
![workflow16](/images/zh_CN/chapter3/manual/workflow16.png)
如果格式化后不满意,可再次点击节点编辑页的右上方第五个“恢复”按钮,恢复到之前的状态:
diff --git a/docs/zh_CN/ch3/DSS_User_Tests1_Scala.md b/docs/zh_CN/ch3/DSS_User_Tests1_Scala.md
new file mode 100644
index 0000000000..70c0b03317
--- /dev/null
+++ b/docs/zh_CN/ch3/DSS_User_Tests1_Scala.md
@@ -0,0 +1,82 @@
+# DSS用户测试样例1:Scala
+
+DSS用户测试样例的目的是为平台新用户提供一组测试样例,用于熟悉DSS的常见操作,并验证DSS平台的正确性
+
+![image-20200408211243941](../../../images/zh_CN/chapter3/tests/home.png)
+
+## 1.1 Spark Core(入口函数sc)
+
+在Scriptis中,已经默认为您注册了SparkContext,所以直接使用sc即可:
+
+### 1.1.1 单Value算子(Map算子为例)
+
+```scala
+val rddMap = sc.makeRDD(Array((1,"a"),(1,"d"),(2,"b"),(3,"c")),4)
+val res = rddMap.mapValues(data=>{data+"||||"})
+res.collect().foreach(data=>println(data._1+","+data._2))
+```
+
+### 1.1.2 双Value算子(union算子为例)
+
+```scala
+val rdd1 = sc.makeRDD(1 to 5)
+val rdd2 = sc.makeRDD(6 to 10)
+val rddCustom = rdd1.union(rdd2)
+rddCustom.collect().foreach(println)
+```
+
+### 1.1.3 K-V算子(reduceByKey算子为例子)
+
+```scala
+val rdd1 = sc.makeRDD(List(("female",1),("male",2),("female",3),("male",4)))
+val rdd2 = rdd1.reduceByKey((x,y)=>x+y)
+rdd2.collect().foreach(println)
+```
+
+### 1.1.4 执行算子(以上collect算子为例)
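+
+A minimal sketch making the action explicit (collect in the examples above is the action operator; sc is pre-registered in Scriptis):
+
+```scala
+val rdd = sc.makeRDD(1 to 3)
+// collect is an action: it triggers the computation and pulls the results back to the driver
+val result = rdd.collect()
+result.foreach(println)
+```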
+
+### 1.1.5 从hdfs上读取文件并做简单执行
+
+```scala
+case class Person(name:String,age:String)
+val file = sc.textFile("/test.txt")
+val person = file.map(line=>{
+ val values=line.split(",")
+
+ Person(values(0),values(1))
+})
+val df = person.toDF()
+df.select($"name").show()
+```
+
+
+
+## 1.2 UDF函数测试
+
+### 1.2.1 函数定义
+
+
+
+```scala
+def ScalaUDF3(str: String): String = "hello, " + str + "this is a third attempt"
+```
+
+### 1.2.2 注册函数
+
+函数 -> 个人函数 -> 右击新增spark函数,注册方式同常规spark开发
+
+ ![img](../../../images/zh_CN/chapter3/tests/udf1.png)
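+
+Once registered, the function can be called from SQL like a built-in. A hedged usage sketch, assuming the function was registered under the name ScalaUDF3 and using the sqlContext entry point described in these samples:
+
+```scala
+sqlContext.sql("select ScalaUDF3('world') as greeting").show()
+```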
+
+## 1.3 UDAF函数测试
+
+### 1.3.1 Jar包上传
+
+ 在IDEA上开发一个求平均值的UDAF函数,打成jar包(示例为wordcount),上传至DSS的jar文件夹。
+
+ ![img](../../../images/zh_CN/chapter3/tests/udf2.png)
+
+### 1.3.2 注册函数
+
+函数 -> 个人函数 -> 右击新增普通函数,注册方式同常规spark开发
+
+ ![img](../../../images/zh_CN/chapter3/tests/udf-3.png)
\ No newline at end of file
diff --git a/docs/zh_CN/ch3/DSS_User_Tests2_Hive.md b/docs/zh_CN/ch3/DSS_User_Tests2_Hive.md
new file mode 100644
index 0000000000..800277ca09
--- /dev/null
+++ b/docs/zh_CN/ch3/DSS_User_Tests2_Hive.md
@@ -0,0 +1,148 @@
+# DSS用户测试样例2:Hive
+
+DSS用户测试样例的目的是为平台新用户提供一组测试样例,用于熟悉DSS的常见操作,并验证DSS平台的正确性
+
+![image-20200408211243941](../../../images/zh_CN/chapter3/tests/home.png)
+
+## 2.1 数仓建表
+
+ 进入“数据库”页面,点击“+”,依次输入表信息、表结构和分区信息即可创建数据库表:
+
+
+
+ ![img](../../../images/zh_CN/chapter3/tests/hive2.png)
+
+ 通过以上流程,分别创建部门表dept、员工表emp和分区员工表emp_partition,建表语句如下:
+
+```sql
+create external table if not exists default.dept(
+ deptno int,
+ dname string,
+ loc int
+)
+row format delimited fields terminated by '\t';
+
+create external table if not exists default.emp(
+ empno int,
+ ename string,
+ job string,
+ mgr int,
+ hiredate string,
+ sal double,
+ comm double,
+ deptno int
+)
+row format delimited fields terminated by '\t';
+
+create table if not exists emp_partition(
+ empno int,
+ ename string,
+ job string,
+ mgr int,
+ hiredate string,
+ sal double,
+ comm double,
+ deptno int
+)
+partitioned by (month string)
+row format delimited fields terminated by '\t';
+```
+
+**导入数据**
+
+目前需要通过后台手动批量导入数据,也可以通过insert语句从页面插入少量数据。
+
+```sql
+load data local inpath 'dept.txt' into table default.dept;
+load data local inpath 'emp.txt' into table default.emp;
+-- 分区表导入需指定partition;文件名以examples\ch3下的样例数据为准
+load data local inpath 'emp1.txt' into table default.emp_partition partition(month='202001');
+load data local inpath 'emp2.txt' into table default.emp_partition partition(month='202002');
+load data local inpath 'emp3.txt' into table default.emp_partition partition(month='202003');
+```
+
+其它数据按照上述语句导入,样例数据文件路径在:`examples\ch3`
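+
+For small samples, rows can also be inserted directly from a SQL script instead of loading files, as mentioned above (the values below are illustrative; the dept schema matches the create statement earlier):
+
+```sql
+insert into table default.dept values (10, 'ACCOUNTING', 1700), (20, 'RESEARCH', 1800);
+```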
+
+## 2.2 基本SQL语法测试
+
+### 2.2.1 简单查询
+
+```sql
+select * from dept;
+```
+
+### 2.2.2 Join连接
+
+```sql
+select * from emp
+left join dept
+on emp.deptno = dept.deptno;
+```
+
+### 2.2.3 聚合函数
+
+```sql
+select dept.dname, avg(sal) as avg_salary
+from emp left join dept
+on emp.deptno = dept.deptno
+group by dept.dname;
+```
+
+### 2.2.4 内置函数
+
+```sql
+select ename, job,sal,
+rank() over(partition by job order by sal desc) sal_rank
+from emp;
+```
+
+### 2.2.5 分区表简单查询
+
+```sql
+show partitions emp_partition;
+select * from emp_partition where month='202001';
+```
+
+### 2.2.6 分区表联合查询
+
+```sql
+select * from emp_partition where month='202001'
+union
+select * from emp_partition where month='202002'
+union
+select * from emp_partition where month='202003'
+```
+
+## 2.3 UDF函数测试
+
+### 2.3.1 Jar包上传
+
+进入Scriptis页面后,右键目录路径上传jar包:
+
+ ![img](../../../images/zh_CN/chapter3/tests/hive3.png)
+
+测试样例jar包在`examples\ch3\rename.jar`
+
+### 2.3.2 自定义函数
+
+进入“UDF函数”选项(如1),右击“个人函数”目录,选择“新增函数”:
+
+
+
+输入函数名称、选择jar包、并填写注册格式、输入输出格式即可创建函数:
+
+ ![img](../../../images/zh_CN/chapter3/tests/hive5.png)
+
+
+
+获得的函数如下:
+
+ ![img](../../../images/zh_CN/chapter3/tests/hive7.png)
+
+### 2.3.3 利用自定义函数进行SQL查询
+
+完成函数注册后,可进入工作空间页面创建.hql文件使用函数:
+
+```sql
+select deptno,ename, rename(ename) as new_name
+from emp;
+```
diff --git a/docs/zh_CN/ch3/DSS_User_Tests3_SparkSQL.md b/docs/zh_CN/ch3/DSS_User_Tests3_SparkSQL.md
new file mode 100644
index 0000000000..aaf2fb44d7
--- /dev/null
+++ b/docs/zh_CN/ch3/DSS_User_Tests3_SparkSQL.md
@@ -0,0 +1,61 @@
+# DSS用户测试样例3:SparkSQL
+
+DSS用户测试样例的目的是为平台新用户提供一组测试样例,用于熟悉DSS的常见操作,并验证DSS平台的正确性
+
+![image-20200408211243941](../../../images/zh_CN/chapter3/tests/home.png)
+
+## 3.1 RDD与DataFrame转换
+
+### 3.1.1 RDD转为DataFrame
+
+```scala
+case class MyList(id:Int)
+
+val lis = List(1,2,3,4)
+
+val listRdd = sc.makeRDD(lis)
+import spark.implicits._
+val df = listRdd.map(value => MyList(value)).toDF()
+
+df.show()
+```
+
+### 3.1.2 DataFrame转为RDD
+
+```scala
+case class MyList(id:Int)
+
+val lis = List(1,2,3,4)
+val listRdd = sc.makeRDD(lis)
+import spark.implicits._
+val df = listRdd.map(value => MyList(value)).toDF()
+println("------------------")
+
+val dfToRdd = df.rdd
+
+dfToRdd.collect().foreach(print(_))
+```
+
+## 3.2 DSL语法风格实现
+
+```scala
+import spark.implicits._
+// two example DataFrames with the same single-column schema
+val df1 = Seq("sales", "hr").toDF("department")
+val df2 = Seq("engineering").toDF("department")
+val df = df1.union(df2)
+val dfSelect = df.select($"department")
+dfSelect.show()
+```
+
+## 3.3 SQL语法风格实现(入口函数sqlContext)
+
+```scala
+import spark.implicits._
+val df1 = Seq("sales", "hr").toDF("department")
+val df2 = Seq("engineering").toDF("department")
+val df = df1.union(df2)
+
+df.createOrReplaceTempView("dfTable")
+val innerSql = """
+ SELECT department
+ FROM dfTable
+ """
+val sqlDF = sqlContext.sql(innerSql)
+sqlDF.show()
+```
+
+
\ No newline at end of file
diff --git "a/docs/zh_CN/ch4/DSS\345\267\245\347\250\213\345\217\221\345\270\203\350\260\203\345\272\246\347\263\273\347\273\237\346\236\266\346\236\204\350\256\276\350\256\241.md" "b/docs/zh_CN/ch4/DSS\345\267\245\347\250\213\345\217\221\345\270\203\350\260\203\345\272\246\347\263\273\347\273\237\346\236\266\346\236\204\350\256\276\350\256\241.md"
index 14dbccac55..1dc37857d7 100644
--- "a/docs/zh_CN/ch4/DSS\345\267\245\347\250\213\345\217\221\345\270\203\350\260\203\345\272\246\347\263\273\347\273\237\346\236\266\346\236\204\350\256\276\350\256\241.md"
+++ "b/docs/zh_CN/ch4/DSS\345\267\245\347\250\213\345\217\221\345\270\203\350\260\203\345\272\246\347\263\273\347\273\237\346\236\266\346\236\204\350\256\276\350\256\241.md"
@@ -17,10 +17,10 @@
(1)从数据库读取最新版本的工程、工作流信息,获取所有的保存在BML库工作流JSON文件。
-(2)将上面的数据库内容,JSON文件内容分别转成DSS中的DWSProject,DWSFlow,如果存在子flow,则需要一并设置到flow中,保持原来的层级关系和依赖关系,构建好DWSProject,其中包含了工程下所有的DWSFlow。
+(2)将上面的数据库内容,JSON文件内容分别转成DSS中的DSSProject,DSSFlow,如果存在子flow,则需要一并设置到flow中,保持原来的层级关系和依赖关系,构建好DSSProject,其中包含了工程下所有的DSSFlow。
一个工作流JSON包含了所有节点的定义,并存储了节点之间的依赖关系,以及工作流自身的属性信息。
-(3)将DWSProject经过工程转换器转成SchedulerProject,转成SchedulerProject的过程中,同时完成了DWSJSONFlow到SchedulerFlow的转换,也完成了DWSNode到SchedulerNode的转换。
+(3)将DSSProject经过工程转换器转成SchedulerProject,转成SchedulerProject的过程中,同时完成了DSSJSONFlow到SchedulerFlow的转换,也完成了DSSNode到SchedulerNode的转换。
(4)使用ProjectTuning对整个SchedulerProject工程进行tuning操作,用于完成工程发布前的整体调整操作,在Azkaban的实现中主要完成了工程的路径设置和工作流的存储路径设置。
diff --git "a/docs/zh_CN/ch4/\347\254\254\344\270\211\346\226\271\347\263\273\347\273\237\346\216\245\345\205\245DSS\346\214\207\345\215\227.md" "b/docs/zh_CN/ch4/\347\254\254\344\270\211\346\226\271\347\263\273\347\273\237\346\216\245\345\205\245DSS\346\214\207\345\215\227.md"
index 73173626eb..690682aabe 100644
--- "a/docs/zh_CN/ch4/\347\254\254\344\270\211\346\226\271\347\263\273\347\273\237\346\216\245\345\205\245DSS\346\214\207\345\215\227.md"
+++ "b/docs/zh_CN/ch4/\347\254\254\344\270\211\346\226\271\347\263\273\347\273\237\346\216\245\345\205\245DSS\346\214\207\345\215\227.md"
@@ -31,28 +31,142 @@
NodeService是用来解决用户在DSS提交的任务在第三方系统生成相应任务的问题。用户如果在DSS系统的工作流中新建了一个工作流节点并进行任务的编辑,第三方系统需要同步感知到。
- 4.getNodeExecution
- NodeExecution接口是用来将任务提交到第三方系统进行执行的接口,NodeExecution接口有支持短时间任务的NodeExecution和支持长时间任务的LongTermNodeExecution。一般短时间任务,如邮件发送等,可以直接实现NodeExecution接口,并重写execute方法,DSS系统同步等待任务结束。另外的长时间任务,如数据质量检测等,可以实现LongTermNodeExecution接口,并重写submit方法,返回一个NodeExecutionAction,DSS系统通过这个NodeExecutionAction可以向第三方系统获取任务的日志、状态等。
+ NodeExecution接口是用来将任务提交到第三方系统进行执行的接口,NodeExecution
+接口有支持短时间任务的NodeExecution和支持长时间任务的LongTermNodeExecution。一般短时间任务,如邮件发送等,可以直接实现NodeExecution接口,并重写execute方法,DSS系统同步等待任务结束。另外的长时间任务,如数据质量检测等,可以实现LongTermNodeExecution接口,并重写submit方法,返回一个NodeExecutionAction,DSS系统通过这个NodeExecutionAction可以向第三方系统获取任务的日志、状态等。
#### 3.第三方系统接入DSS的实现(以Visualis为例)
- Visualis是微众银行WeDataSphere开源的一款商业BI工具,DSS集成Visualis系统之后可以获得数据可视化的能力。Visualis接入DSS系统的代码在DSS项目中已经同步开源,下面将以开源代码为例,对步骤进行罗列分析。
+ Visualis是微众银行WeDataSphere开源的一款商业BI工具,DSS集成Visualis系统之后可以获得数据可视化的能力。
+Visualis接入DSS系统的代码在DSS项目中已经同步开源,下面将以开源代码为例,对步骤进行罗列分析。
Visualis接入的DSS系统的步骤如下:
**3.1.Visualis实现AppJoint接口**
- Visualis实现的 AppJoint接口的实现类是VisualisAppjoint。查看VisualisAppjoint的代码可知,它在init方法时候,初始化了自己实现的SecurityService、 NodeService以及NodeExecution。
+ Visualis实现的 AppJoint接口的实现类是VisualisAppjoint。查看VisualisAppjoint的代码可知,它在init方法时候,
+初始化了自己实现的SecurityService、 NodeService以及NodeExecution。
+```java
+ public void init(String baseUrl, Map<String, Object> params) {
+ securityService = new VisualisSecurityService();
+ securityService.setBaseUrl(baseUrl);
+ nodeExecution = new VisualisNodeExecution();
+ nodeExecution.setBaseUrl(baseUrl);
+ nodeService = new VisualisNodeService();
+ nodeService.setBaseUrl(baseUrl);
+ }
+```
**3.2.Visualis实现SecurtyService接口**
- Visualis实现的SecurityService接口的类名是VisualisSecurityService,并重写了login方法,为了能够进行授权登陆,Visualis采用了提供token的方式,DSS的网关对该token进行授权,这样就能够做到用户鉴权。
+ Visualis实现的SecurityService接口的类名是VisualisSecurityService,
+并重写了login方法,为了能够进行授权登陆,Visualis采用了提供token的方式,DSS的网关对该token进行授权,这样就能够做到用户鉴权。
+
+```java
+public class VisualisSecurityService extends AppJointUrlImpl implements SecurityService {
+ @Override
+ public Session login(String user) throws AppJointErrorException {
+ VisualisSession visualisSession = new VisualisSession();
+ visualisSession.setUser(user);
+ visualisSession.getParameters().put("Token-User",user);
+ visualisSession.getParameters().put("Token-Code","WS-AUTH");
+ return visualisSession;
+ }
+
+ @Override
+ public void logout(String user) {
+
+ }
+}
+```
**3.3.Visualis实现的NodeService接口**
-   The class implementing the NodeService interface in Visualis is VisualisNodeService, which overrides the createNode, deleteNode, and updateNode methods; these three methods synchronously create task metadata in the third-party system. For example, createNode calls Visualis's HTTP interface to create a Visualis task under the same project in the Visualis system.
+   The class implementing the NodeService interface in Visualis is VisualisNodeService, which overrides the createNode,
+deleteNode, and updateNode methods; these three methods synchronously create task metadata in the third-party system. For example, createNode calls Visualis's HTTP interface to create a Visualis task under the same project in the Visualis system.
+
+```java
+ @Override
+    public Map<String, Object> createNode(Session session, AppJointNode node,
+                 Map<String, Object> requestBody) throws AppJointErrorException {
+ if (DisplayNodeService.getNodeType().equals(node.getNodeType())) {
+ return DisplayNodeService.createNode(session, getBaseUrl(), String.valueOf(node.getProjectId()), node.getNodeType(), requestBody);
+ } else if (DashboardNodeService.getNodeType().equals(node.getNodeType())) {
+ return DashboardNodeService.createNode(session, getBaseUrl(), String.valueOf(node.getProjectId()), node.getNodeType(), requestBody);
+ } else {
+ throw new AppJointErrorException(42002, "cannot recognize the nodeType " + node.getNodeType());
+ }
+ }
+
+ @Override
+ public void deleteNode(Session session, AppJointNode node) throws AppJointErrorException {
+ if (DisplayNodeService.getNodeType().equals(node.getNodeType())) {
+ DisplayNodeService.deleteNode(session, getBaseUrl(), String.valueOf(node.getProjectId()), node.getNodeType(), node.getJobContent());
+ } else if (DashboardNodeService.getNodeType().equals(node.getNodeType())) {
+ DashboardNodeService.deleteNode(session, getBaseUrl(), String.valueOf(node.getProjectId()), node.getNodeType(), node.getJobContent());
+ } else {
+ throw new AppJointErrorException(42002, "cannot recognize the nodeType " + node.getNodeType());
+ }
+ }
+
+ @Override
+    public Map<String, Object> updateNode(Session session, AppJointNode node,
+                 Map<String, Object> requestBody) throws AppJointErrorException {
+ if (DisplayNodeService.getNodeType().equals(node.getNodeType())) {
+ return DisplayNodeService.updateNode(session, getBaseUrl(), node.getProjectId(), node.getNodeType(), requestBody);
+ } else if (DashboardNodeService.getNodeType().equals(node.getNodeType())) {
+ return DashboardNodeService.updateNode(session, getBaseUrl(), node.getProjectId(), node.getNodeType(), requestBody);
+ } else {
+ throw new AppJointErrorException(42002, "cannot recognize the nodeType " + node.getNodeType());
+ }
+ }
+```
**3.4. Visualis implements the NodeExecution interface**
-   The class implementing the NodeExecution interface in Visualis is VisualisNodeExecution, which overrides the execute method. This method receives the Node and the NodeContext (along with the Session); from the NodeContext we can obtain the user, the DSS gateway address, and the token for gateway authentication. With these we can build an HTTP request to the third-party Visualis system and get Visualis's response. The NodeContext also provides methods for writing result sets: since Visualis results are generally displayed as pictures, at the end of execute Visualis obtains from the nodeContext a PictureResultSetWriter, a writer that supports picture output, and writes the result set with it.
+   The class implementing the NodeExecution interface in Visualis is VisualisNodeExecution, which overrides the execute method.
+This method receives the Node and the NodeContext (along with the Session); from the NodeContext we can obtain the user, the DSS gateway address, and the token for gateway authentication.
+With these we can build an HTTP request to the third-party Visualis system and get Visualis's response. The NodeContext also provides methods for writing result sets:
+since Visualis results are generally displayed as pictures, at the end of execute Visualis obtains from the nodeContext a PictureResultSetWriter, a writer that supports picture output, and writes the result set with it.
+```scala
+ override def execute(node: AppJointNode, nodeContext: NodeContext, session: Session): NodeExecutionResponse = node match {
+ case commonAppJointNode: CommonAppJointNode =>
+ val appJointResponse = new CompletedNodeExecutionResponse()
+ val idMap = commonAppJointNode.getJobContent
+ val id = idMap.values().iterator().next().toString
+ val url = if(commonAppJointNode.getNodeType.toLowerCase.contains(DISPLAY)) getDisplayPreviewUrl(nodeContext.getGatewayUrl, id)
+ else if(commonAppJointNode.getNodeType.toLowerCase.contains(DASHBOARD)) getDashboardPreviewUrl(nodeContext.getGatewayUrl, id)
+ else {
+ appJointResponse.setIsSucceed(false)
+ appJointResponse.setErrorMsg("不支持的appJoint类型:" + node.getNodeType)
+ return appJointResponse
+ }
+ var response = ""
+ val headers = nodeContext.getTokenHeader(nodeContext.getUser)
+ nodeContext.appendLog(LogUtils.generateInfo(s"Ready to download preview picture from $url."))
+ Utils.tryCatch(download(url, null, headers.toMap,
+ input => Utils.tryFinally{
+ val os = new ByteArrayOutputStream()
+ IOUtils.copy(input, os)
+ response = new String(Base64.getEncoder.encode(os.toByteArray))
+ //response = IOUtils.toString(input, ServerConfiguration.BDP_SERVER_ENCODING.getValue)
+ }(IOUtils.closeQuietly(input)))){ t =>
+ val errException = new ErrorException(70063, "failed to do visualis request")
+ errException.initCause(t)
+ appJointResponse.setException(errException)
+ appJointResponse.setIsSucceed(false)
+ appJointResponse.setErrorMsg(s"用户${nodeContext.getUser}请求Visualis失败!URL为: " + url)
+ return appJointResponse
+ }
+ nodeContext.appendLog(LogUtils.generateInfo("Preview picture downloaded, now ready to write results."))
+ val imagesBytes = response
+ val resultSetWriter = nodeContext.createPictureResultSetWriter()
+ Utils.tryFinally{
+ resultSetWriter.addMetaData(new LineMetaData())
+ resultSetWriter.addRecord(new LineRecord(imagesBytes))
+ }(IOUtils.closeQuietly(resultSetWriter))
+ appJointResponse.setIsSucceed(true)
+ appJointResponse
+ }
+```
**3.5. Updating database content (the dss-application module)**
@@ -66,7 +180,7 @@ The steps for integrating Visualis into DSS are as follows:
| url | 10 | e.g. http://127.0.0.1:8080 |
| is_user_need_init | whether user initialization is needed | no by default |
| user_init_url | user initialization url | empty by default |
-| exists_project_service | whether it has its own projectService; if so, you must write your own appjoint implementing projectService0 | |
+| exists_project_service | whether it has its own projectService; if so, you must write your own appjoint implementing projectService | |
| enhance_json | extra json, passed in as a map when the appjoint is initialized | |
| homepage_url | homepage url of the integrated system | |
| direct_url | redirect url of the integrated system | |
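+
+The enhance_json field ties back to the init method shown in 3.1: DSS parses the JSON and passes it to the AppJoint as the params map. A hedged illustration (the "watermark" key is invented for this example):
+
+```java
+import java.util.Map;
+
+public class EnhanceJsonExample {
+    // Hypothetical: if dss_application.enhance_json were {"watermark":"false"},
+    // then inside the AppJoint's init(baseUrl, params) the value is available as:
+    public void init(String baseUrl, Map<String, Object> params) {
+        Object watermark = params.get("watermark"); // -> "false"
+    }
+}
+```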
@@ -97,11 +211,11 @@ The steps for integrating Visualis into DSS are as follows:
**3.6. Front-end changes**
- 3.6.1 Add the node type
-Modify the src/js/service/nodeType.js file to add the Qualitis node type
+Modify the src/js/service/nodeType.js file to add the Visualis node type
- 3.6.2 Add the node icon
Copy the node icon into the src/js/module/process/images/ path; currently only the SVG format is supported.
- 3.6.3 Add the node configuration
-Modify the src/js/module/process/shape.js file to add the node configuration for Qualitis.
+Modify the src/js/module/process/shape.js file to add the node configuration for Visualis.
- 3.6.4 Modify the home page's node click event
Modify the src/js/module/process/index.vue file to add the node click event and its handling logic.
- 3.6.5 Modify the workflow node double-click event
@@ -109,7 +223,13 @@ The steps for integrating Visualis into DSS are as follows:
**3.7. Compile and package the jar and place it in the designated location**
-   Once the interfaces above are implemented, an AppJoint is complete. After packaging, it must be placed in the designated location. The jar needs to go into both the dss-server and linkis-appjoint-entrance microservices; taking linkis-appjoint-entrance as the example (dss-server is handled the same way), there is an appjoints directory at the same level as lib under linkis-appjoint-entrance, whose layout is shown in Figure 3-3.
+   Once the interfaces above are implemented, an AppJoint is complete. After packaging, it must be placed in the designated location.
+The jar needs to go into both the dss-server and linkis-appjoint-entrance microservices; taking linkis-appjoint-entrance as the example (dss-server is handled the same way),
+there is an appjoints directory at the same level as lib under linkis-appjoint-entrance, whose layout is shown in Figure 3-3.
![Example appjoints directory](/images/zh_CN/chapter4/appjoints.png)
Figure 3-3 Example appjoints directory
-   Create a visualis directory under the appjoints directory. The visualis directory must contain a lib directory, which holds the jar built from Visualis's implementation of VisualisAppJoint; if jars are needed that the dss system does not already ship, they must also be placed in the lib directory (for example, the sendemail AppJoint needs its mail-sending dependencies), so those dependency jars go into the lib directory together with the implementation jar. In addition, configuration parameters needed by this AppJoint can be placed in appjoints.properties; the AppJointLoader provided by DSS reads these parameters into a Map, which is passed in when the AppJoint's init method is called.
+   Create a visualis directory under the appjoints directory.
+The visualis directory must contain a lib directory, which holds the jar built from Visualis's implementation of VisualisAppJoint;
+if jars are needed that the dss system does not already ship, they must also be placed in the lib directory (for example, the sendemail AppJoint needs its mail-sending dependencies),
+so those dependency jars go into the lib directory together with the implementation jar.
+In addition, configuration parameters needed by this AppJoint can be placed in appjoints.properties; the AppJointLoader provided by DSS reads these parameters into a Map, which is passed in when the AppJoint's init method is called.
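+
+Based on the description above (a sketch assembled from the text, not a verbatim copy of Figure 3-3), the resulting layout looks roughly like:
+
+```
+linkis-appjoint-entrance/
+├── lib/
+└── appjoints/
+    └── visualis/
+        ├── lib/                    # the VisualisAppJoint jar plus any extra dependency jars
+        └── appjoints.properties    # optional parameters, read into the Map passed to init
+```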
diff --git a/dss-appjoint-auth/pom.xml b/dss-appjoint-auth/pom.xml
index 04144de5c6..941a120d1c 100644
--- a/dss-appjoint-auth/pom.xml
+++ b/dss-appjoint-auth/pom.xml
@@ -22,7 +22,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    <modelVersion>4.0.0</modelVersion>
@@ -33,6 +33,16 @@
            <artifactId>linkis-gateway-httpclient-support</artifactId>
            <version>${linkis.version}</version>
+        <dependency>
+            <groupId>com.webank.wedatasphere.linkis</groupId>
+            <artifactId>linkis-common</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>javax.servlet-api</artifactId>
+            <version>3.1.0</version>
+        </dependency>
diff --git a/dss-appjoint-auth/src/main/scala/com/webank/wedatasphere/dss/appjoint/auth/impl/AppJointAuthImpl.scala b/dss-appjoint-auth/src/main/scala/com/webank/wedatasphere/dss/appjoint/auth/impl/AppJointAuthImpl.scala
index 8414615149..15dd7e3072 100644
--- a/dss-appjoint-auth/src/main/scala/com/webank/wedatasphere/dss/appjoint/auth/impl/AppJointAuthImpl.scala
+++ b/dss-appjoint-auth/src/main/scala/com/webank/wedatasphere/dss/appjoint/auth/impl/AppJointAuthImpl.scala
@@ -24,8 +24,9 @@ import com.webank.wedatasphere.dss.appjoint.auth.{AppJointAuth, RedirectMsg}
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.httpclient.dws.DWSHttpClient
import com.webank.wedatasphere.linkis.httpclient.dws.config.DWSClientConfigBuilder
-import javax.servlet.http.{Cookie, HttpServletRequest}
+import javax.servlet.http.HttpServletRequest
import org.apache.commons.io.IOUtils
+import org.apache.http.impl.cookie.BasicClientCookie
import scala.collection.JavaConversions._
@@ -38,7 +39,8 @@ class AppJointAuthImpl private() extends AppJointAuth with Logging {
private def getBaseUrl(dssUrl: String): String = {
val uri = new URI(dssUrl)
- uri.getScheme + "://" + uri.getHost + ":" + uri.getPort
+ val dssPort = if(uri.getPort != -1) uri.getPort else 80
+ uri.getScheme + "://" + uri.getHost + ":" + dssPort
}
protected def getDWSClient(dssUrl: String): DWSHttpClient = {
@@ -67,7 +69,7 @@ class AppJointAuthImpl private() extends AppJointAuth with Logging {
val index = cookie.indexOf("=")
val key = cookie.substring(0, index).trim
val value = cookie.substring(index + 1).trim
- userInfoAction.addCookie(new Cookie(key, value))
+ userInfoAction.addCookie(new BasicClientCookie(key, value))
}
val redirectMsg = new RedirectMsgImpl
redirectMsg.setRedirectUrl(request.getParameter(AppJointAuthImpl.REDIRECT_KEY))
diff --git a/dss-appjoint-core/pom.xml b/dss-appjoint-core/pom.xml
index 0c00f2cd7f..6ee707c4dd 100644
--- a/dss-appjoint-core/pom.xml
+++ b/dss-appjoint-core/pom.xml
@@ -22,7 +22,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    <modelVersion>4.0.0</modelVersion>
@@ -50,6 +50,12 @@
            <artifactId>dss-common</artifactId>
            <version>${dss.version}</version>
+
+        <dependency>
+            <groupId>com.webank.wedatasphere.linkis</groupId>
+            <artifactId>linkis-httpclient</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
diff --git a/dss-appjoint-core/src/main/scala/com/webank/wedatasphere/dss/appjoint/execution/scheduler/ListenerEventBusNodeExecutionScheduler.scala b/dss-appjoint-core/src/main/scala/com/webank/wedatasphere/dss/appjoint/execution/scheduler/ListenerEventBusNodeExecutionScheduler.scala
index a355270ccb..51d167eaff 100644
--- a/dss-appjoint-core/src/main/scala/com/webank/wedatasphere/dss/appjoint/execution/scheduler/ListenerEventBusNodeExecutionScheduler.scala
+++ b/dss-appjoint-core/src/main/scala/com/webank/wedatasphere/dss/appjoint/execution/scheduler/ListenerEventBusNodeExecutionScheduler.scala
@@ -17,6 +17,8 @@
package com.webank.wedatasphere.dss.appjoint.execution.scheduler
+import java.util.concurrent.ArrayBlockingQueue
+
import com.webank.wedatasphere.dss.appjoint.exception.AppJointErrorException
import com.webank.wedatasphere.dss.appjoint.execution.common.{AsyncNodeExecutionResponse, CompletedNodeExecutionResponse, LongTermNodeExecutionAction}
import com.webank.wedatasphere.dss.appjoint.execution.conf.NodeExecutionConfiguration._
@@ -55,7 +57,7 @@ class ListenerEventBusNodeExecutionScheduler(eventQueueCapacity: Int, name: Stri
val field1 = ru.typeOf[ListenerEventBus[_, _]].decl(ru.TermName("eventQueue")).asMethod
val result = listenerEventBusClass.reflectMethod(field1)
result() match {
- case queue: BlockingLoopArray[AsyncNodeExecutionResponseEvent] => queue
+ case queue: ArrayBlockingQueue[AsyncNodeExecutionResponseEvent] => queue
}
}
@@ -104,18 +106,18 @@ class ListenerEventBusNodeExecutionScheduler(eventQueueCapacity: Int, name: Stri
protected def addEvent(event: AsyncNodeExecutionResponseEvent): Unit = synchronized {
listenerEventBus.post(event)
- event.getResponse.getAction match {
- case longTermAction: LongTermNodeExecutionAction =>
- longTermAction.setSchedulerId(eventQueue.max)
- case _ =>
- }
+// event.getResponse.getAction match {
+// case longTermAction: LongTermNodeExecutionAction =>
+// longTermAction.setSchedulerId(eventQueue.max)
+// case _ =>
+// }
}
- override def removeAsyncResponse(action: LongTermNodeExecutionAction): Unit =
- getAsyncResponse(action).setCompleted(true)
+ override def removeAsyncResponse(action: LongTermNodeExecutionAction): Unit = {
+
+ }
- override def getAsyncResponse(action: LongTermNodeExecutionAction): AsyncNodeExecutionResponse =
- eventQueue.get(action.getSchedulerId).getResponse
+ override def getAsyncResponse(action: LongTermNodeExecutionAction): AsyncNodeExecutionResponse = null
override def start(): Unit = listenerEventBus.start()
diff --git a/dss-appjoint-loader/pom.xml b/dss-appjoint-loader/pom.xml
index 7dc9097f34..b36ab5d799 100644
--- a/dss-appjoint-loader/pom.xml
+++ b/dss-appjoint-loader/pom.xml
@@ -22,12 +22,12 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    <modelVersion>4.0.0</modelVersion>
    <artifactId>dss-appjoint-loader</artifactId>
-    <version>0.7.0</version>
+    <version>0.9.0</version>
diff --git a/dss-application/pom.xml b/dss-application/pom.xml
index 1a55a80350..54583b1239 100644
--- a/dss-application/pom.xml
+++ b/dss-application/pom.xml
@@ -23,7 +23,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    <artifactId>dss-application</artifactId>
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/DSSApplicationUserMapper.java b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/DSSApplicationUserMapper.java
new file mode 100644
index 0000000000..f2f59e06b1
--- /dev/null
+++ b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/DSSApplicationUserMapper.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.application.dao;
+
+import com.webank.wedatasphere.dss.application.entity.DSSUser;
+
+/**
+ * Created by chaogefeng on 2019/10/11.
+ */
+public interface DSSApplicationUserMapper {
+ DSSUser getUserByName(String username);
+
+ void registerDssUser(DSSUser userDb);
+
+ void updateUserFirstLogin(Long userId);
+}
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/impl/DSSUserMapper.xml b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/impl/DSSApplicationUserMapper.xml
similarity index 94%
rename from dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/impl/DSSUserMapper.xml
rename to dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/impl/DSSApplicationUserMapper.xml
index ff731d7c64..040cb3eb02 100644
--- a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/impl/DSSUserMapper.xml
+++ b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/impl/DSSApplicationUserMapper.xml
@@ -19,7 +19,7 @@
-<mapper namespace="com.webank.wedatasphere.dss.application.dao.DSSUserMapper">
+<mapper namespace="com.webank.wedatasphere.dss.application.dao.DSSApplicationUserMapper">
id,`username`,`name`,`is_first_login`
@@ -29,7 +29,7 @@
select * from dss_user where `username` = #{username}
-    <insert id="registerDSSUser">
+    <insert id="registerDssUser">
INSERT INTO dss_user()
VALUES (#{id},#{username},#{name},#{isFirstLogin})
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/handler/UserFirstLoginHandler.java b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/handler/UserFirstLoginHandler.java
index 5c5a93c775..f902c5ba3d 100644
--- a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/handler/UserFirstLoginHandler.java
+++ b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/handler/UserFirstLoginHandler.java
@@ -18,7 +18,7 @@
package com.webank.wedatasphere.dss.application.handler;
import com.webank.wedatasphere.dss.application.entity.DSSUser;
-import com.webank.wedatasphere.dss.application.service.DSSUserService;
+import com.webank.wedatasphere.dss.application.service.DSSApplicationUserService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -33,7 +33,7 @@ public class UserFirstLoginHandler implements Handler {
private Logger logger = LoggerFactory.getLogger(this.getClass());
@Autowired
- private DSSUserService dssUserService;
+ private DSSApplicationUserService dssApplicationUserService;
@Override
public int getOrder() {
@@ -44,7 +44,7 @@ public int getOrder() {
public void handle(DSSUser user) {
logger.info("UserFirstLoginHandler:");
synchronized (user.getUsername().intern()){
- DSSUser userDb = dssUserService.getUserByName(user.getUsername());
+ DSSUser userDb = dssApplicationUserService.getUserByName(user.getUsername());
if(userDb == null){
logger.info("User first enter dss, insert table dss_user");
userDb = new DSSUser();
@@ -52,7 +52,7 @@ public void handle(DSSUser user) {
userDb.setName(user.getName());
userDb.setFirstLogin(true);
userDb.setId(user.getId());
- dssUserService.registerDSSUser(userDb);
+ dssApplicationUserService.registerDssUser(userDb);
}
// TODO: 2019/11/29 update firstLogin
user = userDb;
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/restful/ApplicationRestfulApi.java b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/restful/ApplicationRestfulApi.java
index c0fa339629..2842487a1c 100644
--- a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/restful/ApplicationRestfulApi.java
+++ b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/restful/ApplicationRestfulApi.java
@@ -22,7 +22,7 @@
import com.webank.wedatasphere.dss.application.entity.DSSUserVO;
import com.webank.wedatasphere.dss.application.handler.ApplicationHandlerChain;
import com.webank.wedatasphere.dss.application.service.ApplicationService;
-import com.webank.wedatasphere.dss.application.service.DSSUserService;
+import com.webank.wedatasphere.dss.application.service.DSSApplicationUserService;
import com.webank.wedatasphere.dss.application.util.ApplicationUtils;
import com.webank.wedatasphere.linkis.server.Message;
import com.webank.wedatasphere.linkis.server.security.SecurityFilter;
@@ -51,7 +51,7 @@ public class ApplicationRestfulApi {
@Autowired
private ApplicationService applicationService;
@Autowired
- private DSSUserService dataworkisUserService;
+ private DSSApplicationUserService dataworkisUserService;
@Autowired
private ApplicationHandlerChain applicationHandlerChain;
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/DSSUserService.java b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/DSSApplicationUserService.java
similarity index 90%
rename from dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/DSSUserService.java
rename to dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/DSSApplicationUserService.java
index 3333fd4ffe..a798dc4357 100644
--- a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/DSSUserService.java
+++ b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/DSSApplicationUserService.java
@@ -22,11 +22,11 @@
/**
* Created by chaogefeng on 2019/10/11.
*/
-public interface DSSUserService {
+public interface DSSApplicationUserService {
DSSUser getUserByName(String username);
- void registerDSSUser(DSSUser userDb);
+ void registerDssUser(DSSUser userDb);
void updateUserFirstLogin(Long id);
}
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/DSSUserServiceImpl.java b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/DSSApplicationUserServiceImpl.java
similarity index 66%
rename from dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/DSSUserServiceImpl.java
rename to dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/DSSApplicationUserServiceImpl.java
index 71db0e5df1..c8fae7a631 100644
--- a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/DSSUserServiceImpl.java
+++ b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/DSSApplicationUserServiceImpl.java
@@ -17,9 +17,9 @@
package com.webank.wedatasphere.dss.application.service.impl;
-import com.webank.wedatasphere.dss.application.dao.DSSUserMapper;
+import com.webank.wedatasphere.dss.application.dao.DSSApplicationUserMapper;
import com.webank.wedatasphere.dss.application.entity.DSSUser;
-import com.webank.wedatasphere.dss.application.service.DSSUserService;
+import com.webank.wedatasphere.dss.application.service.DSSApplicationUserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
@@ -27,23 +27,23 @@
* Created by chaogefeng on 2019/10/11.
*/
@Service
-public class DSSUserServiceImpl implements DSSUserService {
+public class DSSApplicationUserServiceImpl implements DSSApplicationUserService {
@Autowired
- private DSSUserMapper dssUserMapper;
+ private DSSApplicationUserMapper dssApplicationUserMapper;
@Override
public DSSUser getUserByName(String username) {
- return dssUserMapper.getUserByName(username);
+ return dssApplicationUserMapper.getUserByName(username);
}
@Override
- public void registerDSSUser(DSSUser userDb) {
- dssUserMapper.registerDSSUser( userDb);
+ public void registerDssUser(DSSUser userDb) {
+ dssApplicationUserMapper.registerDssUser( userDb);
}
@Override
public void updateUserFirstLogin(Long id) {
- dssUserMapper.updateUserFirstLogin(id);
+ dssApplicationUserMapper.updateUserFirstLogin(id);
}
}
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/LinkisUserServiceImpl.java b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/LinkisUserServiceImpl.java
index 60f11bdf3d..f40f286ecc 100644
--- a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/LinkisUserServiceImpl.java
+++ b/dss-application/src/main/java/com/webank/wedatasphere/dss/application/service/impl/LinkisUserServiceImpl.java
@@ -16,7 +16,7 @@
*/
package com.webank.wedatasphere.dss.application.service.impl;
-import com.webank.wedatasphere.dss.application.dao.DSSUserMapper;
+import com.webank.wedatasphere.dss.application.dao.DSSApplicationUserMapper;
import com.webank.wedatasphere.dss.application.dao.LinkisUserMapper;
import com.webank.wedatasphere.dss.application.entity.DSSUser;
import com.webank.wedatasphere.dss.application.entity.LinkisUser;
@@ -33,7 +33,7 @@ public class LinkisUserServiceImpl implements LinkisUserService {
@Autowired
private LinkisUserMapper linkisUserMapper;
@Autowired
- private DSSUserMapper dssUserMapper;
+ private DSSApplicationUserMapper dssApplicationUserMapper;
@Override
public LinkisUser getUserByName(String username) {
@@ -55,6 +55,6 @@ public void registerDSSUser(LinkisUser userDb) {
dssUser.setName(userDb.getName());
dssUser.setUsername(userDb.getUserName());
dssUser.setFirstLogin(userDb.getFirstLogin());
- dssUserMapper.registerDSSUser(dssUser);
+ dssApplicationUserMapper.registerDssUser(dssUser);
}
}
diff --git a/dss-azkaban-scheduler-appjoint/pom.xml b/dss-azkaban-scheduler-appjoint/pom.xml
index a6d8945341..f7a82d5557 100644
--- a/dss-azkaban-scheduler-appjoint/pom.xml
+++ b/dss-azkaban-scheduler-appjoint/pom.xml
@@ -22,7 +22,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    <modelVersion>4.0.0</modelVersion>
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanNodePublishHook.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanNodePublishHook.java
index 852f9a817e..702275f40c 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanNodePublishHook.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanNodePublishHook.java
@@ -59,7 +59,7 @@ public void postPublish(SchedulerNode schedulerNode) {
}
private void writeNodeResourcesToLocal(SchedulerNode schedulerNode) throws DSSErrorException {
-        List<Resource> nodeResources = schedulerNode.getDWSNode().getResources();
+        List<Resource> nodeResources = schedulerNode.getDssNode().getResources();
if(nodeResources == null || nodeResources.isEmpty()) {return;}
FileOutputStream os = null;
try {
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanProjectPublishHook.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanProjectPublishHook.java
index e6eb9dd62c..40effab61b 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanProjectPublishHook.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/hooks/LinkisAzkabanProjectPublishHook.java
@@ -75,7 +75,7 @@ private void removeProjectStoreDirAndzip(AzkabanSchedulerProject publishProject)
}
private void writeProjectResourcesToLocal(AzkabanSchedulerProject publishProject)throws DSSErrorException {
-        List<Resource> resources = publishProject.getDWSProject().getProjectResources();
+        List<Resource> resources = publishProject.getDssProject().getProjectResources();
FileOutputStream os = null;
try {
String storePath = publishProject.getStorePath();
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/linkisjob/LinkisJobConverter.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/linkisjob/LinkisJobConverter.java
index b13044c6d7..ef6f3f3b1c 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/linkisjob/LinkisJobConverter.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/linkisjob/LinkisJobConverter.java
@@ -79,7 +79,7 @@ private void convertHead(LinkisAzkabanSchedulerNode schedulerNode,LinkisJob job)
}
private void convertDependencies(LinkisAzkabanSchedulerNode schedulerNode,LinkisJob job){
-        List<String> dependencys = schedulerNode.getDWSNode().getDependencys();
+        List<String> dependencys = schedulerNode.getDssNode().getDependencys();
if(dependencys != null && !dependencys.isEmpty()) {
StringBuilder dependencies = new StringBuilder();
dependencys.forEach(d ->dependencies.append(d + ","));
@@ -88,12 +88,12 @@ private void convertDependencies(LinkisAzkabanSchedulerNode schedulerNode,Linkis
}
private void convertProxyUser(LinkisAzkabanSchedulerNode schedulerNode,LinkisJob job){
- String userProxy = schedulerNode.getDWSNode().getUserProxy();
+ String userProxy = schedulerNode.getDssNode().getUserProxy();
if(!StringUtils.isEmpty(userProxy)) job.setProxyUser(userProxy);
}
private void convertConfiguration(LinkisAzkabanSchedulerNode schedulerNode,LinkisJob job){
-        Map<String, Object> params = schedulerNode.getDWSNode().getParams();
+        Map<String, Object> params = schedulerNode.getDssNode().getParams();
if (params != null && !params.isEmpty()) {
Object configuration = params.get("configuration");
String confprefix = "node.conf.";
@@ -103,7 +103,7 @@ private void convertConfiguration(LinkisAzkabanSchedulerNode schedulerNode,Linki
}
private void convertJobCommand(LinkisAzkabanSchedulerNode schedulerNode,LinkisJob job){
-        Map<String, Object> jobContent = schedulerNode.getDWSNode().getJobContent();
+        Map<String, Object> jobContent = schedulerNode.getDssNode().getJobContent();
if(jobContent != null) {
jobContent.remove("jobParams");
job.setCommand(new Gson().toJson(jobContent));
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/AzkabanFlowParser.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/AzkabanFlowParser.java
index 3da66206d4..0fe7350837 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/AzkabanFlowParser.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/AzkabanFlowParser.java
@@ -3,7 +3,7 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.parser.AbstractFlowParser;
import com.webank.wedatasphere.dss.appjoint.scheduler.parser.NodeParser;
import com.webank.wedatasphere.dss.appjoint.scheduler.azkaban.entity.AzkabanSchedulerFlow;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSJSONFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSJSONFlow;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerFlow;
import java.util.ArrayList;
@@ -30,7 +30,7 @@ public void setNodeParsers(NodeParser[] nodeParsers) {
}
@Override
- public Boolean ifFlowCanParse(DWSJSONFlow flow) {
+ public Boolean ifFlowCanParse(DSSJSONFlow flow) {
return true;
}
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/LinkisAzkabanNodeParser.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/LinkisAzkabanNodeParser.java
index dcbd92d0fb..e9c8b86a90 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/LinkisAzkabanNodeParser.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/parser/LinkisAzkabanNodeParser.java
@@ -1,7 +1,7 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.azkaban.parser;
import com.webank.wedatasphere.dss.appjoint.scheduler.azkaban.entity.LinkisAzkabanSchedulerNode;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerNode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -12,14 +12,14 @@ public class LinkisAzkabanNodeParser extends AzkabanNodeParser {
@Override
- public SchedulerNode parseNode(DWSNode dwsNode) {
+ public SchedulerNode parseNode(DSSNode dssNode) {
LinkisAzkabanSchedulerNode schedulerNode = new LinkisAzkabanSchedulerNode();
- schedulerNode.setDWSNode(dwsNode);
+ schedulerNode.setDssNode(dssNode);
return schedulerNode;
}
@Override
- public Boolean ifNodeCanParse(DWSNode dwsNode) {
+ public Boolean ifNodeCanParse(DSSNode dssNode) {
        // reserved for future use
return true;
}
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/service/AzkabanProjectService.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/service/AzkabanProjectService.java
index 2f2f98e4d0..eb536b14b7 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/service/AzkabanProjectService.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/service/AzkabanProjectService.java
@@ -80,11 +80,11 @@ public Project createProject(Project project, Session session) throws AppJointEr
params.add(new BasicNameValuePair("name", project.getName()));
params.add(new BasicNameValuePair("description", project.getDescription()));
HttpPost httpPost = new HttpPost(projectUrl);
- httpPost.addHeader(HTTP.CONTENT_ENCODING, "UTF-8");
+ httpPost.addHeader(HTTP.CONTENT_ENCODING, HTTP.IDENTITY_CODING);
CookieStore cookieStore = new BasicCookieStore();
cookieStore.addCookie(session.getCookies()[0]);
- HttpEntity entity = EntityBuilder.create().setContentEncoding("UTF-8").
- setContentType(ContentType.create("application/x-www-form-urlencoded", Consts.UTF_8))
+ HttpEntity entity = EntityBuilder.create()
+ .setContentType(ContentType.create("application/x-www-form-urlencoded", Consts.UTF_8))
.setParameters(params).build();
httpPost.setEntity(entity);
CloseableHttpClient httpClient = null;
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/AzkabanProjectTuning.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/AzkabanProjectTuning.java
index 55f0ddf599..b4cec7a03e 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/AzkabanProjectTuning.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/AzkabanProjectTuning.java
@@ -60,7 +60,7 @@ private void assignStorePath(AzkabanSchedulerProject azkabanSchedulerProject) {
SimpleDateFormat dateFormat = new SimpleDateFormat(AzkabanSchedulerProject.DATE_FORMAT);
Date date = new Date();
String dataStr = dateFormat.format(date);
- String userName = azkabanSchedulerProject.getDWSProject().getUserName();
+ String userName = azkabanSchedulerProject.getDssProject().getUserName();
String name = azkabanSchedulerProject.getName();
String storePath = AzkabanConf.DEFAULT_STORE_PATH.getValue() + File.separator + userName
+ File.separator + dataStr + File.separator +name;
diff --git a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/LinkisAzkabanFlowTuning.java b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/LinkisAzkabanFlowTuning.java
index 4f218ce7d7..42c9962f13 100644
--- a/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/LinkisAzkabanFlowTuning.java
+++ b/dss-azkaban-scheduler-appjoint/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/azkaban/tuning/LinkisAzkabanFlowTuning.java
@@ -9,7 +9,7 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerNode;
import com.webank.wedatasphere.dss.appjoint.scheduler.tuning.AbstractFlowTuning;
import com.webank.wedatasphere.dss.appjoint.scheduler.tuning.NodeTuning;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNodeDefault;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNodeDefault;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
@@ -58,7 +58,7 @@ public Boolean ifFlowCanTuning(SchedulerFlow schedulerFlow) {
}
private SchedulerFlow addEndNodeForFlowName(SchedulerFlow flow) {
- DWSNodeDefault endNode = new DWSNodeDefault();
+ DSSNodeDefault endNode = new DSSNodeDefault();
        List<SchedulerNode> endNodeList = getFlowEndJobList(flow);
endNode.setId(flow.getName() + "_");
endNode.setName(flow.getName() + "_");
@@ -70,7 +70,7 @@ private SchedulerFlow addEndNodeForFlowName(SchedulerFlow flow) {
endNodeList.forEach(tmpNode -> endNode.addDependency(tmpNode.getName()));
}
LinkisAzkabanSchedulerNode azkabanSchedulerNode = new LinkisAzkabanSchedulerNode();
- azkabanSchedulerNode.setDWSNode(endNode);
+ azkabanSchedulerNode.setDssNode(endNode);
flow.getSchedulerNodes().add((azkabanSchedulerNode));
return flow;
}
@@ -80,7 +80,7 @@ private List getFlowEndJobList(SchedulerFlow flow) {
for (SchedulerNode job : flow.getSchedulerNodes()) {
int flag = 0;
for (SchedulerEdge link : flow.getSchedulerEdges()) {
- if (job.getId().equals(link.getDWSEdge().getSource())) {
+ if (job.getId().equals(link.getDssEdge().getSource())) {
flag = 1;
}
}
diff --git a/dss-common/pom.xml b/dss-common/pom.xml
index bbf896bfa5..fb77c4dad1 100644
--- a/dss-common/pom.xml
+++ b/dss-common/pom.xml
@@ -23,7 +23,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    <artifactId>dss-common</artifactId>
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlow.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlow.java
similarity index 86%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlow.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlow.java
index 15ca42b8ce..ff768f791e 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlow.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlow.java
@@ -24,7 +24,7 @@
/**
* Created by enjoyyin on 2019/5/14.
*/
-public class DWSFlow implements Flow {
+public class DSSFlow implements Flow {
private Long id;
private String name;
    private Boolean state; // 0/1 indicates published vs. not yet published
@@ -38,11 +38,11 @@ public class DWSFlow implements Flow {
    private Boolean hasSaved; // 0 disable, 1 enable; 0 means the workflow has never been saved and is ignored when publishing
private String uses;
-    private List<DWSFlowVersion> versions; // keep the name versions rather than flowVersions so the front end needs no changes
-    private List<DWSFlow> children;
+    private List<DSSFlowVersion> versions; // keep the name versions rather than flowVersions so the front end needs no changes
+    private List<DSSFlow> children;
private String flowType;
- private DWSFlowVersion latestVersion;
+ private DSSFlowVersion latestVersion;
public Integer getRank() {
@@ -86,27 +86,27 @@ public void setDescription(String description) {
@Override
public void addFlowVersion(FlowVersion flowVersion) {
- this.versions.add((DWSFlowVersion) flowVersion);
+ this.versions.add((DSSFlowVersion) flowVersion);
}
@Override
-    public List<? extends DWSFlow> getChildren() {
+    public List<? extends DSSFlow> getChildren() {
return children;
}
@Override
    public void setChildren(List<? extends Flow> children) {
- this.children = children.stream().map(f ->(DWSFlow)f).collect(Collectors.toList());
+ this.children = children.stream().map(f ->(DSSFlow)f).collect(Collectors.toList());
}
@Override
-    public List<DWSFlowVersion> getFlowVersions() {
+    public List<DSSFlowVersion> getFlowVersions() {
return this.versions;
}
@Override
    public void setFlowVersions(List<? extends FlowVersion> flowVersions) {
- this.versions = flowVersions.stream().map(f ->(DWSFlowVersion)f).collect(Collectors.toList());
+ this.versions = flowVersions.stream().map(f ->(DSSFlowVersion)f).collect(Collectors.toList());
}
@Override
@@ -176,11 +176,11 @@ public void setHasSaved(Boolean hasSaved) {
this.hasSaved = hasSaved;
}
- public DWSFlowVersion getLatestVersion() {
+ public DSSFlowVersion getLatestVersion() {
return latestVersion;
}
- public void setLatestVersion(DWSFlowVersion latestVersion) {
+ public void setLatestVersion(DSSFlowVersion latestVersion) {
this.latestVersion = latestVersion;
}
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlowPublishHistory.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlowPublishHistory.java
similarity index 94%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlowPublishHistory.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlowPublishHistory.java
index e9c4537b6c..7fe3e9ad42 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlowPublishHistory.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlowPublishHistory.java
@@ -20,5 +20,5 @@
/**
* Created by enjoyyin on 2019/9/19.
*/
-public class DWSFlowPublishHistory {
+public class DSSFlowPublishHistory {
}
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlowVersion.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlowVersion.java
similarity index 92%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlowVersion.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlowVersion.java
index 43c319541e..ce8559097a 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSFlowVersion.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSFlowVersion.java
@@ -22,7 +22,7 @@
/**
* Created by enjoyyin on 2019/9/19.
*/
-public class DWSFlowVersion implements FlowVersion, Comparable<DWSFlowVersion> {
+public class DSSFlowVersion implements FlowVersion, Comparable<DSSFlowVersion> {
private Long id;
private Long flowID;
private String source;
@@ -31,7 +31,7 @@ public class DWSFlowVersion implements FlowVersion, Comparable {
private Date updateTime;
private Long updatorID;
private String version;
- private DWSFlowPublishHistory publishHistory;
+ private DSSFlowPublishHistory publishHistory;
private String json;
private String updator;
    private Boolean isNotPublished; // true: no version ever published; false: already published
@@ -146,16 +146,16 @@ public void setUpdatorID(Long updatorID) {
this.updatorID = updatorID;
}
- public DWSFlowPublishHistory getPublishHistory() {
+ public DSSFlowPublishHistory getPublishHistory() {
return publishHistory;
}
- public void setPublishHistory(DWSFlowPublishHistory publishHistory) {
+ public void setPublishHistory(DSSFlowPublishHistory publishHistory) {
this.publishHistory = publishHistory;
}
@Override
- public int compareTo(DWSFlowVersion o) {
+ public int compareTo(DSSFlowVersion o) {
Integer v1 = Integer.valueOf(this.version.substring(1, version.length()));
Integer v2 = Integer.valueOf(o.version.substring(1,o.version.length()));
return v2 - v1;
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSJSONFlow.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSJSONFlow.java
similarity index 84%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSJSONFlow.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSJSONFlow.java
index 1c6e42cc67..ae9b986318 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DWSJSONFlow.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/flow/DSSJSONFlow.java
@@ -23,10 +23,10 @@
/**
* Created by enjoyyin on 2019/5/14.
*/
-public class DWSJSONFlow extends DWSFlow {
+public class DSSJSONFlow extends DSSFlow {
private String json;
-    private List<DWSJSONFlow> children;
+    private List<DSSJSONFlow> children;
public String getJson() {
return json;
@@ -38,11 +38,11 @@ public void setJson(String json) {
@Override
    public void setChildren(List<? extends Flow> children) {
- this.children = children.stream().map(f ->(DWSJSONFlow)f).collect(Collectors.toList());
+ this.children = children.stream().map(f ->(DSSJSONFlow)f).collect(Collectors.toList());
}
@Override
-    public List<DWSJSONFlow> getChildren() {
+    public List<DSSJSONFlow> getChildren() {
return children;
}
}
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSEdge.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSEdge.java
similarity index 97%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSEdge.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSEdge.java
index e255d2a917..c4b30593af 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSEdge.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSEdge.java
@@ -20,7 +20,7 @@
/**
* Created by enjoyyin on 2019/5/14.
*/
-public interface DWSEdge {
+public interface DSSEdge {
String getSource();
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSEdgeDefault.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSEdgeDefault.java
similarity index 96%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSEdgeDefault.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSEdgeDefault.java
index 4cf094bb42..3e6bc8b139 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSEdgeDefault.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSEdgeDefault.java
@@ -20,7 +20,7 @@
/**
* Created by enjoyyin on 2019/5/14.
*/
-public class DWSEdgeDefault implements DWSEdge {
+public class DSSEdgeDefault implements DSSEdge {
private String source;
private String target;
private String sourceLocation;
@@ -68,7 +68,7 @@ public void setTargetLocation(String targetLocation) {
@Override
public String toString() {
- return "DWSEdge{" +
+ return "DSSEdge{" +
"source='" + source + '\'' +
", target='" + target + '\'' +
", sourceLocation='" + sourceLocation + '\'' +
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSNode.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSNode.java
similarity index 96%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSNode.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSNode.java
index d7fa7e870e..8f6575c836 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSNode.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSNode.java
@@ -25,7 +25,7 @@
/**
* Created by enjoyyin on 2019/5/14.
*/
-public interface DWSNode extends Node {
+public interface DSSNode extends Node {
Layout getLayout();
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSNodeDefault.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSNodeDefault.java
similarity index 98%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSNodeDefault.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSNodeDefault.java
index b7748016f5..2d416dfb02 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DWSNodeDefault.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/node/DSSNodeDefault.java
@@ -26,7 +26,7 @@
/**
* Created by enjoyyin on 2019/5/14.
*/
-public class DWSNodeDefault implements DWSNode {
+public class DSSNodeDefault implements DSSNode {
private Layout layout;
private String id;
private String jobType;
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSJSONProject.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSJSONProject.java
similarity index 69%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSJSONProject.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSJSONProject.java
index 05202dac48..ffd9026fc3 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSJSONProject.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSJSONProject.java
@@ -17,8 +17,8 @@
package com.webank.wedatasphere.dss.common.entity.project;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSFlow;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSJSONFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSJSONFlow;
import java.util.List;
import java.util.stream.Collectors;
@@ -26,16 +26,16 @@
/**
* Created by allenlliu on 2019/9/16.
*/
-public class DWSJSONProject extends DWSProject {
-    private List<DWSJSONFlow> flows;
+public class DSSJSONProject extends DSSProject {
+    private List<DSSJSONFlow> flows;
@Override
-    public List<DWSJSONFlow> getFlows() {
+    public List<DSSJSONFlow> getFlows() {
return this.flows;
}
@Override
-    public void setFlows(List<? extends DWSFlow> flows) {
-        this.flows = flows.stream().map(f ->(DWSJSONFlow)f).collect(Collectors.toList());
+    public void setFlows(List<? extends DSSFlow> flows) {
+        this.flows = flows.stream().map(f ->(DSSJSONFlow)f).collect(Collectors.toList());
}
}
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProject.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProject.java
similarity index 88%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProject.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProject.java
index 3f7ad9c2eb..4454362d8e 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProject.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProject.java
@@ -18,7 +18,7 @@
package com.webank.wedatasphere.dss.common.entity.project;
import com.webank.wedatasphere.dss.common.entity.Resource;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSFlow;
import java.util.Date;
import java.util.List;
@@ -27,7 +27,7 @@
/**
* Created by enjoyyin on 2019/9/16.
*/
-public class DWSProject implements Project {
+public class DSSProject implements Project {
private Long id;
private String name;
@@ -48,14 +48,15 @@ public class DWSProject implements Project {
private String product;
private Integer applicationArea;
private String business;
+ private Long workspaceId;
- private DWSProjectVersion latestVersion;
+ private DSSProjectVersion latestVersion;
private Boolean isNotPublish;
private String userName;
private String projectGroup;
private List projectVersions;
-    private List<DWSFlow> flows;
+    private List<DSSFlow> flows;
    private List<Resource> projectResources;
    public List<Resource> getProjectResources() {
@@ -66,12 +67,12 @@ public void setProjectResources(List projectResources) {
this.projectResources = projectResources;
}
-    public List<? extends DWSFlow> getFlows() {
+    public List<? extends DSSFlow> getFlows() {
return flows;
}
-    public void setFlows(List<? extends DWSFlow> flows) {
-        this.flows = flows.stream().map(f -> (DWSFlow) f).collect(Collectors.toList());
+    public void setFlows(List<? extends DSSFlow> flows) {
+        this.flows = flows.stream().map(f -> (DSSFlow) f).collect(Collectors.toList());
}
public String getUserName() {
@@ -122,11 +123,11 @@ public void setInitialOrgID(Long initialOrgID) {
this.initialOrgID = initialOrgID;
}
- public DWSProjectVersion getLatestVersion() {
+ public DSSProjectVersion getLatestVersion() {
return latestVersion;
}
- public void setLatestVersion(DWSProjectVersion latestVersion) {
+ public void setLatestVersion(DSSProjectVersion latestVersion) {
this.latestVersion = latestVersion;
}
@@ -174,13 +175,13 @@ public List getProjectVersions() {
@Override
    public void setProjectVersions(List<? extends ProjectVersion> projectVersions) {
- this.projectVersions = projectVersions.stream().map(f -> (DWSProjectVersion) f).collect(Collectors.toList());
+ this.projectVersions = projectVersions.stream().map(f -> (DSSProjectVersion) f).collect(Collectors.toList());
}
@Override
public void addProjectVersion(ProjectVersion projectVersion) {
- this.projectVersions.add((DWSProjectVersion) projectVersion);
+ this.projectVersions.add((DSSProjectVersion) projectVersion);
}
@Override
@@ -282,4 +283,12 @@ public String getBusiness() {
public void setBusiness(String business) {
this.business = business;
}
+
+ public Long getWorkspaceId() {
+ return workspaceId;
+ }
+
+ public void setWorkspaceId(Long workspaceId) {
+ this.workspaceId = workspaceId;
+ }
}
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProjectPublishHistory.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProjectPublishHistory.java
similarity index 98%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProjectPublishHistory.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProjectPublishHistory.java
index 7ea9b1ee0e..dca93dd487 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProjectPublishHistory.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProjectPublishHistory.java
@@ -22,7 +22,7 @@
/**
* Created by enjoyyin on 2019/5/14.
*/
-public class DWSProjectPublishHistory {
+public class DSSProjectPublishHistory {
private Long id;
private Long projectVersionID;
private Date createTime;
diff --git a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProjectVersion.java b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProjectVersion.java
similarity index 91%
rename from dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProjectVersion.java
rename to dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProjectVersion.java
index 534a9631f2..5731ecf8e3 100644
--- a/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DWSProjectVersion.java
+++ b/dss-common/src/main/java/com/webank/wedatasphere/dss/common/entity/project/DSSProjectVersion.java
@@ -22,7 +22,7 @@
/**
* Created by enjoyyin on 2019/9/18.
*/
-public class DWSProjectVersion implements ProjectVersion {
+public class DSSProjectVersion implements ProjectVersion {
private Long id;
private Long projectID;
@@ -33,9 +33,7 @@ public class DWSProjectVersion implements ProjectVersion {
private Integer lock;
private String updator;
private Boolean isNotPublish;
- private DWSProjectPublishHistory publishHistory;
-
-
+ private DSSProjectPublishHistory publishHistory;
@Override
public String getVersion() {
@@ -118,11 +116,11 @@ public void setNotPublish(Boolean notPublish) {
isNotPublish = notPublish;
}
- public DWSProjectPublishHistory getPublishHistory() {
+ public DSSProjectPublishHistory getPublishHistory() {
return publishHistory;
}
- public void setPublishHistory(DWSProjectPublishHistory publishHistory) {
+ public void setPublishHistory(DSSProjectPublishHistory publishHistory) {
this.publishHistory = publishHistory;
}
}
diff --git a/dss-common/src/main/scala/com/webank/wedatasphere/dss/common/protocol/RequestDWSProject.scala b/dss-common/src/main/scala/com/webank/wedatasphere/dss/common/protocol/RequestDSSProject.scala
similarity index 90%
rename from dss-common/src/main/scala/com/webank/wedatasphere/dss/common/protocol/RequestDWSProject.scala
rename to dss-common/src/main/scala/com/webank/wedatasphere/dss/common/protocol/RequestDSSProject.scala
index 92d01f765e..b9b3da46af 100644
--- a/dss-common/src/main/scala/com/webank/wedatasphere/dss/common/protocol/RequestDWSProject.scala
+++ b/dss-common/src/main/scala/com/webank/wedatasphere/dss/common/protocol/RequestDSSProject.scala
@@ -20,6 +20,6 @@ package com.webank.wedatasphere.dss.common.protocol
/**
* Created by enjoyyin on 2019/11/8.
*/
-case class RequestDWSProject(flowId:Long,version:String,projectVersionId:Long)
+case class RequestDSSProject(flowId:Long, version:String, projectVersionId:Long)
case class RequestDSSApplication(name:String)
\ No newline at end of file
diff --git a/dss-flow-execution-entrance/pom.xml b/dss-flow-execution-entrance/pom.xml
index 12218e310e..7e152ced39 100644
--- a/dss-flow-execution-entrance/pom.xml
+++ b/dss-flow-execution-entrance/pom.xml
@@ -22,7 +22,7 @@
        <artifactId>dss</artifactId>
        <groupId>com.webank.wedatasphere.dss</groupId>
-        <version>0.7.0</version>
+        <version>0.9.0</version>
    <modelVersion>4.0.0</modelVersion>
@@ -33,12 +33,28 @@
            <groupId>com.webank.wedatasphere.linkis</groupId>
            <artifactId>linkis-ujes-entrance</artifactId>
            <version>${linkis.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.apache.poi</groupId>
+                    <artifactId>ooxml-schemas</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+        <dependency>
+            <groupId>com.webank.wedatasphere.linkis</groupId>
+            <artifactId>linkis-cloudRPC</artifactId>
+            <version>${linkis.version}</version>
-
            <groupId>com.webank.wedatasphere.dss</groupId>
            <artifactId>dss-linkis-node-execution</artifactId>
            <version>${dss.version}</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>com.ibm.icu</groupId>
+                    <artifactId>icu4j</artifactId>
+                </exclusion>
+            </exclusions>
diff --git a/dss-flow-execution-entrance/src/main/assembly/distribution.xml b/dss-flow-execution-entrance/src/main/assembly/distribution.xml
index c080c0c09f..bb09aad22b 100644
--- a/dss-flow-execution-entrance/src/main/assembly/distribution.xml
+++ b/dss-flow-execution-entrance/src/main/assembly/distribution.xml
@@ -59,16 +59,16 @@
            <exclude>aopalliance:aopalliance:jar</exclude>
            <exclude>asm:asm:jar</exclude>
            <exclude>cglib:cglib:jar</exclude>
-            <exclude>com.amazonaws:aws-java-sdk-autoscaling:jar</exclude>
-            <exclude>com.amazonaws:aws-java-sdk-core:jar</exclude>
-            <exclude>com.amazonaws:aws-java-sdk-ec2:jar</exclude>
-            <exclude>com.amazonaws:aws-java-sdk-route53:jar</exclude>
-            <exclude>com.amazonaws:aws-java-sdk-sts:jar</exclude>
-            <exclude>com.amazonaws:jmespath-java:jar</exclude>
+
+
+
+
+
+
            <exclude>com.fasterxml.jackson.core:jackson-annotations:jar</exclude>
            <exclude>com.fasterxml.jackson.core:jackson-core:jar</exclude>
            <exclude>com.fasterxml.jackson.core:jackson-databind:jar</exclude>
-            <exclude>com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar</exclude>
+
            <exclude>com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar</exclude>
            <exclude>com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar</exclude>
            <exclude>com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar</exclude>
@@ -84,7 +84,6 @@
            <exclude>com.google.code.gson:gson:jar</exclude>
            <exclude>com.google.guava:guava:jar</exclude>
            <exclude>com.google.inject:guice:jar</exclude>
-            <exclude>com.google.protobuf:protobuf-java:jar</exclude>
            <exclude>com.netflix.archaius:archaius-core:jar</exclude>
            <exclude>com.netflix.eureka:eureka-client:jar</exclude>
            <exclude>com.netflix.eureka:eureka-core:jar</exclude>
@@ -100,7 +99,6 @@
            <exclude>com.netflix.ribbon:ribbon-loadbalancer:jar</exclude>
            <exclude>com.netflix.ribbon:ribbon-transport:jar</exclude>
            <exclude>com.netflix.servo:servo-core:jar</exclude>
-            <exclude>com.ning:async-http-client:jar</exclude>
            <exclude>com.sun.jersey.contribs:jersey-apache-client4:jar</exclude>
            <exclude>com.sun.jersey:jersey-client:jar</exclude>
            <exclude>com.sun.jersey:jersey-core:jar</exclude>
@@ -113,15 +111,10 @@
            <exclude>com.webank.wedatasphere.linkis:linkis-common:jar</exclude>
            <exclude>com.webank.wedatasphere.linkis:linkis-module:jar</exclude>
            <exclude>commons-beanutils:commons-beanutils:jar</exclude>
-            <exclude>commons-beanutils:commons-beanutils-core:jar</exclude>
-            <exclude>commons-cli:commons-cli:jar</exclude>
            <exclude>commons-codec:commons-codec:jar</exclude>
            <exclude>commons-collections:commons-collections:jar</exclude>
            <exclude>commons-configuration:commons-configuration:jar</exclude>
-            <exclude>commons-daemon:commons-daemon:jar</exclude>
            <exclude>commons-dbcp:commons-dbcp:jar</exclude>
-            <exclude>commons-digester:commons-digester:jar</exclude>
-            <exclude>commons-httpclient:commons-httpclient:jar</exclude>
            <exclude>commons-io:commons-io:jar</exclude>
            <exclude>commons-jxpath:commons-jxpath:jar</exclude>
            <exclude>commons-lang:commons-lang:jar</exclude>
@@ -129,7 +122,6 @@
commons-net:commons-net:jar
commons-pool:commons-pool:jar
io.micrometer:micrometer-core:jar
- io.netty:netty:jar
io.netty:netty-all:jar
io.netty:netty-buffer:jar
io.netty:netty-codec:jar
@@ -146,41 +138,21 @@
javax.annotation:javax.annotation-api:jar
javax.inject:javax.inject:jar
javax.servlet:javax.servlet-api:jar
- javax.servlet.jsp:jsp-api:jar
javax.validation:validation-api:jar
javax.websocket:javax.websocket-api:jar
javax.ws.rs:javax.ws.rs-api:jar
javax.xml.bind:jaxb-api:jar
javax.xml.stream:stax-api:jar
joda-time:joda-time:jar
- log4j:log4j:jar
mysql:mysql-connector-java:jar
- net.databinder.dispatch:dispatch-core_2.11:jar
- net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar
org.antlr:antlr-runtime:jar
org.antlr:stringtemplate:jar
- org.apache.commons:commons-compress:jar
org.apache.commons:commons-math:jar
- org.apache.commons:commons-math3:jar
- org.apache.curator:curator-client:jar
- org.apache.curator:curator-framework:jar
- org.apache.curator:curator-recipes:jar
- org.apache.directory.api:api-asn1-api:jar
- org.apache.directory.api:api-util:jar
- org.apache.directory.server:apacheds-i18n:jar
- org.apache.directory.server:apacheds-kerberos-codec:jar
- org.apache.hadoop:hadoop-annotations:jar
- org.apache.hadoop:hadoop-auth:jar
- org.apache.hadoop:hadoop-common:jar
- org.apache.hadoop:hadoop-hdfs:jar
- org.apache.htrace:htrace-core:jar
org.apache.httpcomponents:httpclient:jar
- org.apache.httpcomponents:httpcore:jar
org.apache.logging.log4j:log4j-api:jar
org.apache.logging.log4j:log4j-core:jar
org.apache.logging.log4j:log4j-jul:jar
org.apache.logging.log4j:log4j-slf4j-impl:jar
- org.apache.zookeeper:zookeeper:jar
org.aspectj:aspectjweaver:jar
org.bouncycastle:bcpkix-jdk15on:jar
org.bouncycastle:bcprov-jdk15on:jar
@@ -194,7 +166,6 @@
org.eclipse.jetty:jetty-continuation:jar
org.eclipse.jetty:jetty-http:jar
org.eclipse.jetty:jetty-io:jar
- org.eclipse.jetty:jetty-jndi:jar
org.eclipse.jetty:jetty-plus:jar
org.eclipse.jetty:jetty-security:jar
org.eclipse.jetty:jetty-server:jar
@@ -210,7 +181,6 @@
org.eclipse.jetty.websocket:websocket-common:jar
org.eclipse.jetty.websocket:websocket-server:jar
org.eclipse.jetty.websocket:websocket-servlet:jar
- org.fusesource.leveldbjni:leveldbjni-all:jar
org.glassfish.hk2:class-model:jar
org.glassfish.hk2:config-types:jar
org.glassfish.hk2.external:aopalliance-repackaged:jar
@@ -243,13 +213,10 @@
org.json4s:json4s-ast_2.11:jar
org.json4s:json4s-core_2.11:jar
org.json4s:json4s-jackson_2.11:jar
- org.jsoup:jsoup:jar
org.jvnet.mimepull:mimepull:jar
org.jvnet:tiger-types:jar
org.latencyutils:LatencyUtils:jar
org.mortbay.jasper:apache-el:jar
- org.mortbay.jetty:jetty:jar
- org.mortbay.jetty:jetty-util:jar
org.ow2.asm:asm-analysis:jar
org.ow2.asm:asm-commons:jar
org.ow2.asm:asm-tree:jar
@@ -296,11 +263,8 @@
org.springframework:spring-jcl:jar
org.springframework:spring-web:jar
org.springframework:spring-webmvc:jar
- org.tukaani:xz:jar
org.yaml:snakeyaml:jar
- software.amazon.ion:ion-java:jar
- xerces:xercesImpl:jar
- xmlenc:xmlenc:jar
+
xmlpull:xmlpull:jar
xpp3:xpp3_min:jar
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java
index 8d0735e581..d0d556aadf 100644
--- a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionAppJointSignalSharedJob.java
@@ -17,17 +17,28 @@
package com.webank.wedatasphere.dss.flow.execution.entrance.job;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
import com.webank.wedatasphere.dss.flow.execution.entrance.conf.FlowExecutionEntranceConfiguration;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.JobSignalKeyCreator;
import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
import java.util.Map;
/**
- * Created by peacewong on 2019/11/14.
+ * Created by johnnwang on 2019/11/14.
*/
-public class FlowExecutionAppJointSignalSharedJob extends FlowExecutionAppJointLinkisSharedJob implements SignalSharedJob {
+public class FlowExecutionAppJointSignalSharedJob extends FlowExecutionAppJointLinkisJob implements SignalSharedJob {
+ private JobSignalKeyCreator signalKeyCreator;
+
+ @Override
+ public JobSignalKeyCreator getSignalKeyCreator() {
+ return this.signalKeyCreator;
+ }
+
+ @Override
+ public void setSignalKeyCreator(JobSignalKeyCreator signalKeyCreator) {
+ this.signalKeyCreator = signalKeyCreator;
+ }
@Override
public String getMsgSaveKey() {
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionJobSignalKeyCreator.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionJobSignalKeyCreator.java
new file mode 100644
index 0000000000..e284b44d8e
--- /dev/null
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/job/FlowExecutionJobSignalKeyCreator.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.flow.execution.entrance.job;
+
+import com.webank.wedatasphere.dss.flow.execution.entrance.conf.FlowExecutionEntranceConfiguration;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.Job;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.JobSignalKeyCreator;
+import com.webank.wedatasphere.dss.linkis.node.execution.job.SignalSharedJob;
+
+public class FlowExecutionJobSignalKeyCreator implements JobSignalKeyCreator {
+
+ @Override
+ public String getSignalKeyByJob(Job job) {
+ String projectId = job.getJobProps().get(FlowExecutionEntranceConfiguration.PROJECT_NAME());
+ String flowId = job.getJobProps().get(FlowExecutionEntranceConfiguration.FLOW_NAME());
+ String flowExecId = job.getJobProps().get(FlowExecutionEntranceConfiguration.FLOW_EXEC_ID());
+ return projectId + "." + flowId + "." + flowExecId;
+ }
+
+ @Override
+ public String getSignalKeyBySignalSharedJob(SignalSharedJob job) {
+ return getSignalKeyByJob((Job)job);
+ }
+}
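The key produced by getSignalKeyByJob scopes signal sharing to a single execution of a single flow. A minimal standalone sketch of the resulting key shape (the property keys below are placeholders; the real ones are defined in FlowExecutionEntranceConfiguration):

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch, not part of the patch: mimics the concatenation in
// FlowExecutionJobSignalKeyCreator.getSignalKeyByJob.
public class SignalKeyDemo {
    public static void main(String[] args) {
        Map<String, String> jobProps = new HashMap<>();
        jobProps.put("projectName", "myProject"); // placeholder property keys
        jobProps.put("flowName", "myFlow");
        jobProps.put("flowExecId", "exec_001");
        String signalKey = jobProps.get("projectName") + "."
                + jobProps.get("flowName") + "."
                + jobProps.get("flowExecId");
        System.out.println(signalKey); // prints: myProject.myFlow.exec_001
    }
}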
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionFlowParser.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionFlowParser.java
index 66388106e1..fbd8bdd762 100644
--- a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionFlowParser.java
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionFlowParser.java
@@ -19,7 +19,7 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.parser.AbstractFlowParser;
import com.webank.wedatasphere.dss.appjoint.scheduler.parser.NodeParser;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSJSONFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSJSONFlow;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@@ -36,7 +36,7 @@ public void setNodeParsers(NodeParser[] nodeParsers) {
}
@Override
- public Boolean ifFlowCanParse(DWSJSONFlow flow) {
+ public Boolean ifFlowCanParse(DSSJSONFlow flow) {
return true;
}
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionNodeParser.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionNodeParser.java
index 379aa14e5a..97c5824b98 100644
--- a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionNodeParser.java
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionNodeParser.java
@@ -19,7 +19,7 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerNode;
import com.webank.wedatasphere.dss.appjoint.scheduler.parser.AbstractNodeParser;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import com.webank.wedatasphere.dss.flow.execution.entrance.entity.FlowExecutionNode;
import org.springframework.stereotype.Component;
@@ -31,14 +31,14 @@
public class FlowExecutionNodeParser extends AbstractNodeParser {
@Override
- public SchedulerNode parseNode(DWSNode dwsNode) {
+ public SchedulerNode parseNode(DSSNode dssNode) {
FlowExecutionNode node = new FlowExecutionNode();
- node.setDWSNode(dwsNode);
+ node.setDssNode(dssNode);
return node;
}
@Override
- public Boolean ifNodeCanParse(DWSNode dwsNode) {
+ public Boolean ifNodeCanParse(DSSNode dssNode) {
return true;
}
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionReadNodeParser.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionReadNodeParser.java
index 29f964c5d7..1c04fdfd2a 100644
--- a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionReadNodeParser.java
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/parser/FlowExecutionReadNodeParser.java
@@ -20,7 +20,7 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.ReadNode;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerNode;
import com.webank.wedatasphere.dss.appjoint.scheduler.parser.AbstractReadNodeParser;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import com.webank.wedatasphere.dss.flow.execution.entrance.entity.FlowExecutionNode;
import com.webank.wedatasphere.dss.flow.execution.entrance.entity.FlowExecutonReadNode;
import com.webank.wedatasphere.dss.flow.execution.entrance.utils.FlowExecutionUtils;
@@ -39,8 +39,8 @@ public int getOrder() {
}
@Override
- public Boolean ifNodeCanParse(DWSNode dwsNode) {
- return FlowExecutionUtils.isReadNode(dwsNode.getNodeType());
+ public Boolean ifNodeCanParse(DSSNode dssNode) {
+ return FlowExecutionUtils.isReadNode(dssNode.getNodeType());
}
@Override
diff --git a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/restful/FlowExecutionRestfulApi.java b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/restful/FlowExecutionRestfulApi.java
index 807de60299..8ad6dcb51f 100644
--- a/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/restful/FlowExecutionRestfulApi.java
+++ b/dss-flow-execution-entrance/src/main/java/com/webank/wedatasphere/dss/flow/execution/entrance/restful/FlowExecutionRestfulApi.java
@@ -70,6 +70,12 @@ public Response execution(@PathParam("id") String id) {
message = Message.ok("Successfully get job execution info");
message.setMethod("/api/entrance/" + id + "/execution");
message.setStatus(0);
+ long nowTime = System.currentTimeMillis();
+ flowEntranceJob.getFlowContext().getRunningNodes().forEach((k, v) -> {
+ if (v != null) {
+ v.setNowTime(nowTime);
+ }
+ });
message.data("runningJobs", FlowContext$.MODULE$.convertView(flowEntranceJob.getFlowContext().getRunningNodes()));
List
+
+ com.google.code.gson
+ gson
+ 2.8.5
+
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerNode.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerNode.java
index 167d61fda6..a5139d7d9c 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerNode.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerNode.java
@@ -17,7 +17,7 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.entity;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import java.util.List;
@@ -26,65 +26,65 @@
*/
public abstract class AbstractSchedulerNode implements SchedulerNode {
- private DWSNode dwsNode;
+ private DSSNode dssNode;
@Override
- public DWSNode getDWSNode() {
- return this.dwsNode;
+ public DSSNode getDssNode() {
+ return this.dssNode;
}
@Override
- public void setDWSNode(DWSNode dwsNode) {
- this.dwsNode = dwsNode;
+ public void setDssNode(DSSNode dssNode) {
+ this.dssNode = dssNode;
}
@Override
public String getId() {
- return dwsNode.getId();
+ return dssNode.getId();
}
@Override
public void setId(String id) {
- dwsNode.setId(id);
+ dssNode.setId(id);
}
@Override
public String getNodeType() {
- return dwsNode.getNodeType();
+ return dssNode.getNodeType();
}
@Override
public void setNodeType(String nodeType) {
- dwsNode.setNodeType(nodeType);
+ dssNode.setNodeType(nodeType);
}
@Override
public String getName() {
- return dwsNode.getName();
+ return dssNode.getName();
}
@Override
public void setName(String name) {
- dwsNode.setName(name);
+ dssNode.setName(name);
}
@Override
public void addDependency(String nodeName) {
- dwsNode.addDependency(nodeName);
+ dssNode.addDependency(nodeName);
}
@Override
public void setDependency(List dependency) {
- dwsNode.setDependency(dependency);
+ dssNode.setDependency(dependency);
}
@Override
public void removeDependency(String nodeName) {
- dwsNode.removeDependency(nodeName);
+ dssNode.removeDependency(nodeName);
}
@Override
public List getDependencys() {
- return dwsNode.getDependencys();
+ return dssNode.getDependencys();
}
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerProject.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerProject.java
index 6cfd51647f..3a8ad55eb8 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerProject.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/AbstractSchedulerProject.java
@@ -17,7 +17,7 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.entity;
-import com.webank.wedatasphere.dss.common.entity.project.DWSProject;
+import com.webank.wedatasphere.dss.common.entity.project.DSSProject;
import com.webank.wedatasphere.dss.common.entity.project.ProjectVersion;
import java.util.List;
@@ -32,7 +32,7 @@ public abstract class AbstractSchedulerProject implements SchedulerProject {
private String name;
private String description;
- private DWSProject dwsProject;
+ private DSSProject dssProject;
private List schedulerFlows;
private List projectVersions;
@@ -101,12 +101,12 @@ public void addProjectVersion(ProjectVersion projectVersion) {
}
@Override
- public DWSProject getDWSProject() {
- return this.dwsProject;
+ public DSSProject getDssProject() {
+ return this.dssProject;
}
@Override
- public void setDWSProject(DWSProject dwsProject) {
- this.dwsProject = dwsProject;
+ public void setDssProject(DSSProject dssProject) {
+ this.dssProject = dssProject;
}
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdge.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdge.java
index 582f0a4fb7..aee1e72292 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdge.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdge.java
@@ -17,13 +17,13 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.entity;
-import com.webank.wedatasphere.dss.common.entity.node.DWSEdge;
+import com.webank.wedatasphere.dss.common.entity.node.DSSEdge;
/**
* Created by enjoyyin on 2019/9/7.
*/
public interface SchedulerEdge {
- DWSEdge getDWSEdge();
+ DSSEdge getDssEdge();
- void setDWSEdge(DWSEdge dwsEdge);
+ void setDssEdge(DSSEdge dssEdge);
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdgeDefault.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdgeDefault.java
index b9f9fb9601..db663fe540 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdgeDefault.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerEdgeDefault.java
@@ -17,22 +17,22 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.entity;
-import com.webank.wedatasphere.dss.common.entity.node.DWSEdge;
+import com.webank.wedatasphere.dss.common.entity.node.DSSEdge;
/**
* Created by allenlliu on 2019/9/19.
*/
public class SchedulerEdgeDefault implements SchedulerEdge {
- private DWSEdge dwsEdge;
+ private DSSEdge dssEdge;
@Override
- public DWSEdge getDWSEdge() {
- return dwsEdge;
+ public DSSEdge getDssEdge() {
+ return dssEdge;
}
@Override
- public void setDWSEdge(DWSEdge dwsEdge) {
- this.dwsEdge = dwsEdge;
+ public void setDssEdge(DSSEdge dssEdge) {
+ this.dssEdge = dssEdge;
}
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerNode.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerNode.java
index 1d756efdd2..612c2e25d6 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerNode.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerNode.java
@@ -18,14 +18,14 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.entity;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import com.webank.wedatasphere.dss.common.entity.node.Node;
/**
* Created by enjoyyin on 2019/9/7.
*/
public interface SchedulerNode extends Node {
- DWSNode getDWSNode();
+ DSSNode getDssNode();
- void setDWSNode(DWSNode dwsNode);
+ void setDssNode(DSSNode dssNode);
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerProject.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerProject.java
index ca3d8362e5..d4158b0f4c 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerProject.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/entity/SchedulerProject.java
@@ -17,13 +17,13 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.entity;
-import com.webank.wedatasphere.dss.common.entity.project.DWSProject;
+import com.webank.wedatasphere.dss.common.entity.project.DSSProject;
import com.webank.wedatasphere.dss.common.entity.project.Project;
/**
* Created by enjoyyin on 2019/9/16.
*/
public interface SchedulerProject extends Project {
- DWSProject getDWSProject();
- void setDWSProject(DWSProject dwsProject);
+ DSSProject getDssProject();
+ void setDssProject(DSSProject dssProject);
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractFlowParser.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractFlowParser.java
index 546cb32ed6..96a9f2aed6 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractFlowParser.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractFlowParser.java
@@ -27,12 +27,12 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerNode;
import com.webank.wedatasphere.dss.common.entity.Resource;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerFlow;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSJSONFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSJSONFlow;
import com.webank.wedatasphere.dss.common.entity.flow.Flow;
-import com.webank.wedatasphere.dss.common.entity.node.DWSEdge;
-import com.webank.wedatasphere.dss.common.entity.node.DWSEdgeDefault;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNodeDefault;
+import com.webank.wedatasphere.dss.common.entity.node.DSSEdge;
+import com.webank.wedatasphere.dss.common.entity.node.DSSEdgeDefault;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNodeDefault;
import org.springframework.beans.BeanUtils;
import java.util.*;
@@ -64,39 +64,39 @@ protected void dealFlowResources(){}
protected void dealFlowProperties(Flow flow){}
@Override
- public SchedulerFlow parseFlow(DWSJSONFlow flow) {
+ public SchedulerFlow parseFlow(DSSJSONFlow flow) {
downloadFlowResources();
dealFlowResources();
dealFlowProperties(flow);
- return resolveDWSJSONFlow(flow);
+ return resolveDSSJSONFlow(flow);
}
- // Parse the DWSJSONFlow and generate DWSNodes
- public SchedulerFlow resolveDWSJSONFlow(DWSJSONFlow jsonFlow){
+ // Parse the DSSJSONFlow and generate DSSNodes
+ public SchedulerFlow resolveDSSJSONFlow(DSSJSONFlow jsonFlow){
SchedulerFlow schedulerFlow = createSchedulerFlow();
BeanUtils.copyProperties(jsonFlow,schedulerFlow,"children");
JsonParser parser = new JsonParser();
JsonObject jsonObject = parser.parse(jsonFlow.getJson()).getAsJsonObject();
JsonArray nodeJsonArray = jsonObject.getAsJsonArray("nodes");
Gson gson = new Gson();
- List dwsNodes = gson.fromJson(nodeJsonArray, new TypeToken>() {
+ List dssNodes = gson.fromJson(nodeJsonArray, new TypeToken>() {
}.getType());
List schedulerNodeList = new ArrayList<>();
List schedulerEdgeList = new ArrayList<>();
- for (DWSNode dwsNode : dwsNodes) {
+ for (DSSNode dssNode : dssNodes) {
Optional firstNodeParser = Arrays.stream(getNodeParsers())
- .filter(p -> p.ifNodeCanParse(dwsNode))
+ .filter(p -> p.ifNodeCanParse(dssNode))
.sorted((p1, p2) -> p2.getOrder() - p1.getOrder())
.findFirst();
- SchedulerNode schedulerNode = firstNodeParser.orElseThrow(()->new IllegalArgumentException("The number of NodeParsers should be greater than 0")).parseNode(dwsNode);
+ SchedulerNode schedulerNode = firstNodeParser.orElseThrow(()->new IllegalArgumentException("The number of NodeParsers should be greater than 0")).parseNode(dssNode);
schedulerNodeList.add(schedulerNode);
}
JsonArray edgeJsonArray = jsonObject.getAsJsonArray("edges");
- List dwsEdges = gson.fromJson(edgeJsonArray, new TypeToken>() {
+ List dssEdges = gson.fromJson(edgeJsonArray, new TypeToken>() {
}.getType());
- for (DWSEdge dwsEdge : dwsEdges) {
+ for (DSSEdge dssEdge : dssEdges) {
SchedulerEdge schedulerEdge = new SchedulerEdgeDefault();
- schedulerEdge.setDWSEdge(dwsEdge);
+ schedulerEdge.setDssEdge(dssEdge);
schedulerEdgeList.add(schedulerEdge);
}
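For readers tracing resolveDSSJSONFlow: it assumes the flow JSON is an object with top-level "nodes" and "edges" arrays, which Gson maps onto DSSNodeDefault and DSSEdgeDefault lists. A standalone sketch under that assumption (the field names inside the node and edge objects are illustrative):

import com.google.gson.JsonArray;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;

// Standalone sketch of the JSON shape consumed above; the JsonParser API used
// here matches the gson 2.8.x line referenced by this patch.
public class FlowJsonDemo {
    public static void main(String[] args) {
        String json = "{\"nodes\":[{\"id\":\"n1\",\"name\":\"node_1\"}],"
                + "\"edges\":[{\"source\":\"n1\",\"target\":\"n2\"}]}";
        JsonObject root = new JsonParser().parse(json).getAsJsonObject();
        JsonArray nodes = root.getAsJsonArray("nodes"); // -> List<DSSNodeDefault>
        JsonArray edges = root.getAsJsonArray("edges"); // -> List<DSSEdgeDefault>
        System.out.println(nodes.size() + " node(s), " + edges.size() + " edge(s)");
    }
}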
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractProjectParser.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractProjectParser.java
index 27ef1c1c25..a7d481d24b 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractProjectParser.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractProjectParser.java
@@ -21,10 +21,10 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerProjectVersionForFlows;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerFlow;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerProject;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSFlow;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSJSONFlow;
-import com.webank.wedatasphere.dss.common.entity.project.DWSJSONProject;
-import com.webank.wedatasphere.dss.common.entity.project.DWSProject;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSJSONFlow;
+import com.webank.wedatasphere.dss.common.entity.project.DSSProject;
+import com.webank.wedatasphere.dss.common.entity.project.DSSJSONProject;
import com.webank.wedatasphere.dss.common.entity.project.ProjectVersionForFlows;
import org.springframework.beans.BeanUtils;
@@ -50,27 +50,27 @@ public FlowParser[] getFlowParsers() {
return flowParsers;
}
- public DWSJSONProject parseToDWSJSONProject(DWSProject dwsProject){
- DWSJSONProject dwsjsonProject = new DWSJSONProject();
- BeanUtils.copyProperties(dwsProject,dwsjsonProject,"flows","projectVersions");
- List extends DWSFlow> dwsFlows = dwsProject.getFlows();
- List dwsjsonFlows = dwsFlows.stream().map(this::toDWSJsonFlow).collect(Collectors.toList());
- dwsjsonProject.setFlows(dwsjsonFlows);
- return dwsjsonProject;
+ public DSSJSONProject parseToDssJsonProject(DSSProject dssProject){
+ DSSJSONProject dssJsonProject = new DSSJSONProject();
+ BeanUtils.copyProperties(dssProject, dssJsonProject,"flows","projectVersions");
+ List extends DSSFlow> dwsFlows = dssProject.getFlows();
+ List dssJsonFlows = dwsFlows.stream().map(this::toDssJsonFlow).collect(Collectors.toList());
+ dssJsonProject.setFlows(dssJsonFlows);
+ return dssJsonProject;
}
- private DWSJSONFlow toDWSJsonFlow(DWSFlow dwsFlow){
- DWSJSONFlow dwsjsonFlow = new DWSJSONFlow();
- BeanUtils.copyProperties(dwsFlow,dwsjsonFlow,"children","flowVersions");
- dwsjsonFlow.setJson(dwsFlow.getLatestVersion().getJson());
- if(dwsFlow.getChildren() != null){
- dwsjsonFlow.setChildren(dwsFlow.getChildren().stream().map(this::toDWSJsonFlow).collect(Collectors.toList()));
+ private DSSJSONFlow toDssJsonFlow(DSSFlow dssFlow){
+ DSSJSONFlow dssJsonFlow = new DSSJSONFlow();
+ BeanUtils.copyProperties(dssFlow, dssJsonFlow,"children","flowVersions");
+ dssJsonFlow.setJson(dssFlow.getLatestVersion().getJson());
+ if(dssFlow.getChildren() != null){
+ dssJsonFlow.setChildren(dssFlow.getChildren().stream().map(this::toDssJsonFlow).collect(Collectors.toList()));
}
- return dwsjsonFlow;
+ return dssJsonFlow;
}
- public SchedulerProject parseProject(DWSJSONProject project){
+ public SchedulerProject parseProject(DSSJSONProject project){
AbstractSchedulerProject schedulerProject = createSchedulerProject();
SchedulerProjectVersionForFlows projectVersionForFlows = new SchedulerProjectVersionForFlows();
schedulerProject.setProjectVersions(new ArrayList());
@@ -81,23 +81,23 @@ public SchedulerProject parseProject(DWSJSONProject project){
return schedulerProject;
}
- private SchedulerFlow invokeFlowParser(ProjectVersionForFlows projectVersionForFlows, DWSJSONFlow dwsjsonFlow, FlowParser[] flowParsers){
- List flowParsersF = Arrays.stream(flowParsers).filter(f -> f.ifFlowCanParse(dwsjsonFlow)).collect(Collectors.toList());
+ private SchedulerFlow invokeFlowParser(ProjectVersionForFlows projectVersionForFlows, DSSJSONFlow dssJsonFlow, FlowParser[] flowParsers){
+ List flowParsersF = Arrays.stream(flowParsers).filter(f -> f.ifFlowCanParse(dssJsonFlow)).collect(Collectors.toList());
// TODO: 2019/9/25 Throw an exception if the number of flowParsers is greater than 1, or less than or equal to 0
- SchedulerFlow schedulerFlow = flowParsersF.get(0).parseFlow(dwsjsonFlow);
+ SchedulerFlow schedulerFlow = flowParsersF.get(0).parseFlow(dssJsonFlow);
// Collect all flows regardless of hierarchy?
projectVersionForFlows.addFlow(schedulerFlow);
- if(dwsjsonFlow.getChildren() != null){
- List schedulerFlows = dwsjsonFlow.getChildren().stream().map(f -> invokeFlowParser(projectVersionForFlows,f, flowParsers)).collect(Collectors.toList());
+ if(dssJsonFlow.getChildren() != null){
+ List schedulerFlows = dssJsonFlow.getChildren().stream().map(f -> invokeFlowParser(projectVersionForFlows,f, flowParsers)).collect(Collectors.toList());
schedulerFlow.setChildren(schedulerFlows);
}
return schedulerFlow;
}
@Override
- public SchedulerProject parseProject(DWSProject dwsProject) {
- SchedulerProject schedulerProject = parseProject(parseToDWSJSONProject(dwsProject));
- schedulerProject.setDWSProject(dwsProject);
+ public SchedulerProject parseProject(DSSProject dssProject) {
+ SchedulerProject schedulerProject = parseProject(parseToDssJsonProject(dssProject));
+ schedulerProject.setDssProject(dssProject);
return schedulerProject;
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractReadNodeParser.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractReadNodeParser.java
index 6cd767f720..68e8d970e7 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractReadNodeParser.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/AbstractReadNodeParser.java
@@ -18,7 +18,7 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.parser;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerNode;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.ReadNode;
import java.util.Arrays;
@@ -32,7 +32,7 @@ public abstract class AbstractReadNodeParser implements ContextNodeParser {
@Override
public String[] getShareNodeIds(SchedulerNode node) {
// Must be derived by parsing the node's params
- Map jobParams = node.getDWSNode().getParams();
+ Map jobParams = node.getDssNode().getParams();
if(jobParams == null) return null;
Map configuration =(Map) jobParams.get("configuration");
Map runtime = (Map) configuration.get("runtime");
@@ -57,16 +57,16 @@ public void setReadNodeContext(SchedulerNode node) {
@Override
public ReadNode parseNode(SchedulerNode node) {
ReadNode readNode = createReadNode();
- readNode.setDWSNode(node.getDWSNode());
+ readNode.setDssNode(node.getDssNode());
readNode.setSchedulerNode(node);
readNode.setShareNodeIds(getShareNodeIds(node));
return readNode;
}
@Override
- public SchedulerNode parseNode(DWSNode dwsNode) {
+ public SchedulerNode parseNode(DSSNode dssNode) {
SchedulerNode schedulerNode = createSchedulerNode();
- schedulerNode.setDWSNode(dwsNode);
+ schedulerNode.setDssNode(dssNode);
return parseNode(schedulerNode);
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/FlowParser.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/FlowParser.java
index b6b49897c6..85369debce 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/FlowParser.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/FlowParser.java
@@ -19,18 +19,18 @@
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerFlow;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSJSONFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSJSONFlow;
/**
* Created by enjoyyin on 2019/9/7.
*/
public interface FlowParser {
- SchedulerFlow parseFlow(DWSJSONFlow flow);
+ SchedulerFlow parseFlow(DSSJSONFlow flow);
void setNodeParsers(NodeParser[] nodeParsers);
NodeParser[] getNodeParsers();
- Boolean ifFlowCanParse(DWSJSONFlow flow);
+ Boolean ifFlowCanParse(DSSJSONFlow flow);
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/NodeParser.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/NodeParser.java
index 602d386235..ebba917a16 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/NodeParser.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/NodeParser.java
@@ -18,14 +18,14 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.parser;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerNode;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import com.webank.wedatasphere.dss.appjoint.scheduler.order.Order;
/**
* Created by enjoyyin on 2019/9/7.
*/
public interface NodeParser extends Order {
- SchedulerNode parseNode(DWSNode dwsNode);
+ SchedulerNode parseNode(DSSNode dssNode);
- Boolean ifNodeCanParse(DWSNode dwsNode);
+ Boolean ifNodeCanParse(DSSNode dssNode);
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/ProjectParser.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/ProjectParser.java
index 13ffd4362c..db00db311a 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/ProjectParser.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/ProjectParser.java
@@ -18,13 +18,13 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.parser;
import com.webank.wedatasphere.dss.appjoint.scheduler.entity.SchedulerProject;
-import com.webank.wedatasphere.dss.common.entity.project.DWSProject;
+import com.webank.wedatasphere.dss.common.entity.project.DSSProject;
/**
* Created by enjoyyin on 2019/9/16.
*/
public interface ProjectParser {
- SchedulerProject parseProject(DWSProject dwsProject);
+ SchedulerProject parseProject(DSSProject dssProject);
void setFlowParsers(FlowParser[] flowParser);
FlowParser[] getFlowParsers();
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/SendEmailNodeParser.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/SendEmailNodeParser.java
index f4511a8d39..1bd3606f0d 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/SendEmailNodeParser.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/parser/SendEmailNodeParser.java
@@ -18,7 +18,7 @@
package com.webank.wedatasphere.dss.appjoint.scheduler.parser;
import com.webank.wedatasphere.dss.appjoint.scheduler.constant.SchedulerAppJointConstant;
-import com.webank.wedatasphere.dss.common.entity.node.DWSNode;
+import com.webank.wedatasphere.dss.common.entity.node.DSSNode;
import java.util.Map;
@@ -28,9 +28,9 @@
public abstract class SendEmailNodeParser extends AbstractReadNodeParser {
@Override
- public Boolean ifNodeCanParse(DWSNode dwsNode) {
+ public Boolean ifNodeCanParse(DSSNode dssNode) {
// Check that the node type is sendemail and the category is node
- Map params = dwsNode.getParams();
+ Map params = dssNode.getParams();
if(params != null && !params.isEmpty()){
Object configuration = params.get(SchedulerAppJointConstant.CONFIGURATION);
if(configuration instanceof Map){
@@ -38,7 +38,7 @@ public Boolean ifNodeCanParse(DWSNode dwsNode) {
if(runtime instanceof Map){
Object category = ((Map) runtime).get(SchedulerAppJointConstant.CATEGORY);
if(category != null && SchedulerAppJointConstant.NODE.equals(category.toString())){
- return SchedulerAppJointConstant.SENDEMAIL_NODE_TYPE.equals(dwsNode.getNodeType());
+ return SchedulerAppJointConstant.SENDEMAIL_NODE_TYPE.equals(dssNode.getNodeType());
}
}
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractFlowTuning.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractFlowTuning.java
index f55ca135c8..42423a69d6 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractFlowTuning.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractFlowTuning.java
@@ -75,8 +75,8 @@ private void setDependencies(SchedulerNode node,List schedulerNod
private List resolveDependencys(SchedulerNode node,List schedulerNodes, List flowEdges) {
List dependencys = new ArrayList<>();
flowEdges.forEach(edge -> {
- if (edge.getDWSEdge().getTarget().equals(node.getId())) {
- dependencys.add(schedulerNodes.stream().filter(n ->edge.getDWSEdge().getSource().equals(n.getId())).findFirst().get().getName());
+ if (edge.getDssEdge().getTarget().equals(node.getId())) {
+ dependencys.add(schedulerNodes.stream().filter(n ->edge.getDssEdge().getSource().equals(n.getId())).findFirst().get().getName());
}
});
@@ -112,7 +112,7 @@ private String getProxyUser(SchedulerFlow schedulerFlow) {
private void setProxyUser(SchedulerFlow schedulerFlow) {
String proxyUser = getProxyUser(schedulerFlow);
if(StringUtils.isNotBlank(proxyUser)) {
- schedulerFlow.getSchedulerNodes().forEach(node -> node.getDWSNode().setUserProxy(proxyUser));
+ schedulerFlow.getSchedulerNodes().forEach(node -> node.getDssNode().setUserProxy(proxyUser));
schedulerFlow.setUserProxy(proxyUser);
}
}
diff --git a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractShareNodeFlowTuning.java b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractShareNodeFlowTuning.java
index 84493b510f..81dc0d5d31 100644
--- a/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractShareNodeFlowTuning.java
+++ b/dss-scheduler-appjoint-core/src/main/java/com/webank/wedatasphere/dss/appjoint/scheduler/tuning/AbstractShareNodeFlowTuning.java
@@ -66,9 +66,8 @@ public Map getShareNodes(SchedulerFlow flow, ReadNode[] read
Map res = new HashMap<>();
// Iterate over the readNodes, turning the node ids into a set of names and filtering out ids whose nodes were deleted but still linger in the content
Arrays.stream(readNodes).filter(rn ->rn.getShareNodeIds() != null).forEach(rn ->{
- List names = Arrays.stream(rn.getShareNodeIds()).filter(id->flow.getSchedulerNodes().stream().filter(sn -> !id.equals(sn.getId())).findFirst().isPresent()).
- map(id -> flow.getSchedulerNodes().stream().filter(sn -> id.equals(sn.getId())).findFirst().get().getName()).collect(Collectors.toList());
- rn.setShareNodeIds(names.toArray(new String[0]));
+ rn.setShareNodeIds(Arrays.stream(rn.getShareNodeIds()).filter(id -> flow.getSchedulerNodes().stream().anyMatch(sn -> id.equals(sn.getId()))).
+ map(id -> flow.getSchedulerNodes().stream().filter(sn -> id.equals(sn.getId())).findFirst().get().getName()).toArray(String[]::new));
});
Stream.of(readNodes).forEach(x ->
{
@@ -84,7 +83,7 @@ public Map getShareNodes(SchedulerFlow flow, ReadNode[] read
if(schedulerNode != null) {
int shareTimes = (nameAndNumMap.get(key)).intValue();
ShareNode shareNode = createShareNode();
- shareNode.setDWSNode(schedulerNode.getDWSNode());
+ shareNode.setDssNode(schedulerNode.getDssNode());
shareNode.setSchedulerNode(schedulerNode);
shareNode.setShareTimes(shareTimes);
res.put(shareNode, shareTimes);
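The rewrite above is a behavior fix as well as a cleanup: the old filter(sn -> !id.equals(sn.getId())).findFirst().isPresent() chain was satisfied whenever any node had a different id (almost always true), while anyMatch(sn -> id.equals(sn.getId())) checks that the id actually still exists. A standalone sketch of the corrected filtering (names are illustrative):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

// Standalone sketch: keep only share-node ids that still resolve to an
// existing node, then map each surviving id to that node's name.
public class ShareNodeFilterDemo {
    public static void main(String[] args) {
        Map<String, String> nodesById = new LinkedHashMap<>(); // id -> name
        nodesById.put("1", "node_a");
        nodesById.put("2", "node_b");
        String[] shareNodeIds = {"1", "2", "99"}; // "99" points at a deleted node
        String[] names = Arrays.stream(shareNodeIds)
                .filter(nodesById::containsKey) // the anyMatch existence check
                .map(nodesById::get)
                .toArray(String[]::new);
        System.out.println(Arrays.toString(names)); // [node_a, node_b]
    }
}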
diff --git a/dss-server/pom.xml b/dss-server/pom.xml
index f42dbb0a44..c3ec0ed388 100644
--- a/dss-server/pom.xml
+++ b/dss-server/pom.xml
@@ -22,7 +22,7 @@
dss
com.webank.wedatasphere.dss
- 0.7.0
+ 0.9.0
4.0.0
@@ -117,7 +117,7 @@
com.webank.wedatasphere.linkis
- 0.9.1
+ ${linkis.version}
@@ -145,7 +145,7 @@
com.webank.wedatasphere.dss
dss-scheduler-appjoint-core
- 0.7.0
+ 0.9.0
diff --git a/dss-server/src/main/assembly/distribution.xml b/dss-server/src/main/assembly/distribution.xml
index ffa6656d8c..0c7f789ee4 100644
--- a/dss-server/src/main/assembly/distribution.xml
+++ b/dss-server/src/main/assembly/distribution.xml
@@ -41,16 +41,16 @@
aopalliance:aopalliance:jar
asm:asm:jar
cglib:cglib:jar
- com.amazonaws:aws-java-sdk-autoscaling:jar
- com.amazonaws:aws-java-sdk-core:jar
- com.amazonaws:aws-java-sdk-ec2:jar
- com.amazonaws:aws-java-sdk-route53:jar
- com.amazonaws:aws-java-sdk-sts:jar
- com.amazonaws:jmespath-java:jar
+
+
+
+
+
+
com.fasterxml.jackson.core:jackson-annotations:jar
com.fasterxml.jackson.core:jackson-core:jar
com.fasterxml.jackson.core:jackson-databind:jar
- com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar
+
com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar
com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar
com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar
@@ -82,7 +82,6 @@
com.netflix.ribbon:ribbon-loadbalancer:jar
com.netflix.ribbon:ribbon-transport:jar
com.netflix.servo:servo-core:jar
- com.ning:async-http-client:jar
com.sun.jersey.contribs:jersey-apache-client4:jar
com.sun.jersey:jersey-client:jar
com.sun.jersey:jersey-core:jar
@@ -137,8 +136,6 @@
joda-time:joda-time:jar
log4j:log4j:jar
mysql:mysql-connector-java:jar
- net.databinder.dispatch:dispatch-core_2.11:jar
- net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar
org.antlr:antlr-runtime:jar
org.antlr:stringtemplate:jar
org.apache.commons:commons-compress:jar
@@ -176,7 +173,9 @@
org.eclipse.jetty:jetty-continuation:jar
org.eclipse.jetty:jetty-http:jar
org.eclipse.jetty:jetty-io:jar
+
org.eclipse.jetty:jetty-plus:jar
org.eclipse.jetty:jetty-security:jar
org.eclipse.jetty:jetty-server:jar
@@ -225,7 +224,9 @@
org.json4s:json4s-ast_2.11:jar
org.json4s:json4s-core_2.11:jar
org.json4s:json4s-jackson_2.11:jar
+
org.jvnet.mimepull:mimepull:jar
org.jvnet:tiger-types:jar
org.latencyutils:LatencyUtils:jar
@@ -280,7 +281,7 @@
org.springframework:spring-webmvc:jar
org.tukaani:xz:jar
org.yaml:snakeyaml:jar
- software.amazon.ion:ion-java:jar
+
xerces:xercesImpl:jar
xmlenc:xmlenc:jar
xmlpull:xmlpull:jar
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/constant/DSSServerConstant.java b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/constant/DSSServerConstant.java
index 3aebc85926..95ab683080 100644
--- a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/constant/DSSServerConstant.java
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/constant/DSSServerConstant.java
@@ -19,12 +19,13 @@
public class DSSServerConstant {
- public static final String DWS_PROJECT_FIRST_VERSION = "v000001";
- public static final String DWS_PROJECT_FIRST_VERSION_COMMENT = "first version";
- public static final String DWS_PROJECT_SOURCE = "create by user";
+ public static final String DSS_PROJECT_FIRST_VERSION = "v000001";
+ public static final String DSS_PROJECT_FIRST_VERSION_COMMENT = "first version";
+ public static final String DSS_PROJECT_SOURCE = "create by user";
+ public static final String DSS_WORKSPACE_SOURCE = "create by user";
public static final String PROJECT_VERSION_ID = "projectVersionID";
public static final String PUBLISH_FLOW_REPORT_FORMATE = "Workflow name: %s, version: %s, the workflow content is empty, please modify or delete it";
- public static final String EMVEDDEDFLOWID ="\"embeddedFlowId\":" ;
+ public static final String EMVEDDEDFLOWID = "\"embeddedFlowId\":";
public static final String VERSION_FORMAT = "%06d";
public static final String VERSION_PREFIX = "v";
}
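The renamed version constants compose as before; a small sketch, assuming version strings are built as VERSION_PREFIX plus a zero-padded counter (consistent with DSS_PROJECT_FIRST_VERSION = "v000001"):

// Illustrative only: shows how VERSION_PREFIX and VERSION_FORMAT combine.
public class VersionFormatDemo {
    private static final String VERSION_FORMAT = "%06d";
    private static final String VERSION_PREFIX = "v";

    public static void main(String[] args) {
        System.out.println(VERSION_PREFIX + String.format(VERSION_FORMAT, 1));  // v000001
        System.out.println(VERSION_PREFIX + String.format(VERSION_FORMAT, 42)); // v000042
    }
}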
diff --git a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/DSSUserMapper.java b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/DSSUserMapper.java
similarity index 86%
rename from dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/DSSUserMapper.java
rename to dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/DSSUserMapper.java
index fe81ae46a4..31d4974e12 100644
--- a/dss-application/src/main/java/com/webank/wedatasphere/dss/application/dao/DSSUserMapper.java
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/DSSUserMapper.java
@@ -15,14 +15,15 @@
*
*/
-package com.webank.wedatasphere.dss.application.dao;
+package com.webank.wedatasphere.dss.server.dao;
import com.webank.wedatasphere.dss.application.entity.DSSUser;
-/**
- * Created by chaogefeng on 2019/10/11.
- */
public interface DSSUserMapper {
+ Long getUserID(String userName);
+
+ String getuserName(Long userID);
+
DSSUser getUserByName(String username);
void registerDSSUser(DSSUser userDb);
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowMapper.java b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowMapper.java
index 52fa6fb2be..ea830d7cf8 100644
--- a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowMapper.java
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowMapper.java
@@ -18,8 +18,8 @@
package com.webank.wedatasphere.dss.server.dao;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSFlow;
-import com.webank.wedatasphere.dss.common.entity.flow.DWSFlowVersion;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSFlow;
+import com.webank.wedatasphere.dss.common.entity.flow.DSSFlowVersion;
import org.apache.ibatis.annotations.Param;
import org.springframework.dao.DuplicateKeyException;
@@ -27,23 +27,23 @@
public interface FlowMapper {
- DWSFlow selectFlowByID(Long id);
+ DSSFlow selectFlowByID(Long id);
- List listFlowByTaxonomyID(@Param("projectID") Long projectID, @Param("taxonomyID") Long taxonomyID, @Param("isRootFlow") Boolean isRootFlow);
+ List listFlowByTaxonomyID(@Param("projectID") Long projectID, @Param("taxonomyID") Long taxonomyID, @Param("isRootFlow") Boolean isRootFlow);
- List listFlowVersionsByFlowID(@Param("flowID") Long flowID, @Param("projectVersionID") Long projectVersionID);
+ List listFlowVersionsByFlowID(@Param("flowID") Long flowID, @Param("projectVersionID") Long projectVersionID);
- void insertFlow(DWSFlow dwsFlow) throws DuplicateKeyException;
+ void insertFlow(DSSFlow dssFlow) throws DuplicateKeyException;
- void insertFlowVersion(DWSFlowVersion version);
+ void insertFlowVersion(DSSFlowVersion version);
- void batchInsertFlowVersion(@Param("flowVersions") List flowVersions);
+ void batchInsertFlowVersion(@Param("flowVersions") List flowVersions);
void insertFlowRelation(@Param("flowID") Long flowID, @Param("parentFlowID") Long parentFlowID);
- DWSFlowVersion selectVersionByFlowID(@Param("flowID") Long flowID, @Param("version") String version, @Param("projectVersionID") Long projectVersionID);
+ DSSFlowVersion selectVersionByFlowID(@Param("flowID") Long flowID, @Param("version") String version, @Param("projectVersionID") Long projectVersionID);
- void updateFlowBaseInfo(DWSFlow dwsFlow) throws DuplicateKeyException;
+ void updateFlowBaseInfo(DSSFlow dssFlow) throws DuplicateKeyException;
List selectSubFlowIDByParentFlowID(Long parentFlowID);
@@ -55,17 +55,17 @@ public interface FlowMapper {
Long selectParentFlowIDByFlowID(Long flowID);
- List listFlowByProjectID(Long projectID);
+ List listFlowByProjectID(Long projectID);
- List listVersionByFlowIDAndProjectVersionID(@Param("flowID") Long flowID, @Param("projectVersionID") Long projectVersionID);
+ List listVersionByFlowIDAndProjectVersionID(@Param("flowID") Long flowID, @Param("projectVersionID") Long projectVersionID);
Boolean noVersions(Long flowID);
- List listLastFlowVersionsByProjectVersionID(@Param("projectVersionID") Long projectVersionId);
+ List listLastFlowVersionsByProjectVersionID(@Param("projectVersionID") Long projectVersionId);
- List listLatestRootFlowVersionByProjectVersionID(Long projectVersionID);
+ List listLatestRootFlowVersionByProjectVersionID(Long projectVersionID);
- void batchUpdateFlowVersion(List flowVersions);
+ void batchUpdateFlowVersion(List flowVersions);
Long getParentFlowID(Long flowID);
}
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowTaxonomyMapper.java b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowTaxonomyMapper.java
index f5c3dc1ec1..640e8b7d03 100644
--- a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowTaxonomyMapper.java
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/FlowTaxonomyMapper.java
@@ -18,7 +18,7 @@
package com.webank.wedatasphere.dss.server.dao;
-import com.webank.wedatasphere.dss.server.entity.DWSFlowTaxonomy;
+import com.webank.wedatasphere.dss.server.entity.DSSFlowTaxonomy;
import org.apache.ibatis.annotations.Param;
import org.springframework.dao.DuplicateKeyException;
@@ -26,11 +26,11 @@
public interface FlowTaxonomyMapper {
- DWSFlowTaxonomy selectFlowTaxonomyByID(Long id);
+ DSSFlowTaxonomy selectFlowTaxonomyByID(Long id);
- void insertFlowTaxonomy(DWSFlowTaxonomy dwsFlowTaxonomy) throws DuplicateKeyException;
+ void insertFlowTaxonomy(DSSFlowTaxonomy dssFlowTaxonomy) throws DuplicateKeyException;
- void updateFlowTaxonomy(DWSFlowTaxonomy dwsFlowTaxonomy) throws DuplicateKeyException;
+ void updateFlowTaxonomy(DSSFlowTaxonomy dssFlowTaxonomy) throws DuplicateKeyException;
Long hasFlows(Long flowTaxonomyID);
@@ -47,5 +47,5 @@ public interface FlowTaxonomyMapper {
void deleteFlowTaxonomyByProjectID(Long projectID);
- List listFlowTaxonomyByProjectID(Long projectID);
+ List listFlowTaxonomyByProjectID(Long projectID);
}
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectMapper.java b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectMapper.java
index b5757d1f8b..c2ae56786d 100644
--- a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectMapper.java
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectMapper.java
@@ -18,9 +18,9 @@
package com.webank.wedatasphere.dss.server.dao;
-import com.webank.wedatasphere.dss.common.entity.project.DWSProject;
-import com.webank.wedatasphere.dss.common.entity.project.DWSProjectPublishHistory;
-import com.webank.wedatasphere.dss.common.entity.project.DWSProjectVersion;
+import com.webank.wedatasphere.dss.common.entity.project.DSSProject;
+import com.webank.wedatasphere.dss.common.entity.project.DSSProjectPublishHistory;
+import com.webank.wedatasphere.dss.common.entity.project.DSSProjectVersion;
import org.apache.ibatis.annotations.Param;
import java.util.List;
@@ -28,19 +28,19 @@
public interface ProjectMapper {
- DWSProject selectProjectByID(Long id);
+ DSSProject selectProjectByID(Long id);
- DWSProjectVersion selectLatestVersionByProjectID(Long projectID);
+ DSSProjectVersion selectLatestVersionByProjectID(Long projectID);
- DWSProject selectProjectByVersionID(Long projectVersionID);
+ DSSProject selectProjectByVersionID(Long projectVersionID);
- void addProject(DWSProject dwsProject);
+ void addProject(DSSProject dssProject);
- void addProjectVersion(DWSProjectVersion dwsProjectVersion);
+ void addProjectVersion(DSSProjectVersion dssProjectVersion);
void updateDescription(@Param("projectID") Long projectID, @Param("description") String description, @Param("product")String product ,@Param("applicationArea")Integer applicationArea ,@Param("business")String business);
- List listProjectVersionsByProjectID(Long projectID);
+ List listProjectVersionsByProjectID(Long projectID);
Boolean noPublished(Long projectID);
@@ -48,15 +48,15 @@ public interface ProjectMapper {
void deleteProjectBaseInfo(long projectID);
- DWSProjectVersion selectProjectVersionByID(Long id);
+ DSSProjectVersion selectProjectVersionByID(Long id);
- DWSProjectVersion selectProjectVersionByProjectIDAndVersionID(@Param("projectID") Long projectId, @Param("version") String version);
+ DSSProjectVersion selectProjectVersionByProjectIDAndVersionID(@Param("projectID") Long projectId, @Param("version") String version);
Integer updateLock(@Param("lock") Integer lock, @Param("projectVersionID") Long projectVersionID);
- DWSProjectPublishHistory selectProjectPublishHistoryByProjectVersionID(Long projectVersionID);
+ DSSProjectPublishHistory selectProjectPublishHistoryByProjectVersionID(Long projectVersionID);
- void insertPublishHistory(DWSProjectPublishHistory dwsProjectPublishHistory);
+ void insertPublishHistory(DSSProjectPublishHistory dssProjectPublishHistory);
void updatePublishHistoryState(@Param("projectVersionID") Long projectVersionID, @Param("status") Integer status);
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectTaxonomyMapper.java b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectTaxonomyMapper.java
index 3d87bde6b7..21fc44e41a 100644
--- a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectTaxonomyMapper.java
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/ProjectTaxonomyMapper.java
@@ -17,8 +17,8 @@
package com.webank.wedatasphere.dss.server.dao;
-import com.webank.wedatasphere.dss.server.entity.DWSProjectTaxonomy;
-import com.webank.wedatasphere.dss.server.entity.DWSProjectTaxonomyRelation;
+import com.webank.wedatasphere.dss.server.entity.DSSProjectTaxonomy;
+import com.webank.wedatasphere.dss.server.entity.DSSProjectTaxonomyRelation;
import org.apache.ibatis.annotations.Param;
import org.springframework.dao.DuplicateKeyException;
@@ -26,16 +26,16 @@
public interface ProjectTaxonomyMapper {
- DWSProjectTaxonomy selectProjectTaxonomyByID(Long id);
- DWSProjectTaxonomyRelation selectProjectTaxonomyRelationByTaxonomyIdOrProjectId(Long taxonomyIdOrProjectId);
- List listProjectTaxonomyByUser(String userName);
+ DSSProjectTaxonomy selectProjectTaxonomyByID(Long id);
+ DSSProjectTaxonomyRelation selectProjectTaxonomyRelationByTaxonomyIdOrProjectId(Long taxonomyIdOrProjectId);
+ List listProjectTaxonomyByUser(String userName);
//--------------------
List listProjectIDByTaxonomyID(@Param("taxonomyID") Long taxonomyID, @Param("userName") String userName);
- void insertProjectTaxonomy(DWSProjectTaxonomy dwsProjectTaxonomy) throws DuplicateKeyException;
+ void insertProjectTaxonomy(DSSProjectTaxonomy dssProjectTaxonomy) throws DuplicateKeyException;
- void updateProjectTaxonomy(DWSProjectTaxonomy dwsProjectTaxonomy) throws DuplicateKeyException;
+ void updateProjectTaxonomy(DSSProjectTaxonomy dssProjectTaxonomy) throws DuplicateKeyException;
Long hasProjects(Long projectTaxonomyID);
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/WorkspaceMapper.java b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/WorkspaceMapper.java
new file mode 100644
index 0000000000..c3fbaca222
--- /dev/null
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/WorkspaceMapper.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.dss.server.dao;
+
+import com.webank.wedatasphere.dss.server.dto.response.*;
+import com.webank.wedatasphere.dss.server.entity.*;
+import com.webank.wedatasphere.dss.server.dto.response.HomepageDemoInstanceVo;
+import com.webank.wedatasphere.dss.server.dto.response.HomepageDemoMenuVo;
+import com.webank.wedatasphere.dss.server.dto.response.HomepageVideoVo;
+import com.webank.wedatasphere.dss.server.dto.response.WorkspaceFavoriteVo;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+/**
+ * Created by schumiyi on 2020/6/22
+ */
+public interface WorkspaceMapper {
+
+ List getWorkspaces();
+
+ List findByWorkspaceName(String name);
+
+ void addWorkSpace(DSSWorkspace dssWorkspace);
+
+ List getHomepageDemoMenusEn();
+ List getHomepageDemoMenusCn();
+
+ List getHomepageInstancesByMenuIdCn(Long id);
+ List getHomepageInstancesByMenuIdEn(Long id);
+
+ List getHomepageVideosEn();
+ List getHomepageVideosCn();
+
+ DSSWorkspace getWorkspaceById(Long workspaceId);
+
+ List getManagementMenuCn();
+ List getManagementMenuEn();
+
+ List getApplicationMenuCn();
+ List getApplicationMenuEn();
+
+ List getMenuAppInstancesCn(Long id);
+ List getMenuAppInstancesEn(Long id);
+
+ List getWorkspaceFavoritesCn(@Param("username") String username, @Param("workspaceId") Long workspaceId);
+
+ List getWorkspaceFavoritesEn(@Param("username") String username, @Param("workspaceId") Long workspaceId);
+
+ void addFavorite(DSSFavorite dssFavorite);
+
+ void deleteFavorite(Long favouritesId);
+}
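WorkspaceMapper is a plain MyBatis mapper interface, so callers receive an implementation generated from the companion mapper XML at runtime. A hypothetical usage sketch (WorkspaceService and its wiring are illustrative, not part of the patch):

import com.webank.wedatasphere.dss.server.dao.WorkspaceMapper;
import com.webank.wedatasphere.dss.server.entity.DSSWorkspace;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

// Hypothetical consumer of the new mapper; the class and method names here
// are illustrative, not taken from the patch.
@Service
public class WorkspaceService {

    @Autowired
    private WorkspaceMapper workspaceMapper;

    public DSSWorkspace getWorkspaceOrFail(Long workspaceId) {
        DSSWorkspace workspace = workspaceMapper.getWorkspaceById(workspaceId);
        if (workspace == null) {
            throw new IllegalArgumentException("No workspace with id " + workspaceId);
        }
        return workspace;
    }
}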
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/dwsUserMapper.xml b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/dwsUserMapper.xml
index 65fbd5eb2c..ac65dce4fc 100644
--- a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/dwsUserMapper.xml
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/dwsUserMapper.xml
@@ -19,7 +19,7 @@
- <mapper namespace="com.webank.wedatasphere.dss.application.dao.DSSUserMapper">
+ <mapper namespace="com.webank.wedatasphere.dss.server.dao.DSSUserMapper">
diff --git a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/flowMapper.xml b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/flowMapper.xml
index 38d66f96cf..37dae091a4 100644
--- a/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/flowMapper.xml
+++ b/dss-server/src/main/java/com/webank/wedatasphere/dss/server/dao/impl/flowMapper.xml
@@ -29,11 +29,11 @@
id,`flow_id`,`source`,`version`,`json_path`,`comment`,`update_time`,`updator_id`,`project_version_id`
-