diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 000000000..b1f35fc4f
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,4 @@
+*.js linguist-language=java
+*.css linguist-language=java
+*.html linguist-language=java
+*.vue linguist-language=java
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
new file mode 100644
index 000000000..33e2be454
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -0,0 +1,99 @@
+name: Bug report
+title: "[Bug] "
+description: Problems and issues with code of Streamis
+labels: [bug, triage]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Thank you for reporting the problem!
+ Please make sure what you are reporting is a bug with reproducible steps. To ask questions
+      or share ideas, please post on our [Discussion page](https://github.com/WeBankFinTech/Streamis/discussions) instead.
+
+ - type: checkboxes
+ attributes:
+ label: Search before asking
+ description: >
+ Please make sure to search in the [issues](https://github.com/WeBankFinTech/Streamis/issues) first to see
+ whether the same issue was reported already.
+ options:
+ - label: >
+ I searched the [issues](https://github.com/WeBankFinTech/Streamis/issues) and found no similar
+ issues.
+ required: true
+
+ - type: dropdown
+ attributes:
+ label: Streamis Component
+ description: |
+      What component are you using? Streamis has many modules, please make sure to choose the module
+      in which you found the bug.
+ multiple: true
+ options:
+ - "streamis-commons"
+ - "streamis-server"
+ - "streamis-job-manager"
+ - "streamis-job-launcher"
+ - "streamis-web"
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: What happened + What you expected to happen
+ description: Describe 1. the bug 2. expected behavior 3. useful information (e.g., logs)
+ placeholder: >
+ Please provide the context in which the problem occurred and explain what happened. Further,
+ To Reproduce Steps to reproduce the behavior: 1. Go to '...' 2. Click on '....' 3. Scroll down to '.... 4. See error
+ please also explain why you think the behaviour is erroneous. It is extremely helpful if you can
+ copy and paste the fragment of logs showing the exact error messages or wrong behaviour here.
+
+ **NOTE**: Expected behavior A clear and concise description of what you expected to happen.Screenshots If applicable, add screenshots to help explain your problem.
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+    label: Relevant platform
+    description: The platform where this issue occurred
+ placeholder: >
+ Please specify Desktop or Smartphone, Version / Dependencies / OS / Browser
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Reproduction script
+ description: >
+ Please provide a reproducible script. Providing a narrow reproduction (minimal / no external dependencies) will
+      help us triage and address issues in a timely manner!
+ placeholder: >
+ Please provide a short code snippet (less than 50 lines if possible) that can be copy-pasted to
+ reproduce the issue. The snippet should have **no external library dependencies**
+ (i.e., use fake or mock data / environments).
+
+ **NOTE**: If the code snippet cannot be run by itself, the issue will be marked as "needs-repro-script"
+ until the repro instruction is updated.
+ validations:
+ required: true
+
+ - type: textarea
+ attributes:
+ label: Anything else
+ description: Anything else we need to know?
+ placeholder: >
+ How often does this problem occur? (Once? Every time? Only when certain conditions are met?)
+ Any relevant logs to include? Are there other relevant issues?
+
+ - type: checkboxes
+ attributes:
+ label: Are you willing to submit a PR?
+ description: >
+ This is absolutely not required, but we are happy to guide you in the contribution process
+ especially if you already have a good understanding of how to implement the fix.
+ options:
+ - label: Yes I am willing to submit a PR!
+
+ - type: markdown
+ attributes:
+ value: "Thanks for completing our form!"
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 000000000..40276a265
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,5 @@
+blank_issues_enabled: false
+contact_links:
+ - name: Ask a question or get support
+ url: https://github.com/WeBankFinTech/Streamis/discussions
+ about: Ask a question or request support for using Streamis
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml
new file mode 100644
index 000000000..1e7492c46
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.yml
@@ -0,0 +1,63 @@
+name: Streamis feature request
+description: Suggest an idea for Streamis project
+title: "[Feature] "
+labels: [enhancement]
+body:
+ - type: markdown
+ attributes:
+ value: |
+ Thank you for finding the time to propose a new feature!
+ We really appreciate the community efforts to improve Streamis.
+ - type: checkboxes
+ attributes:
+ label: Search before asking
+ description: >
+ Please make sure to search in the [issues](https://github.com/WeBankFinTech/Streamis/issues) first to see
+ whether the same feature was requested already.
+ options:
+ - label: >
+ I had searched in the [issues](https://github.com/WeBankFinTech/Streamis/issues) and found no similar
+ feature requirement.
+ required: true
+ - type: textarea
+ attributes:
+ label: Problem Description
+ description: Is your feature request related to a problem? Please describe.
+
+ - type: textarea
+ attributes:
+ label: Description
+ description: A short description of your feature
+
+ - type: textarea
+ attributes:
+ label: Use case
+ description: >
+ Describe the use case of your feature request.
+ placeholder: >
+ Describe the solution you'd like A clear and concise description of what you want to happen.
+
+ - type: textarea
+ attributes:
+ label: solutions
+ description: Describe alternatives you've considered A clear and concise description of any alternative solutions or features you've considered.
+
+ - type: textarea
+ attributes:
+ label: Anything else
+ description: Anything else we need to know?
+ placeholder: >
+ Additional context Add any other context or screenshots about the feature request here.
+
+ - type: checkboxes
+ attributes:
+ label: Are you willing to submit a PR?
+ description: >
+ This is absolutely not required, but we are happy to guide you in the contribution process
+ especially if you already have a good understanding of how to implement the feature.
+ options:
+ - label: Yes I am willing to submit a PR!
+
+ - type: markdown
+ attributes:
+ value: "Thanks for completing our form!"
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 000000000..418271fff
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,27 @@
+### What is the purpose of the change
+(For example: Streamis-Server defines the restful interfaces of Streamis, we can use linkis-httpclient to access it.
+Related issues: #50. )
+
+### Brief change log
+(for example:)
+- Define the restful interfaces of Streamis;
+- Define the service and dao interfaces of Streamis.
+
+### Verifying this change
+(Please pick either of the following options)
+This change is a trivial rework / code cleanup without any test coverage.
+(or)
+This change is already covered by existing tests, such as (please describe tests).
+(or)
+This change added tests and can be verified as follows:
+(example:)
+- Added tests for creating and executing the Streamis jobs and verify the availability of different Streamis Job, such as flinkSQL, Jar.
+
+### Does this pull request potentially affect one of the following parts:
+- Dependencies (does it add or upgrade a dependency): (yes / no)
+- Anything that affects deployment: (yes / no / don't know)
+- The Core framework, i.e., JobManager, Server.: (yes / no)
+
+### Documentation
+- Does this pull request introduce a new feature? (yes / no)
+- If yes, how is the feature documented? (not applicable / docs / JavaDocs / not documented)
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 000000000..7f3d71477
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,55 @@
+#
+# Copyright 2019 WeBank.
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+name: Streamis CI Actions
+
+on:
+ push:
+ pull_request:
+
+jobs:
+ build:
+
+ runs-on: ubuntu-latest
+
+ strategy:
+ matrix:
+ node-version: [14.17.3]
+ # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Set up JDK 8
+ uses: actions/setup-java@v2
+ with:
+ distribution: 'adopt'
+ java-version: 8
+ - name: Use Node.js ${{ matrix.node-version }}
+ uses: actions/setup-node@v2
+ with:
+ node-version: ${{ matrix.node-version }}
+ - name: Build backend by maven
+ run: |
+ mvn -N install
+ mvn clean package
+ - name: Build frontend by node.js
+ run: |
+ cd web
+ npm install
+ npm run build
diff --git a/.github/workflows/check_license.yml b/.github/workflows/check_license.yml
new file mode 100644
index 000000000..3a9e1c01d
--- /dev/null
+++ b/.github/workflows/check_license.yml
@@ -0,0 +1,48 @@
+#
+# Copyright 2019 WeBank.
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+name: Streamis License check
+
+on: [push, pull_request]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout source
+ uses: actions/checkout@v2
+ - name: Set up JDK 8
+ uses: actions/setup-java@v2
+ with:
+ java-version: '8'
+ distribution: 'adopt'
+ - name: mvn -N install
+ run:
+ mvn -N install
+ - name: License check with Maven
+ run: |
+ rat_file=`mvn apache-rat:check | { grep -oe "\\S\\+/rat.txt" || true; }`
+ echo "rat_file=$rat_file"
+ if [[ -n "$rat_file" ]];then echo "check error!" && cat $rat_file && exit 123;else echo "check success!" ;fi
+ - name: Upload the report
+ uses: actions/upload-artifact@v2
+ with:
+ name: license-check-report
+ path: "**/target/rat.txt"
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 000000000..d8b0f432b
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,23 @@
+*.iml
+.idea
+.DS_Store
+assembly/target
+streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/target
+streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/target
+streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/target
+streamis-jobmanager/streamis-job-launcher/streamis-job-launcher.iml
+streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/target
+streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/target
+streamis-jobmanager/streamis-jobmanager-common/target
+streamis-jobmanager/streamis-jobmanager-server/target
+streamis-jobmanager/streamis-projectmanager-server/target
+
+streamis-project/streamis-project-common/target
+streamis-project/streamis-project-server/target
+
+streamis-server/target
+streamis-appconn/target
+/logs/streamis-server.log
+/logs/linkis.log
+/test/target
+/test/src
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/README-ZH.md b/README-ZH.md
new file mode 100644
index 000000000..5723fe3d5
--- /dev/null
+++ b/README-ZH.md
@@ -0,0 +1,127 @@
+# Streamis
+
+[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
+
+[English](README.md) | 中文
+
+## 引言
+
+ Streamis 是 **微众银行** 联合 **天翼云**、**仙翁科技** 和 **萨摩耶云** 联合共建的流式应用开发管理系统。
+
+ 基于 [DataSphere Studio](https://github.com/WeBankFinTech/DataSphereStudio) 的框架化能力,以及底层对接 [Linkis](https://github.com/apache/incubator-linkis) 的 **Flink 引擎**,让用户低成本完成流式应用的开发、调试、发布和生产管理。
+
+ 未来还规划将以工作流式的图形化拖拽开发体验,将流式应用以Source节点、
+Dimension节点、Transform节点、Sink节点 和 [Visualis](https://github.com/WeBankFinTech/Visualis) 节点串连成一条流式工作流,让用户以更低的学习成本完成流式应用的开发、调试和发布。
+
+----
+
+## 核心特点
+
+#### 1. 基于 DSS 和 Linkis,打造领先的流式应用开发管理系统。
+
+ 以 Flink 为底层计算引擎,基于开发中心和生产中心隔离的架构设计模式,完全隔离开发权限与发布权限,隔离开发环境与生产环境,保证业务应用的高稳定性和高安全性。
+
+ 应用开发层与 DSS 的数据应用开发流程有机整合,提供极简的用户使用体验。
+
+ 应用执行层集成 Linkis 计算中间件,打造金融级具备高并发、高可用、多租户隔离和资源管控等能力的流式应用管理能力。
+
+#### 2. 强大的流式应用开发调试能力。
+
+ 基于 DSS-Scriptis 提供流式应用的开发和调试功能,支持对 FlinkSQL 进行实时调试和结果集展示。
+
+![流式开发中心](docs/images/开发中心.png)
+
+#### 3. 强大的流式应用生产中心能力。
+
+ 支持流式作业的多版本管理、全生命周期管理、监控告警、checkpoint 和 savepoint 管理能力。
+
+![流式应用生产](docs/images/stream_product_center.png)
+
+ 流式应用运行情况:
+
+![流式应用运行情况](docs/images/stream_job_detail.png)
+
+ 流式应用参数配置:
+
+![流式应用配置](docs/images/stream_job_config_1.png)
+![流式应用配置](docs/images/stream_job_config_2.png)
+
+ 更多功能,请参考:[Streamis 用户手册](docs/zh_CN/0.2.4/使用文档/Streamis用户手册.md)。
+
+----
+
+## 依赖的生态组件
+
+| 依赖的应用工具 | 描述 | Streamis 兼容版本 |
+|--------------|---------------------------------------------------------------|--------------|
+| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | 数据应用开发管理集成框架。以工作流式的图形化拖拽开发体验,将满足从数据交换、脱敏清洗、分析挖掘、质量检测、可视化展现、定时调度到数据输出应用等,数据应用开发全流程场景需求。 | >= DSS1.1.0(已发布)|
+| [Linkis](https://github.com/apache/incubator-linkis) | 计算中间件 Apache Linkis,通过提供 REST/WebSocket/JDBC/SDK 等标准接口,上层应用可以方便地连接访问 MySQL/Spark/Hive/Presto/Flink 等底层引擎。 | >= Linkis1.1.1(已发布),部分功能需要Linkis 1.1.2支持 |
+
+----
+
+## Demo试用环境
+
+ 正在部署中,敬请期待!
+
+----
+
+## 下载
+
+ 请前往 [Streamis releases](https://github.com/WeBankFinTech/Streamis/releases) 页面下载 Streamis 的已编译版本或源码包。
+
+----
+
+## 编译和安装部署
+
+ 请参考 [Streamis 安装部署文档](docs/zh_CN/0.2.4/Streamis安装文档.md) ,用于安装部署 Streamis 及其依赖环境。
+
+
+----
+
+## 示例和使用指引
+
+ 请到 [用户使用文档](docs/zh_CN/0.2.4/使用文档/Streamis用户手册.md) ,了解如何快速使用 Streamis。
+
+----
+
+## Streamis 功能介绍
+
+| 功能模组 | 描述 | Streamis |
+ | :----: | :----: |-------|
+ | 安装部署 | 部署难易程度和第三方依赖 | 一键部署,依赖Linkis Flink引擎 |
+ | 开发中心| FlinkSQL 流式应用实时开发、调试 | 支持,需集成DSS |
+ | 生产中心 | 流式应用管理运维能力 | 支持 |
+ | | 复用 Linkis 计算治理能力 | 支持 |
+ | | 支持 FlinkSQL 和 FlinkJar 包等方式发布 | 支持 |
+ | | 流式应用的多版本管理能力 | 支持 |
+ | | 流式应用的参数配置和告警能力 | 支持 |
+ | 服务高可用 | 应用高可用,服务多点,状态快照实现容错处理,故障不影响使用 | 支持 |
+ | 系统管理 | 节点、资源管理 | 支持 |
+ |权限管理 |任务的操作权限控制 |支持 |
+
+----
+
+## 架构
+
+![架构](images/zh_CN/readme/architecture.png)
+
+----
+
+## 贡献
+
+ 我们非常欢迎和期待更多的贡献者参与共建 Streamis, 不论是代码、文档,或是其他能够帮助到社区的贡献形式。
+
+## 联系我们
+
+ 对 Streamis 的任何问题和建议,敬请提交 [issue](https://github.com/WeBankFinTech/Streamis/issues),以便跟踪处理和经验沉淀共享。
+
+ 您也可以扫描下面的二维码,加入我们的 微信/QQ群,以获得更快速的响应。
+
+![交流](images/zh_CN/readme/communication.png)
+
+----
+
+## License
+
+    Streamis is under the Apache 2.0 license. See the [License](LICENSE) file for details.
+
diff --git a/README.md b/README.md
index 4f78a9656..2db81429d 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,127 @@
-# dw-streamis
+# Streamis
+
+[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html)
+
+English | [中文](README-ZH.md)
+
+## Introduction
+
+       Streamis is a joint development project for streaming application development and management established by WeBank, CtYun, Samoyed Financial Cloud and XianWeng Technology.
+
+ Based on the framework capabilities of [DataSphere Studio](https://github.com/WeBankFinTech/DataSphereStudio) and the underlying docking [Linkis](https://github.com/apache/incubator-linkis/blob/master/README.md) 's **Flink engine** allows users to complete the development, debugging, release and production management of streaming applications at low cost.
+
+ In the future, it is also planned to use a workflow-style graphical drag-and-drop development experience, and the streaming application will be based on the Source node,
+The Dimension node, Transform node, Sink node and [Visualis](https://github.com/WeBankFinTech/Visualis) nodes are connected in series to form a streaming workflow, allowing users to complete the development of streaming applications at a lower learning cost. Debug and release.
+
+----
+
+## Core features
+
+#### 1. Based on DSS and DSS-Scriptis, to create a leading streaming application development management system.
+
+ With Flink as the underlying computation engine, based on the architectural design pattern of the isolation between the development center and the production center, it completely isolates development permissions and publishing permissions, and isolates the development environment and production environment to ensure high stability and high security of streaming applications.
+
+    The application development layer is organically integrated with the data application development process of DSS, providing a simpler user experience.
+
+ The application execution layer integrates Linkis to provide financial-level streaming application management capabilities with high concurrency, high availability, multi-tenant isolation, and resource management.
+
+#### 2. Powerful streaming application development and debugging capabilities.
+
+ Based on DSS-Scriptis, provides streaming application development and debugging functions, and supports real-time debugging and result set display of FlinkSQL.
+
+![development center](docs/images/开发中心.png)
+
+#### 3. Powerful streaming application production center capabilities.
+
+ Supports multi-version management, full life cycle management, monitoring alarm, checkpoint and savepoint management capabilities of streaming jobs.
+
+![prod center](docs/images/stream_product_center_en.png)
+
+ Running information page:
+
+![Running information](docs/images/stream_job_detail_en.png)
+
+ Configurations page:
+
+![Configurations](docs/images/stream_job_config_en_1.png)
+![Configurations](docs/images/stream_job_config_en_2.png)
+
+ For more features, please refer to: [User Manual](docs/en_US/userManual/StreamisUserManual.md).
+
+----
+
+## Depended ecosystems
+
+| Depended Component | Description | Streamis compatibility |
+| -------------- | -------------------------------------------------------------- | --------------|
+| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | Data application development management framework. With a unified UI, the workflow-like graphical drag-and-drop development experience meets the entire lifecycle of data application development from data import, desensitization cleaning, data analysis, data mining, quality inspection, visualization, scheduling to data output applications, etc. | >= DSS1.1.0 (Released) |
+| [Linkis](https://github.com/apache/incubator-linkis) | Apache Linkis, builds a layer of computation middleware, by using standard interfaces such as REST/WS/JDBC provided by Linkis, the upper applications can easily access the underlying engines such as MySQL/Spark/Hive/Presto/Flink, etc. | >= Linkis1.1.1 (Released),some functions need to be supported by linkis 1.1.2 |
+
+## Demo Trial environment
+
+ In progress, stay tuned!
+
+----
+
+## Download
+
+ Please go to the [Streamis Releases](https://github.com/WeBankFinTech/Streamis/releases) Page to download a compiled version or a source code package of Streamis.
+
+----
+
+## Compile and install deployment
+
+please refer to [Streamis Installation and Deployment Document](docs/en_US/0.2.4/StreamisDeployment.md) for installing and deploying Streamis.
+
+----
+
+## Examples and usage guidelines
+
+    Please refer to the [User documentation](docs/en_US/userManual/StreamisUserManual.md) to learn how to quickly use Streamis.
+
+----
+
+## Features
+
+| Function Module | Description | Streamis |
+ | :----: | :----: |-------|
+| UI | Integrated and convenient management interface and monitoring window | Integrated |
+| Installation and deployment | Deployment difficulty and third-party dependencies | One-click deployment, relying on Linkis Flink engine |
+| Development Center | FlinkSQL streaming application real-time development and debugging | Support, need to integrate DSS |
+|Production Center | Streaming Application Management Operation and Maintenance Capability | Support |
+| | Reuse Linkis computing governance capabilities | Support |
+| | Support FlinkSQL and FlinkJar package release | Support |
+| | Multi-version management capabilities | Support |
+| | Configuration and alert management capabilities | Support |
+| Service high availability | Multiple services,State snapshot for fault tolerance, failure does not affect the use | Application high availability |
+| System Management | Node and Resource Management | Support |
+| Permission management | Task operation permission control | Support |
+
+----
+
+## Architecture
+
+![Architecture](images/en_US/readme/architecture.png)
+
+----
+
+## Contributing
+
+ Contributions are always welcomed, we need more contributors to build Streamis together. either code, or doc, or other supports that could help the community.
+
+----
+
+## Communication contribution
+
+ For any questions or suggestions, please kindly submit an [issue](https://github.com/WeBankFinTech/Streamis/issues).
+
+ You can scan the QR code below to join our WeChat and QQ group to get more immediate response.
+
+![comminicate](images/zh_CN/readme/communication.png)
+
+----
+
+## License
+
+    Streamis is under the Apache 2.0 license. See the [License](LICENSE) file for details.
diff --git a/assembly/pom.xml b/assembly/pom.xml
new file mode 100644
index 000000000..46d4e8a16
--- /dev/null
+++ b/assembly/pom.xml
@@ -0,0 +1,70 @@
+
+
+
+
+
+ streamis
+ com.webank.wedatasphere.streamis
+ 0.2.4
+
+ 4.0.0
+
+ assembly
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-antrun-plugin
+
+
+ package
+
+ run
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-assembly-plugin
+ 2.3
+
+
+ dist
+ package
+
+ single
+
+
+ false
+ wedatasphere-streamis-${streamis.version}-dist
+ false
+ false
+
+ src/main/assembly/assembly.xml
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/assembly/src/main/assembly/assembly.xml b/assembly/src/main/assembly/assembly.xml
new file mode 100644
index 000000000..158df4b98
--- /dev/null
+++ b/assembly/src/main/assembly/assembly.xml
@@ -0,0 +1,80 @@
+
+
+
+ dist
+
+ tar.gz
+
+ false
+
+
+
+
+
+
+ ${project.parent.basedir}
+ .
+
+ README*
+ LICENSE*
+ NOTICE*
+
+
+
+
+
+ ${project.parent.basedir}/conf/
+
+ conf
+
+ **/*
+
+ unix
+
+
+
+
+ ${project.parent.basedir}/bin/
+
+ bin
+
+ **/*
+
+ unix
+
+
+
+
+ ${project.parent.basedir}/db/
+
+ db
+
+ **/*
+
+
+
+
+
+ ${project.parent.basedir}/streamis-server/target/
+
+ ./share/streamis-server/
+
+ **/*.zip
+
+
+
+
+
diff --git a/bin/install.sh b/bin/install.sh
new file mode 100644
index 000000000..35d03c1b5
--- /dev/null
+++ b/bin/install.sh
@@ -0,0 +1,159 @@
+#!/bin/sh
+#Actively load user env
+if [ ! -f ~/.bashrc ];then
+ echo "Warning! user bashrc file does not exist."
+else
+ source ~/.bashrc
+fi
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
+SERVER_IP=""
+SERVER_HOME=""
+
+local_host="`hostname --fqdn`"
+LOCAL_IP="`ifconfig | grep 'inet' | grep -v '127.0.0.1' | cut -d: -f2 | awk '{ print $2}'`"
+
+#To be compatible with MacOS and Linux
+txt=""
+if [[ "$OSTYPE" == "darwin"* ]]; then
+ txt="''"
+elif [[ "$OSTYPE" == "linux-gnu" ]]; then
+ txt=""
+elif [[ "$OSTYPE" == "cygwin" ]]; then
+ echo "streamis not support Windows operating system"
+ exit 1
+elif [[ "$OSTYPE" == "msys" ]]; then
+ echo "streamis not support Windows operating system"
+ exit 1
+elif [[ "$OSTYPE" == "win32" ]]; then
+ echo "streamis not support Windows operating system"
+ exit 1
+elif [[ "$OSTYPE" == "freebsd"* ]]; then
+ txt=""
+else
+ echo "Operating system unknown, please tell us(submit issue) for better service"
+ exit 1
+fi
+
+function isSuccess(){
+if [ $? -ne 0 ]; then
+ echo "Failed to $1"
+ exit 1
+else
+ echo "Succeed to $1"
+fi
+}
+
+function checkJava(){
+ java -version
+ isSuccess "execute java -version"
+}
+
+
+##install env:expect,
+sudo yum install -y expect
+isSuccess "install expect"
+
+##install env:telnet,
+sudo yum install -y telnet
+isSuccess "install telnet"
+
+##load config
+echo "step1:load config"
+source ${workDir}/conf/config.sh
+source ${workDir}/conf/db.sh
+isSuccess "load config"
+
+local_host="`hostname --fqdn`"
+
+
+##env check
+echo "Do you want to clear Streamis table information in the database?"
+echo " 1: Do not execute table-building statements"
+echo " 2: Dangerous! Clear all data and rebuild the tables."
+echo ""
+
+MYSQL_INSTALL_MODE=1
+
+read -p "Please input the choice:" idx
+if [[ '2' = "$idx" ]];then
+ MYSQL_INSTALL_MODE=2
+ echo "You chose Rebuild the table"
+elif [[ '1' = "$idx" ]];then
+ MYSQL_INSTALL_MODE=1
+ echo "You chose not execute table-building statements"
+else
+ echo "no choice,exit!"
+ exit 1
+fi
+
+##init db
+if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/streamis_ddl.sql"
+ isSuccess "source streamis_ddl.sql"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/streamis_dml.sql"
+ isSuccess "source streamis_dml.sql"
+ echo "Rebuild the table"
+fi
+
+
+EUREKA_URL=http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/
+
+##function start
+function installPackage(){
+echo "start to install $SERVERNAME"
+echo "$SERVERNAME-step1: create dir"
+
+if ! test -e $SERVER_HOME; then
+ sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME
+ isSuccess "create the dir of $SERVERNAME"
+fi
+
+echo "$SERVERNAME-step2:copy install package"
+cp ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_HOME
+isSuccess "copy ${SERVERNAME}.zip"
+cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak
+cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null
+isSuccess "unzip ${SERVERNAME}.zip"
+
+echo "$SERVERNAME-step3:substitution conf"
+SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/application.yml
+sed -i "s#port:.*#port: $SERVER_PORT#g" $SERVER_CONF_PATH
+sed -i "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $SERVER_CONF_PATH
+sed -i "s#hostname:.*#hostname: $SERVER_IP#g" $SERVER_CONF_PATH
+isSuccess "substitution conf of $SERVERNAME"
+}
+
+function setDatasourcePassword(){
+ PASSWORD=$MYSQL_PASSWORD
+ temp=${PASSWORD//#/%tream%}
+ sed -i "s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$temp#g" $SERVER_CONF_PATH
+ sed -i "s/%tream%/#/g" $SERVER_CONF_PATH
+}
+##function end
+
+
+##Streamis-Server Install
+PACKAGE_DIR=streamis-server
+SERVERNAME=streamis-server
+SERVER_IP=$STREAMIS_SERVER_INSTALL_IP
+SERVER_PORT=$STREAMIS_SERVER_INSTALL_PORT
+SERVER_HOME=$STREAMIS_INSTALL_HOME
+###install Streamis-Server
+installPackage
+###update Streamis-Server linkis.properties
+echo "$SERVERNAME-step4:update linkis.properties"
+SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties
+sed -i "s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g" $SERVER_CONF_PATH
+sed -i "s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g" $SERVER_CONF_PATH
+setDatasourcePassword
+sed -i "s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g" $SERVER_CONF_PATH
+sed -i "s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g" $SERVER_CONF_PATH
+sed -i "s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}#g" $SERVER_CONF_PATH
+isSuccess "substitution linkis.properties of $SERVERNAME"
+echo "<----------------$SERVERNAME:end------------------->"
+echo ""
+
+
diff --git a/bin/start.sh b/bin/start.sh
new file mode 100644
index 000000000..0be2ba7cb
--- /dev/null
+++ b/bin/start.sh
@@ -0,0 +1,80 @@
+#!/usr/bin/env bash
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+# Start all streamis applications
+info="We will start all streamis applications, it will take some time, please wait"
+echo ${info}
+
+#Actively load user env
+source ~/.bash_profile
+
+workDir=`dirname "${BASH_SOURCE-$0}"`
+workDir=`cd "$workDir"; pwd`
+
+
+CONF_DIR="${workDir}"/../conf
+CONF_FILE=${CONF_DIR}/config.sh
+
+function isSuccess(){
+if [ $? -ne 0 ]; then
+ echo "ERROR: $1"
+ exit 1
+else
+ echo "INFO: $1"
+fi
+}
+
+sudo yum -y install dos2unix
+
+
+local_host="`hostname --fqdn`"
+
+#if there is no STREAMIS_INSTALL_HOME,we need to source config again
+if [ -z "${STREAMIS_INSTALL_HOME}" ];then
+ echo "Warning: STREAMIS_INSTALL_HOME does not exist, we will source config"
+ if [ ! -f "${CONF_FILE}" ];then
+ echo "Error: can not find config file, start applications failed"
+ exit 1
+ else
+ source ${CONF_FILE}
+ fi
+fi
+
+function startApp(){
+echo "<-------------------------------->"
+echo "Begin to start $SERVER_NAME"
+SERVER_BIN=${STREAMIS_INSTALL_HOME}/${SERVER_NAME}/bin
+SERVER_START_CMD="source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1;sh start-${SERVER_NAME}.sh > /dev/null 2>&1 &"
+
+if [ -n "${SERVER_IP}" ];then
+ ssh -p $SSH_PORT ${SERVER_IP} "${SERVER_START_CMD}"
+else
+ ssh -p $SSH_PORT ${local_host} "${SERVER_START_CMD}"
+fi
+isSuccess "End to start $SERVER_NAME"
+echo "<-------------------------------->"
+sleep 15 #for Eureka register
+}
+
+#streamis-server
+SERVER_NAME=streamis-server
+SERVER_IP=$STREAMIS_SERVER_INSTALL_IP
+startApp
+
+
diff --git a/bin/stop.sh b/bin/stop.sh
new file mode 100644
index 000000000..33c59c560
--- /dev/null
+++ b/bin/stop.sh
@@ -0,0 +1,75 @@
+#!/usr/bin/env bash
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+# Stop all streamis applications
+info="We will stop all streamis applications, it will take some time, please wait"
+echo ${info}
+
+#Actively load user env
+source ~/.bash_profile
+
+workDir=`dirname "${BASH_SOURCE-$0}"`
+workDir=`cd "$workDir"; pwd`
+
+
+CONF_DIR="${workDir}"/../conf
+CONF_FILE=${CONF_DIR}/config.sh
+
+function isSuccess(){
+if [ $? -ne 0 ]; then
+ echo "ERROR: $1"
+ exit 1
+else
+ echo "INFO: $1"
+fi
+}
+
+
+
+local_host="`hostname --fqdn`"
+
+#if there is no STREAMIS_INSTALL_HOME,we need to source config again
+if [ -z "${STREAMIS_INSTALL_HOME}" ];then
+ echo "Warning: STREAMIS_INSTALL_HOME does not exist, we will source config"
+ if [ ! -f "${CONF_FILE}" ];then
+ echo "Error: can not find config file, stop applications failed"
+ exit 1
+ else
+ source ${CONF_FILE}
+ fi
+fi
+
+function stopAPP(){
+echo "<-------------------------------->"
+echo "Begin to stop $SERVER_NAME"
+SERVER_BIN=${STREAMIS_INSTALL_HOME}/${SERVER_NAME}/bin
+SERVER_STOP_CMD="source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1; sh stop-${SERVER_NAME}.sh"
+if [ -n "${SERVER_IP}" ];then
+ ssh -p $SSH_PORT ${SERVER_IP} "${SERVER_STOP_CMD}"
+else
+ ssh -p $SSH_PORT ${local_host} "${SERVER_STOP_CMD}"
+fi
+isSuccess "End to stop $SERVER_NAME"
+echo "<-------------------------------->"
+}
+
+#streamis-server
+SERVER_NAME=streamis-server
+SERVER_IP=$STREAMIS_SERVER_INSTALL_IP
+stopAPP
diff --git a/bin/upgrade.sh b/bin/upgrade.sh
new file mode 100644
index 000000000..856b0c4bc
--- /dev/null
+++ b/bin/upgrade.sh
@@ -0,0 +1,205 @@
+#!/usr/bin/env bash
+#
+# Copyright 2022 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Use to upgrade from 0.2.x (< 0.2.5) to 0.2.5
+
+if [ ! -f ~/.bashrc ];then
+ echo "Warning! user bashrc file does not exist."
+else
+ source ~/.bashrc
+fi
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
+interact_echo(){
+ while [ 1 ]; do
+ read -p "$1 (Y/N)" yn
+ if [[ "${yn}x" == "Yx" ]] || [[ "${yn}x" == "yx" ]]; then
+ return 0
+ elif [[ "${yn}x" == "Nx" ]] || [[ "${yn}x" == "nx" ]]; then
+ return 1
+ else
+ echo "Unknown choose: [$yn], please choose again."
+ fi
+ done
+}
+
+interact_echo "Are you sure the current version of Streamis is 0.2.x < 0.2.5 and need to upgrade to 0.2.5 ?"
+if [[ $? == 0 ]]; then
+ source ${workDir}/conf/db.sh
+ echo "<------ Will connect to [${MYSQL_HOST}:${MYSQL_PORT}] to upgrade the tables in database... ------>"
+ mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 << EOF 1>/dev/null
+ /*Modify the table column*/
+ ALTER TABLE \`linkis_stream_job\` MODIFY COLUMN \`project_name\` varchar(100) DEFAULT NULL;
+ ALTER TABLE \`linkis_stream_job\` MODIFY COLUMN \`name\` varchar(200) DEFAULT NULL;
+ ALTER TABLE \`linkis_stream_project\` MODIFY COLUMN \`name\` varchar(100) DEFAULT NULL;
+ ALTER TABLE \`linkis_stream_task\` MODIFY COLUMN \`job_id\` varchar(200) DEFAULT NULL;
+ ALTER TABLE \`linkis_stream_task\` MODIFY COLUMN \`linkis_job_id\` varchar(200) DEFAULT NULL;
+
+ ALTER TABLE \`linkis_stream_project\` ADD create_time datetime DEFAULT NULL;
+ ALTER TABLE \`linkis_stream_project\` ADD last_update_by varchar(50) DEFAULT NULL;
+ ALTER TABLE \`linkis_stream_project\` ADD last_update_time datetime DEFAULT NULL;
+ ALTER TABLE \`linkis_stream_project\` ADD is_deleted tinyint unsigned DEFAULT 0;
+
+ /*Add indexes into the tables*/
+ ALTER TABLE \`linkis_stream_job\` ADD UNIQUE KEY(\`project_name\`, \`name\`);
+ ALTER TABLE \`linkis_stream_job_version\` ADD UNIQUE KEY(\`job_id\`, \`version\`);
+
+ /*Add new tables*/
+ DROP TABLE IF EXISTS \`linkis_stream_project_privilege\`;
+ CREATE TABLE \`linkis_stream_project_privilege\` (
+ \`id\` bigint(20) NOT NULL AUTO_INCREMENT,
+ \`project_id\` bigint(20) NOT NULL,
+ \`user_name\` varchar(100) NOT NULL,
+ \`privilege\` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:发布权限 ,2:编辑权限 ,3:查看权限',
+ PRIMARY KEY (\`id\`) USING BTREE
+ ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='项目权限表';
+
+ DROP TABLE IF EXISTS \`linkis_stream_job_config_def\`;
+ CREATE TABLE \`linkis_stream_job_config_def\` (
+ \`id\` bigint(20) NOT NULL AUTO_INCREMENT,
+ \`key\` varchar(100) COLLATE utf8_bin NOT NULL,
+ \`name\` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Equals option',
+ \`type\` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT 'NONE' COMMENT 'def type, NONE: 0, INPUT: 1, SELECT: 2',
+ \`sort\` int(10) DEFAULT '0' COMMENT 'In order to sort the configurations that have the same level',
+ \`description\` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Description of configuration',
+ \`validate_type\` varchar(50) COLLATE utf8_bin DEFAULT NULL COMMENT 'Method the validate the configuration',
+ \`validate_rule\` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Value of validation rule',
+ \`style\` varchar(200) COLLATE utf8_bin DEFAULT '' COMMENT 'Display style',
+ \`visiable\` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: hidden, 1: display',
+ \`level\` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: root, 1: leaf',
+ \`unit\` varchar(25) COLLATE utf8_bin DEFAULT NULL COMMENT 'Unit symbol',
+ \`default_value\` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Default value',
+ \`ref_values\` varchar(200) COLLATE utf8_bin DEFAULT '',
+ \`parent_ref\` bigint(20) DEFAULT NULL COMMENT 'Parent key of configuration def',
+ \`required\` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'If the value of configuration is necessary',
+ \`is_temp\` tinyint(1) DEFAULT '0' COMMENT 'Temp configuration',
+ PRIMARY KEY (\`id\`),
+ UNIQUE KEY \`config_def_key\` (\`key\`)
+ ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+ DROP TABLE IF EXISTS \`linkis_stream_job_config\`;
+ CREATE TABLE \`linkis_stream_job_config\` (
+ \`job_id\` bigint(20) NOT NULL,
+ \`job_name\` varchar(200) COLLATE utf8_bin NOT NULL COMMENT 'Just store the job name',
+ \`key\` varchar(100) COLLATE utf8_bin NOT NULL,
+ \`value\` varchar(500) COLLATE utf8_bin NOT NULL,
+ \`ref_def_id\` bigint(20) DEFAULT NULL COMMENT 'Refer to id in config_def table',
+ PRIMARY KEY (\`job_id\`,\`key\`),
+ KEY \`config_def_id\` (\`ref_def_id\`)
+ ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+ /*Execute dml*/
+ source ${workDir}/db/streamis_dml.sql
+
+ /*Data migration*/
+ INSERT INTO \`linkis_stream_job_config\`(\`key\`, \`value\`, \`job_id\`, \`job_name\`, \`ref_def_id\`) SELECT ov.config_key, ov.config_value, ov.job_id, ov.job_name, d.id as refer_id from linkis_stream_configuration_config_value ov left join linkis_stream_job_config_def d on ov.config_key = d.key WHERE ov.config_value IS NOT NULL AND ov.job_name IS NOT NULL GROUP BY ov.job_id,ov.config_key;
+ UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.memory" WHERE \`key\` = "flink.taskmanager.memory";
+ UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.cpus" WHERE \`key\` = "flink.taskmanager.cpu.cores";
+ UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.cpus" WHERE \`key\` = "wds.linkis.flink.taskManager.cpus";
+ UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.numberOfTaskSlots" WHERE \`key\` = "flink.taskmanager.numberOfTaskSlots";
+ UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.app.parallelism" WHERE \`key\` = "wds.linkis.engineconn.flink.app.parallelism";
+ UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.jobmanager.memory" WHERE \`key\` = "flink.jobmanager.memory";
+ UPDATE linkis_stream_job_config c SET \`ref_def_id\` = (SELECT d.id FROM linkis_stream_job_config_def d WHERE d.\`key\` = c.\`key\`) WHERE c.ref_def_id IS NULL;
+ SELECT @flink_extra_param_id:=id FROM linkis_stream_job_config_def WHERE \`key\` = "wds.linkis.flink.custom";
+ UPDATE linkis_stream_job_config SET ref_def_id = @flink_extra_param_id WHERE ref_def_id IS NULL;
+
+ /*Drop tables*/
+ /*DROP TABLE \`linkis_stream_configuration_config_key\`*/
+ /*DROP TABLE \`linkis_stream_configuration_config_value\`*/
+
+ /*update tables data*/
+ delimiter %%
+
+ create procedure update_project()
+ BEGIN
+ -- 声明变量
+ DECLARE projectname varchar(50);
+ DECLARE done INT default 0;
+
+ -- 创建游标,并设置游标所指的数据
+ DECLARE cur CURSOR for
+ SELECT distinct j.project_name from linkis_stream_job j;
+ -- 游标执行完,即遍历结束。设置done的值为1
+ DECLARE CONTINUE HANDLER for not FOUND set done = 1;
+ -- 开启游标
+ open cur;
+ -- 执行循环
+ posLoop:
+ LOOP
+ -- 从游标中取出projectname
+ FETCH cur INTO projectname ;
+ -- 如果done的值为1,即遍历结束,结束循环
+ IF done = 1 THEN
+ LEAVE posLoop;
+ -- 注意,if语句需要添加END IF结束IF
+ END IF;
+ insert into linkis_stream_project(\`name\`,\`create_by\`,\`create_time\`) values (projectname,\'system\',now());
+ -- 关闭循环
+ END LOOP posLoop;
+ -- 关闭游标
+ CLOSE cur;
+ -- 关闭分隔标记
+ END %%
+
+ create procedure update_project_privilege()
+ BEGIN
+ -- 声明变量
+ DECLARE projectid bigint(20);
+ DECLARE create_by varchar(50);
+ DECLARE done INT default 0;
+
+ -- 创建游标,并设置游标所指的数据
+ DECLARE cur CURSOR for
+ SELECT distinct p.id,j.create_by from linkis_stream_project p,linkis_stream_job j where p.name =j.project_name ;
+ -- 游标执行完,即遍历结束。设置done的值为1
+ DECLARE CONTINUE HANDLER for not FOUND set done = 1;
+ -- 开启游标
+ open cur;
+ -- 执行循环
+ posLoop:
+ LOOP
+ -- 从游标中取出id
+ FETCH cur INTO projectid ,create_by;
+ -- 如果done的值为1,即遍历结束,结束循环
+ IF done = 1 THEN
+ LEAVE posLoop;
+ -- 注意,if语句需要添加END IF结束IF
+ END IF;
+
+ insert into linkis_stream_project_privilege (project_id ,user_name ,privilege) values (projectid,create_by,2);
+ -- 关闭循环
+ END LOOP posLoop;
+ -- 关闭游标
+ CLOSE cur;
+ -- 关闭分隔标记
+ END %%
+ delimiter ;
+
+ call update_project;
+ call update_project_privilege;
+
+ drop PROCEDURE update_project;
+ drop PROCEDURE update_project_privilege;
+
+EOF
+ echo "<------ End to upgrade ------>"
+fi
+
+
+
diff --git a/conf/config.sh b/conf/config.sh
new file mode 100644
index 000000000..4760e1f0f
--- /dev/null
+++ b/conf/config.sh
@@ -0,0 +1,45 @@
+### deploy user
+deployUser=hadoop
+
+### ssh port
+SSH_PORT=22
+
+##The Max Heap size for the JVM
+SERVER_HEAP_SIZE="512M"
+
+##The Port of Streamis
+STREAMIS_PORT=9400
+
+### The install home path of STREAMIS,Must provided
+STREAMIS_INSTALL_HOME=/appcom/Install/streamis
+
+### Linkis EUREKA information. # Microservices Service Registration Discovery Center
+EUREKA_INSTALL_IP=127.0.0.1
+EUREKA_PORT=20303
+
+### Specifies the user workspace, which is used to store the user's script files and log files.
+### Generally local directory
+#WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/
+#### Path to store job ResultSet:file or hdfs path
+#RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis
+
+### Linkis Gateway information
+GATEWAY_INSTALL_IP=127.0.0.1
+GATEWAY_PORT=9001
+
+
+################### The install Configuration of all Micro-Services #####################
+#
+# NOTICE:
+# 1. If you just wanna try, the following micro-service configuration can be set without any settings.
+# These services will be installed by default on this machine.
+# 2. In order to get the most complete enterprise-level features, we strongly recommend that you install
+# the following microservice parameters
+#
+
+STREAMIS_SERVER_INSTALL_IP=127.0.0.1
+STREAMIS_SERVER_INSTALL_PORT=9400
+
+STREAMIS_VERSION=0.2.4
+
+STREAMIS_FILE_NAME="STREAMIS-$STREAMIS_VERSION"
\ No newline at end of file
diff --git a/conf/db.sh b/conf/db.sh
new file mode 100644
index 000000000..176f14419
--- /dev/null
+++ b/conf/db.sh
@@ -0,0 +1,8 @@
+### MySQL connection settings for the Streamis database (used by install.sh and upgrade.sh)
+MYSQL_HOST=
+MYSQL_PORT=
+MYSQL_DB=
+MYSQL_USER=
+MYSQL_PASSWORD=
+
+
diff --git a/db/streamis_ddl.sql b/db/streamis_ddl.sql
new file mode 100644
index 000000000..144ecde68
--- /dev/null
+++ b/db/streamis_ddl.sql
@@ -0,0 +1,257 @@
+
+SET NAMES utf8mb4;
+SET FOREIGN_KEY_CHECKS = 0;
+
+--
+-- Table structure for table `linkis_stream_job_config_def`
+--
+
+DROP TABLE IF EXISTS `linkis_stream_job_config_def`;
+CREATE TABLE `linkis_stream_job_config_def` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `key` varchar(100) COLLATE utf8_bin NOT NULL,
+ `name` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Equals option',
+ `type` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT 'NONE' COMMENT 'def type, NONE: 0, INPUT: 1, SELECT: 2',
+ `sort` int(10) DEFAULT '0' COMMENT 'In order to sort the configurations that have the same level',
+ `description` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Description of configuration',
+ `validate_type` varchar(50) COLLATE utf8_bin DEFAULT NULL COMMENT 'Method the validate the configuration',
+ `validate_rule` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Value of validation rule',
+ `style` varchar(200) COLLATE utf8_bin DEFAULT '' COMMENT 'Display style',
+ `visiable` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: hidden, 1: display',
+ `level` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: root, 1: leaf',
+ `unit` varchar(25) COLLATE utf8_bin DEFAULT NULL COMMENT 'Unit symbol',
+ `default_value` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Default value',
+ `ref_values` varchar(200) COLLATE utf8_bin DEFAULT '',
+ `parent_ref` bigint(20) DEFAULT NULL COMMENT 'Parent key of configuration def',
+ `required` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'If the value of configuration is necessary',
+ `is_temp` tinyint(1) DEFAULT '0' COMMENT 'Temp configuration',
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `config_def_key` (`key`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+--
+-- Table structure for table `linkis_stream_job_config`
+--
+
+DROP TABLE IF EXISTS `linkis_stream_job_config`;
+CREATE TABLE `linkis_stream_job_config` (
+ `job_id` bigint(20) NOT NULL,
+ `job_name` varchar(200) COLLATE utf8_bin NOT NULL COMMENT 'Just store the job name',
+ `key` varchar(100) COLLATE utf8_bin NOT NULL,
+ `value` varchar(500) COLLATE utf8_bin NOT NULL,
+ `ref_def_id` bigint(20) DEFAULT NULL COMMENT 'Refer to id in config_def table',
+ PRIMARY KEY (`job_id`,`key`),
+ KEY `config_def_id` (`ref_def_id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_alarm_send_history
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_alarm_send_history`;
+CREATE TABLE `linkis_stream_job_alarm_send_history` (
+ `id` bigint(20) NOT NULL,
+ `job_id` bigint(20) NULL DEFAULT NULL,
+ `task_id` bigint(20) NULL DEFAULT NULL,
+ `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `type` tinyint(1) NULL DEFAULT NULL,
+ `rule_type` tinyint(1) NULL DEFAULT NULL,
+ `content` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '报警历史信息' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_alarm_send_history
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_checkpoints
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_checkpoints`;
+CREATE TABLE `linkis_stream_job_checkpoints` (
+ `id` bigint(20) NOT NULL,
+ `config_value_id` bigint(20) NULL DEFAULT NULL,
+ `path` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `size` int(20) NULL DEFAULT NULL,
+ `status` tinyint(1) NULL DEFAULT NULL,
+ `trigger_timestamp` datetime NULL DEFAULT NULL,
+ `latest_ack_timestamp` datetime NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_checkpoints
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_role
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_role`;
+CREATE TABLE `linkis_stream_job_role` (
+ `id` bigint(20) NOT NULL,
+ `job_id` bigint(20) NULL DEFAULT NULL,
+ `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `front_name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `update_time` datetime NULL DEFAULT NULL,
+ `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_role
+-- ----------------------------
+INSERT INTO `linkis_stream_job_role` VALUES (1, -1, '管理员', '管理员', '2021-04-07 20:57:09', NULL);
+
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_user_role
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_user_role`;
+CREATE TABLE `linkis_stream_job_user_role` (
+ `id` bigint(20) NOT NULL,
+ `job_id` bigint(20) DEFAULT NULL,
+ `user_id` bigint(20) DEFAULT NULL,
+ `role_id` bigint(20) DEFAULT NULL,
+ `username` varchar(100) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+ ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='作业角色关系';
+
+-- ----------------------------
+-- Records of linkis_stream_job_user_role
+-- ----------------------------
+
+/*Table structure for table `linkis_stream_job` */
+
+DROP TABLE IF EXISTS `linkis_stream_job`;
+
+CREATE TABLE `linkis_stream_job` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `project_name` varchar(100) DEFAULT NULL,
+ `name` varchar(200) DEFAULT NULL,
+ `status` tinyint(1) DEFAULT '0' COMMENT '1:已完成 ,2:等待重启 ,3:告警 ,4:慢任务 ,5:运行中 ,6:失败任务',
+ `create_by` varchar(50) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `label` varchar(200) DEFAULT NULL,
+ `description` varchar(200) DEFAULT NULL,
+ `job_type` varchar(30) DEFAULT NULL COMMENT '目前只支持flink.sql、flink.jar',
+ `submit_user` varchar(100) DEFAULT NULL,
+ `workspace_name` varchar(50) DEFAULT NULL,
+ `current_version` varchar(50) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY(`project_name`, `name`)
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='作业表';
+
+/*Table structure for table `linkis_stream_job_version` */
+
+DROP TABLE IF EXISTS `linkis_stream_job_version`;
+
+CREATE TABLE `linkis_stream_job_version` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `job_id` varchar(50) DEFAULT NULL,
+ `version` varchar(20) DEFAULT NULL,
+ `source` varchar(255) DEFAULT NULL COMMENT '这个版本的来源,比如:用户上传,由某个历史版本回退回来的',
+ `job_content` text COMMENT '内容为meta.json',
+ `comment` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `create_by` varchar(32) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY(`job_id`, `version`)
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='作业表';
+
+/*Table structure for table `linkis_stream_job_version_files` */
+
+DROP TABLE IF EXISTS `linkis_stream_job_version_files`;
+
+CREATE TABLE `linkis_stream_job_version_files` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `job_id` varchar(50) NOT NULL,
+ `job_version_id` bigint(20) NOT NULL,
+ `file_name` varchar(500) DEFAULT NULL,
+ `version` varchar(30) DEFAULT NULL COMMENT '文件版本号,由用户上传时指定的',
+ `store_path` varchar(100) DEFAULT NULL COMMENT '如:{"resource":"22edar22", "version": "v0001"}',
+ `store_type` varchar(20) DEFAULT NULL COMMENT '存储类型,一般就是bml',
+ `create_time` datetime DEFAULT NULL,
+ `create_by` varchar(32) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
+
+/*Table structure for table `linkis_stream_project` */
+
+DROP TABLE IF EXISTS `linkis_stream_project`;
+
+CREATE TABLE `linkis_stream_project` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `workspace_id` bigint(20) DEFAULT NULL,
+ `name` varchar(100) DEFAULT NULL,
+ `create_by` varchar(50) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表';
+
+/*Table structure for table `linkis_stream_project_files` */
+
+DROP TABLE IF EXISTS `linkis_stream_project_files`;
+
+CREATE TABLE `linkis_stream_project_files` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `file_name` varchar(500) DEFAULT NULL,
+ `version` varchar(30) DEFAULT NULL COMMENT '文件版本号,由用户上传时指定的',
+ `store_path` varchar(100) DEFAULT NULL COMMENT '如:{"resource":"22edar22", "version": "v0001"}',
+ `store_type` varchar(20) DEFAULT NULL COMMENT '存储类型,一般就是bml',
+ `project_name` varchar(50) DEFAULT NULL,
+ `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+ `create_by` varchar(32) DEFAULT NULL,
+ `comment` varchar(255) DEFAULT NULL COMMENT '说明',
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表';
+
+/*Table structure for table `linkis_stream_task` */
+
+DROP TABLE IF EXISTS `linkis_stream_task`;
+
+CREATE TABLE `linkis_stream_task` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `job_version_id` bigint(20) NOT NULL,
+ `job_id` varchar(200) DEFAULT NULL,
+ `version` varchar(50) DEFAULT NULL,
+ `status` int(3) DEFAULT NULL,
+ `start_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `err_desc` varchar(10240) DEFAULT NULL,
+ `submit_user` varchar(50) DEFAULT NULL,
+ `linkis_job_id` varchar(200) DEFAULT NULL,
+ `linkis_job_info` mediumtext,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='任务表';
+
+DROP TABLE IF EXISTS `linkis_stream_alert_record`;
+
+CREATE TABLE `linkis_stream_alert_record` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `alert_level` varchar(20) NOT NULL DEFAULT 'critical' COMMENT '告警级别',
+ `alert_user` varchar(20) NOT NULL COMMENT '告警用户',
+ `alert_msg` varchar(200) NOT NULL COMMENT '告警信息',
+ `job_id` bigint(20) NOT NULL,
+ `job_version_id` bigint(20) DEFAULT NULL,
+ `task_id` bigint(20) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `status` bigint(2) DEFAULT '1' COMMENT '''1为成功,0为失败''',
+ `error_msg` varchar(200) DEFAULT NULL COMMENT '告警发送失败后的错误信息',
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
+
+DROP TABLE IF EXISTS `linkis_stream_project_privilege`;
+
+CREATE TABLE `linkis_stream_project_privilege` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `project_id` bigint(20) NOT NULL,
+ `user_name` varchar(100) NOT NULL,
+ `privilege` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:发布权限 ,2:编辑权限 ,3:查看权限',
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='项目权限表';
+
+
+ALTER TABLE `linkis_stream_project` ADD create_time datetime DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD last_update_by varchar(50) DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD last_update_time datetime DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD is_deleted tinyint unsigned DEFAULT 0;
+
+SET FOREIGN_KEY_CHECKS = 1;
diff --git a/db/streamis_dml.sql b/db/streamis_dml.sql
new file mode 100644
index 000000000..85bd4bc96
--- /dev/null
+++ b/db/streamis_dml.sql
@@ -0,0 +1,25 @@
+-- ----------------------------
+-- Records of linkis_stream_job_config_def
+-- ----------------------------
+
+INSERT INTO `linkis_stream_job_config_def` VALUES (1,'wds.linkis.flink.resource','资源配置','NONE',0,'资源配置','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (2,'wds.linkis.flink.app.parallelism','Parallelism并行度','NUMBER',0,'Parallelism并行度','Regex','^([1-9]\\d{0,1}|100)$','',1,1,NULL,'4','',1,1,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (3,'wds.linkis.flink.jobmanager.memory','JobManager Memory (M)','NUMBER',0,'JobManager Memory (M)','Regex','^([1-9]\\d{0,4}|100000)$','',1,1,'M','1024','',1,1,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (4,'wds.linkis.flink.taskmanager.memory','TaskManager Memory (M)','NUMBER',0,'TaskManager Memory (M)','Regex','^([1-9]\\d{0,4}|100000)$','',1,1,'M','4096','',1,1,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (5,'wds.linkis.flink.taskmanager.numberOfTaskSlots','TaskManager Slot数量','NUMBER',0,'TaskManager Slot数量','Regex','^([1-9]\\d{0,1}|100)$','',1,1,NULL,'2','',1,1,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (6,'wds.linkis.flink.taskmanager.cpus','TaskManager CPUs','NUMBER',0,'TaskManager CPUs','Regex','^([1-9]\\d{0,1}|100)$','',1,1,NULL,'2','',1,1,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (7,'wds.linkis.flink.custom','Flink参数','NONE',0,'Flink自定义参数','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (8,'wds.linkis.flink.produce','生产配置','NONE',0,'生产配置','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (9,'wds.linkis.flink.checkpoint.switch','Checkpoint开关','SELECT',0,'Checkpoint开关',NULL,NULL,'',1,1,'','OFF','ON,OFF',8,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (10,'wds.linkis.flink.savepoint.path','快照(Savepoint)文件位置【仅需恢复任务时指定】','INPUT',4,'快照(Savepoint)文件位置','None',NULL,'',1,1,NULL,NULL,'',8,0,1);
+INSERT INTO `linkis_stream_job_config_def` VALUES (11,'wds.linkis.flink.alert','告警设置','NONE',0,'告警设置','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (12,'wds.linkis.flink.alert.rule','告警规则','NONE',0,'告警规则','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (13,'wds.linkis.flink.alert.user','告警用户','NONE',0,'告警用户',NULL,NULL,'',1,1,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (14,'wds.linkis.flink.alert.level','告警级别','NONE',0,'告警级别','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (15,'wds.linkis.flink.alert.failure.level','失败时告警级别','NONE',0,'失败时告警级别','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (16,'wds.linkis.flink.alert.failure.user','失败时告警用户','NONE',0,'失败时告警用户','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (32,'wds.linkis.flink.authority','权限设置','NONE',0,'权限设置','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (33,'wds.linkis.flink.authority.visible','可见人员','INPUT',0,'可见人员','None',NULL,'',1,1,NULL,NULL,'',32,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (34,'wds.linkis.rm.yarnqueue','使用Yarn队列','INPUT',0,'使用Yarn队列','None',NULL,'',1,1,NULL,NULL,'',1,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (35,'wds.linkis.flink.app.fail-restart.switch','作业失败自动拉起开关','SELECT',1,'作业失败自动拉起开关','None',NULL,'',1,1,NULL,'OFF','ON,OFF',8,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (36,'wds.linkis.flink.app.start-auto-restore.switch','作业启动状态自恢复','SELECT',2,'作业启动状态自恢复','None',NULL,'',1,1,NULL,'ON','ON,OFF',8,0,0);
\ No newline at end of file
diff --git a/docs/en_US/0.2.0/StreamisDeployment.md b/docs/en_US/0.2.0/StreamisDeployment.md
new file mode 100644
index 000000000..429d7d8f2
--- /dev/null
+++ b/docs/en_US/0.2.0/StreamisDeployment.md
@@ -0,0 +1,165 @@
+# Streamis installation and deployment documentation
+
+## 1. Component introduction
+Streamis0.2.4 provides the Streamis-JobManager component, the role of the component is
+1. Publish streaming applications
+2. Set streaming application parameters, such as the number of Flink slots, checkpoint related parameters, etc.
+3. Manage streaming applications (e.g. start and stop)
+4. Streaming application monitoring
+
+
+## 2. Code compilation
+Streamis does not require manual compilation. You can download the installation package directly for deployment. Please [click to download the installation package](https://github.com/WeBankFinTech/Streamis/releases)
+
+If you have already obtained the installation package, you can skip this step
+
+- The background compilation method is as follows
+```
+cd ${STREAMIS_CODE_HOME}
+mvn -N install
+mvn clean install
+```
+After successful compilation, the installation package will be generated in the 'assembly/target' directory of the project
+
+- The front-end compilation method is as follows
+
+Pre dependency: nodejs, python 2.0
+
+```bash
+cd ${STREAMIS_CODE_HOME}/web
+npm i
+npm run build
+```
+After the compilation is successful, the installation package will be generated in the `${STREAMIS_CODE_HOME}/web` directory
+
+## 3. Installation preparation
+### 3.1 Basic environment installation
+ The following software must be installed:
+
+- MySQL (5.5+), [How to install MySQL](https://www.runoob.com/mysql/mysql-install.html)
+- JDK (above 1.8.0_141), [How to install JDK](https://www.runoob.com/java/java-environment-setup.html)
+
+### 3.2 Linkis and DSS environments
+- The execution of Streamis depends on Linkis, and it needs to be version 1.1.1 and above, so you need to install Linkis above 1.1.1 and ensure that the Flink engine can be used normally.Some functions need to be supported by linkis-1.1.2.
+- Datasphere studio (> =1.1.0), the development and debugging of streaming jobs depend on DSS scriptis, and the streaming production center needs to be embedded in the DSS engineering framework system, so it depends on * * dss-1.1.0 * * and above.
+
+Before the formal installation of streamis, please install linkis-1.1.1 and dss-1.1.0 or above, and ensure that the linkis Flink engine and DSS can be used normally. For the installation of DSS and linkis, please refer to the [dss & linkis one click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
+
+How to verify that DSS and linkis are basically available? You can create a flinksql script on DSS scriptis and execute it. If flinksql can execute correctly and return the result set, it means that the DSS and linkis environments are available.
+
+
+## 4. Installation and startup
+
+### Background installation
+
+1.installation package preparation
+
+Upload the installation package to the installation directory of the Linux server (currently only supports linux environment deployment), such as /appcom/Install/streamis, and then extract it:
+
+```bash
+cd /appcom/Install/streamis
+tar -xvf wedatasphere-streamis-${streamis-version}-dist.tar.gz
+```
+
+2.Modify the database configuration
+```bash
+vi conf/db.sh
+#Configure basic database information
+```
+
+3.Modify the basic configuration file
+
+```bash
+vi conf/config.sh
+#Configure service port information
+#Configure Linkis service information
+```
+4.Installation
+```bash
+sh bin/install.sh
+```
+
+- The install.sh script will ask you if you need to initialize the database and import metadata.
+
+ To prevent repeated executions of the install.sh script from clearing existing user data in the database, each time install.sh is executed the user will be asked whether the database needs to be initialized and metadata imported.
+
+ **Yes must be selected for the first installation**.
+
+5.start up
+```bash
+sh bin/start.sh
+```
+
+- Start verification
+Verification method, because Streamis and Linkis use a set of Eureka, you need to check whether the Eureka page of Linkis already contains Streamis services, as shown in the figure,
+![components](../../images/zh_CN/eureka_streamis.png)
+
+
+
+### Front-end deployment
+
+1.Install nginx
+
+```bash
+sudo yum install -y nginx
+```
+2.Deploy the front-end package
+```
+mkdir ${STREAMIS_FRONT_PATH}
+cd ${STREAMIS_FRONT_PATH}
+#Place the front-end package
+unzip streamis-{streamis-version}.zip
+```
+3.Modify the nginx configuration file
+
+```bash
+cd /etc/nginx/conf.d
+vi streamis.conf
+# Copy the following template and modify it according to the actual situation
+```
+```
+server {
+ listen 9088;# access port
+ server_name localhost;
+ location / {
+ root ${STREAMIS_FRONT_PATH}; # Please modify it to the appropriate static file directory of Streamis
+ index index.html index.htm;
+ }
+ location /api {
+ proxy_pass http://${LINKIS_GATEWAY_IP}:${LINKIS_GATEWAY_PORT}; #Back-end Linkis address, please modify it to the ip and port of the Linkis gateway
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header x_real_ipP $remote_addr;
+ proxy_set_header remote_addr $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_http_version 1.1;
+ proxy_connect_timeout 4s;
+ proxy_read_timeout 600s;
+ proxy_send_timeout 12s;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection upgrade;
+ }
+
+ #error_page 404 /404.html;
+ # redirect server error pages to the static page /50x.html
+ #
+ error_page 500 502 503 504 /50x.html;
+ location = /50x.html {
+ root /usr/share/nginx/html;
+ }
+}
+```
+4.Load nginx configuration
+```bash
+sudo nginx -s reload
+```
+
+## 5. Access to DSS
+
+If you want to use the streamis0.2.4 front end normally, you also need to install the DSS StreamisAppConn plug-in. Please refer to: [StreamisAppConn plug-in installation document](development/StreamisAppConnInstallationDocument.md)
+
+## 6. Linkis Flink engine compilation and installation
+If you want to run streamis0.2.4 normally, you also need to install the linkis Flink engine. Please refer to: [linkis Flink engine installation document](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/)
+
+## 7. Streamis component upgrade document / script
+If you want to upgrade from a lower version of streamis to streamis0.2.4, please refer to: [streamis upgrade document](development/StreamisUpgradeDocumentation.md)
diff --git a/docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md b/docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md
new file mode 100644
index 000000000..8a25eaee8
--- /dev/null
+++ b/docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md
@@ -0,0 +1,178 @@
+# Streamis access AppConn
+
+## Overall flow chart
+![Streamis access DSS](../../../images/streamis_appconn_en.png)
+
+## DSS project APPCONN plug-in streamis-appconn
+
+### The configuration table
+Configure the following four tables: dss_workspace_dictionary, dss_appconn, dss_workspace_menu_appconn and dss_appconn_instance. The appconn_name 'realTimeJobCenter' is the AppConn accessed through the graphical interface, while the appconn_name 'streamis' is the AppConn accessed through the API. The StreamisAppConn object is instantiated based on the configuration information in these tables when DSS is started. The APPCONN_INSTALL_IP and APPCONN_INSTALL_PORT placeholders in the following SQL are filled in via interactive commands when the DSS installation script performs the automatic installation.
+```roomsql
+delete from `dss_workspace_dictionary` WHERE `appconn_name` = 'streamis';
+
+INSERT INTO `dss_workspace_dictionary` ( `workspace_id`, `parent_key`, `dic_name`, `dic_name_en`, `dic_key`, `dic_value`, `dic_value_en`, `title`, `title_en`, `url`, `url_type`,`icon`, `order_num`, `remark`, `create_user`, `create_time`, `update_user`, `update_time`, appconn_name)
+VALUES ('0','p_develop_process','流式生产中心','Streamis Product Center','pdp_streamis_product_center','streamis_prod',NULL,NULL,NULL,
+'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realtimeJobCenter?projectName=${projectName}&workspaceName=${workspaceName}','0','kaifa-icon','1','工程开发流程-流式生产中心','SYSTEM','2020-12-28 17:32:35',NULL,'2022-06-30 17:49:02','streamis');
+
+select @old_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis';
+
+delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_dss_appconn_id;
+delete from `dss_appconn_instance` where `appconn_id` = @old_dss_appconn_id;
+delete from `dss_appconn` where `appconn_name`='streamis';
+
+select @old_jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter';
+
+delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_jobcenter_dss_appconn_id;
+delete from `dss_appconn_instance` where `appconn_id` = @old_jobcenter_dss_appconn_id;
+delete from `dss_appconn` where `appconn_name`='realTimeJobCenter';
+
+INSERT INTO dss_appconn
+(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource)
+VALUES('streamis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.streamis.dss.appconn.StreamisAppConn', NULL, NULL);
+INSERT INTO dss_appconn
+(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource)
+VALUES('realTimeJobCenter', 0, 1, 1, 1, 'sso', '', NULL, NULL);
+
+select @dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis';
+select @jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter';
+
+INSERT INTO dss_workspace_menu_appconn
+(appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
+VALUES(@jobcenter_dss_appconn_id, 1, 'StreamSQL development', 'StreamSQL开发', 'Real-time application development is a streaming solution jointly built by WeDataSphere, Boss big data team and China Telecom ctcloud Big data team.', '实时应用开发是微众银行微数域(WeDataSphere)、Boss直聘大数据团队 和 中国电信天翼云大数据团队 社区联合共建的流式解决方案,以 Linkis 做为内核,基于 Flink Engine 构建的批流统一的 Flink SQL,助力实时化转型。',
+'streaming, realtime', '流式,实时', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'http://127.0.0.1:8088/wiki/scriptis/manual/workspace_cn.html', 'shujukaifa-logo', NULL, NULL, NULL, NULL, NULL, 'shujukaifa-icon');
+
+INSERT INTO dss_appconn_instance
+(appconn_id, label, url, enhance_json, homepage_uri)
+VALUES(@dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter');
+
+INSERT INTO dss_appconn_instance
+(appconn_id, label, url, enhance_json, homepage_uri)
+VALUES(@jobcenter_dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter', NULL, NULL);
+```
+
+### Concrete implementation description
+StreamisAppConn extends AbstractOnlySSOAppConn and implements SecondlyAppConn, overriding the method that creates StreamisStructureIntegrationStandard. StreamisAppConn currently only implements password-free (SSO) login and the organizational structure specification capabilities.
+
+To create a StreamisProjectService by rewriting the methods in StreamisStructureIntegrationStandard, four methods need to be rewritten internally, and four operation classes will be created: StreamisProjectSearchOperation, StreamisProjectCreationOperation, StreamisProjectUpdateOperation, and StreamisPrejectDeleteOperation. The operation class calls the streamis application through HTTP to query, create, modify and delete project respectively, and synchronize the DSS project information to Streamis.
+
+- When DSS creates a project, it will first call the query operation to query whether the same project name already exists in streamis. If it does, a prompt will pop up. If it does not exist, it will continue to call the create operation to create a new project in streamis;
+- When DSS modifies a project, it will call the modify operation to update the project information in streamis;
+- When DSS deletes an item, it will call the delete operation to delete the item information in streamis and change the deletion mark.
+
+## API
+1 API name: query project
+- API path:GET /streamis/project/searchProject
+- Request parameters
+
+|Parameter name |Whether it is necessary |Example |remarks |
+|-------------|---------|-------|--------|
+|projectName |yes | | |
+
+- Return data
+
+|name |type |Whether it is necessary |Default |remarks |
+|-------------|--------|---------|---------|--------|
+|method |string |no |
+|status |number |yes |
+|message |string |no |
+|data |object |yes |
+|- projectId |number |yes |
+
+2 API name:create project
+- API path:POST /streamis/project/createProject
+- Request parameters
+
+|Parameter name |Whether it is necessary |Example |remarks |
+|-------------|---------|-------|--------|
+|projectName |yes | | |
+|workspaceId |no | | |
+|releaseUsers |no | | |
+|editUsers |no | | |
+|accessUsers |no | | |
+
+- Return data
+
+|name |type |Whether it is necessary |Default |remarks |
+|----------|--------|---------|---------|--------|
+|method |string |no |
+|status |number |yes |
+|message |string |no |
+|data |object |yes |
+|- projectId |number |yes |
+|- projectName |string |no |
+
+3 API name:update project
+- API path:PUT /streamis/project/updateProject
+- Request parameters
+
+|Parameter name |Whether it is necessary |Example |remarks |
+|-------------|---------|-------|--------|
+|projectId |yes | | |
+|projectName |yes | | |
+|workspaceId |no | | |
+|releaseUsers |no | | |
+|editUsers |no | | |
+|accessUsers |no | | |
+
+- Return data
+
+|name |type |Whether it is necessary |Default |remarks |
+|----------|--------|---------|---------|--------|
+|method |string |no |
+|status |number |yes |
+|message |string |no |
+|data |object |no |
+
+4 API name:delete project
+- API path:DELETE /streamis/project/deleteProject
+- Request parameters
+
+|Parameter name |Whether it is necessary |Example |remarks |
+|-------------|---------|-------|--------|
+|projectId |yes | | |
+|projectName |no | | |
+
+- Return data
+
+|name |type |Whether it is necessary |Default |remarks |
+|----------|--------|---------|---------|--------|
+|method |string |no |
+|status |number |yes |
+|message |string |no |
+|data |object |no |
+
+## Streamis project streamis-project-server
+
+### Related table operation
+There are 2 tables involved in Streamis: linkis_stream_project and linkis_stream_project_privilege.
+```roomsql
+--table already exists
+CREATE TABLE `linkis_stream_project` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `workspace_id` bigint(20) DEFAULT NULL,
+ `name` varchar(100) DEFAULT NULL,
+ `create_by` varchar(50) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='project table';
+
+--newly added table
+CREATE TABLE `linkis_stream_project_privilege` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `project_id` bigint(20) NOT NULL,
+ `user_name` varchar(100) NOT NULL,
+ `privilege` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:RELEASE ,2:EDIT ,3:ACCESS',
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='project privilege table';
+
+--newly added field
+ALTER TABLE `linkis_stream_project` ADD create_time datetime DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD last_update_by varchar(50) DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD last_update_time datetime DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD is_deleted tinyint unsigned DEFAULT 0;
+```
+
+### Concrete implementation description
+- The query operation takes the request parameter projectName and queries the corresponding ID in the table linkis_stream_project. If the query operation is successful, the returned status is 0 and projectId is the queried ID; if the query result is empty, projectId is null;
+- The creation operation will insert the project information (projectName、workspaceId) in the request parameters into the project table linkis_stream_project and auto increment the generated ID, associate the user in the permission information data (releaseUsers、editUsers、accessUsers) in the request parameters with the generated ID of the project table, and insert the table linkis_stream_project_privilege, the status value returned from the successful creation operation is 0, and the project table generation ID will be returned as the value of projectId;
+- The modification operation will update the request information data to the table linkis_stream_project and linkis_stream_project_privilege, the status value returned successfully is 0;
+- In the delete operation, the is_deleted field of the table linkis_stream_project will be marked as 1 according to the projectId. The relevant data in the table linkis_stream_project_privilege will be deleted, the status value returned successfully is 0.
diff --git a/docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md b/docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md
new file mode 100644
index 000000000..6013b3b4a
--- /dev/null
+++ b/docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md
@@ -0,0 +1,43 @@
+# Authentication
+
+## Authentication flow chart
+In Streamis, the module that needs authentication does not rely on the Streamis project server module. The rest interface is called to handle authentication.
+
+![Streamis project authentication operation](../../../images/streamis_project_privilege_en.png)
+
+## Specific implementation instructions
+Get the set of all permissions according to the current user name and project id/name. If the permission set contains RELEASE permission, you have the permission to publish/edit/view; if the permission set contains EDIT permission, you have the permission to edit/view; if the permission set contains ACCESS permission, you have the permission to view.
+Permission inclusion relationship: RELEASE permission includes EDIT permission and ACCESS permission; edit permission includes ACCESS permission.
+
+### edit privilege API:
+
+|RequestMethod |API path |name |
+|------|----------------------------------------------------------|-----------------|
+|POST |/streamis/streamProjectManager/project/files/upload |Project resource file - Import |
+|GET |/streamis/streamProjectManager/project/files/delete |Delete all versions of the file under the project |
+|GET |/streamis/streamProjectManager/project/files/version/delete |Delete version file |
+|GET |/streamis/streamProjectManager/project/files/download |Task details - Download |
+|POST |streamis/streamJobManager/job/createOrUpdate |create or Update streamis-job|
+|POST |/streamis/streamJobManager/job/upload |Upload file |
+|POST |/streamis/streamJobManager/job/execute |start-up |
+|GET |/streamis/streamJobManager/job/stop |stop |
+|PUT |/streamis/streamJobManager/job//snapshot/{jobId:\w+} |Snapshot generation |
+|GET |/streamis/streamJobManager/config/json/{jobId:\w+} |Configuration - save |
+|POST |/streamis/streamJobManager/job/bulk/execution |Batch start |
+|POST |/streamis/streamJobManager/job/bulk/pause |Batch stop |
+
+
+### access privilege API:
+
+|RequestMethod |API path |name |
+|------|----------------------------------------------------------|-------------|
+|GET |streamis/streamJobManager/job/list |Query the jobs that the current user can view |
+|GET |/streamis/streamProjectManager/project/files/list |Project resource document |
+|GET |/streamis/streamProjectManager/project/files/version/list |Obtain all versions of the file under the project |
+|GET |/streamis/streamJobManager/job/version |Query job version |
+|GET |/streamis/streamJobManager/job/execute/history |Job execution history |
+|GET |/streamis/streamJobManager/job/progress |Get the latest task status of the current version of the job |
+|GET |/streamis/streamJobManager/job/jobContent |Task details |
+|GET |/streamis/streamJobManager/job/logs |Get log |
+|POST |/streamis/streamJobManager/config/json/{jobId:\w+} |Get task configuration |
+|GET |/streamis/streamJobManager/config/view |Query the current job configuration information |
\ No newline at end of file
diff --git a/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md b/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md
new file mode 100644
index 000000000..6891d5e92
--- /dev/null
+++ b/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md
@@ -0,0 +1,73 @@
+StreamisAppConn installation document. This article mainly introduces the deployment, configuration and installation of StreamisAppConn in DSS (DataSphere Studio) 1.1.0.
+
+# 1. Preparation for deploying streamisappconn
+Before deploying streamisappconn, please complete the installation of streamis0.2.4 and other related components, and ensure that the basic functions of the project are available.
+
+# 2. Download and compilation of streamisappconn plug-in
+1) Download binary package
+
+We provide the material package of streamisappconn, which you can download directly. [Click to jump to the release interface](https://github.com/WeBankFinTech/Streamis/releases)
+
+2) Compile package
+
+If you want to develop and compile StreamisAppConn yourself, the specific compilation steps are as follows: 1. Clone the Streamis code; 2. Find the streamis-appconn module and compile streamis-appconn separately
+```shell script
+cd {STREAMIS_CODE_HOME}/streamis-appconn
+mvn clean install
+```
+The streamis.zip installation package will be found under this path
+```shell script
+{STREAMIS_CODE_HOME}\streamis-appconn\target\streamis.zip
+```
+
+# 3. Overall steps for deployment and configuration of streamisappconn plug-in
+1. get the packed streamis.zip material package
+
+2. place it in the following directory and unzip it
+
+Note: after extracting streamis-appconn for the first time, make sure that there is no index_v0000XX.index file in the current folder; it will be generated later
+```shell script
+cd {DSS_Install_HOME}/dss/dss-appconns
+unzip streamis.zip
+```
+The extracted directory structure is:
+```shell script
+conf
+db
+lib
+```
+3. execute scripts for automatic installation
+ ```shell script
+cd {DSS_INSTALL_HOME}/dss/bin
+sh ./appconn-install.sh
+# Script is an interactive installation scheme. You need to enter the string streamis and the IP and port of streamis service to complete the installation
+# The streamis port here refers to the front-end port, which is configured in nginx. Instead of the back-end service port
+```
+
+## 4. After the installation of streamis appconn, you need to restart the DSS service to finally complete the plug-in update
+### 4.1) make the deployed appconn effective
+Use the DSS start / stop script to make appconn effective. Enter the directory {DSS_INSTALL_HOME}/dss/sbin where the script is located, and execute the script using the following commands in sequence:
+```shell script
+sh ./dss-stop-all.sh
+sh ./dss-start-all.sh
+```
+### 4.2) verify whether streamis appconn is effective
+After installing and deploying streamis appconn, you can preliminarily verify whether the streamis appconn is successfully installed through the following steps.
+
+Create a new project in the DSS workspace
+![DSS_workspace_Streamis_project](../../../images/zh_CN/dss_streamis_project.png)
+
+Check whether the project is created synchronously in the streamis database. Query the records indicating that appconn is successfully installed
+```roomsql
+SELECT * FROM linkis_stream_project WHERE name = ' input project_name ';
+```
+
+# 5. Installation principle of streamis appconn
+The relevant configuration information of streamis will be inserted into the following table. The use configuration of streamis can be completed by configuring the following table. (Note: if you only need to quickly install appconn, you don't need to pay too much attention to the following fields. Most of the init.sql provided are configured by default. Focus on the above operations.)
+
+|table name |table function |remarks |
+|-------------------|-----------------------------------------|------|
+|dss_workspace_dictionary |Configuring a streaming production center |must|
+|dss_appconn |Basic information of appconn, used to load appconn |must|
+|dss_workspace_menu_appconn |Appconn menu, front-end connection to streamis |must|
+|dss_appconn_instance |The instance information of appconn, including its own URL information |must|
diff --git a/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md b/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md
new file mode 100644
index 000000000..3e4bd2b4a
--- /dev/null
+++ b/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md
@@ -0,0 +1,44 @@
+Streamis upgrade document. This article mainly introduces the upgrade steps of adapting DSS1.1.0 and linkis1.1.1 based on the original installation of Streamis service. The biggest difference between Streamis 0.2.4 and Streamis 0.1.0 is that it accesses DSS appconn and optimizes the start and stop of jobs.
+
+# 1. Work before upgrading streamis
+Before upgrading Streamis, please install linkis1.1.1 and DSS1.1.0 or above, and ensure that the linkis Flink engine and DSS can be used normally. For the installation of DSS and linkis, please refer to [dss & linkis one click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
+
+# 2. Streamis upgrade steps
+
+## Install streamisappconn
+1) Delete the old version of StreamisAppconn package
+
+Enter the following directory, find the appconn folder of streamis and delete it, if any:
+```shell script
+{DSS_Install_HOME}/dss/dss-appconns
+```
+
+2) StreamisAppconn installation deployment
+
+Install the DSS StreamisAppConn plug-in. Please refer to: [StreamisAppConn plug-in installation document](development/StreamisAppConnInstallationDocument.md)
+
+## Installing the Streamis backend
+Update Lib in the obtained installation package to the path 'streamis-server/lib' under the streamis installation directory, and the file contents under 'streamis-server/conf' can be updated as needed.
+
+Enter the installation directory and execute the update script to complete the update of database table structure and data:
+```shell script
+cd {Streamis_Install_HOME}
+sh bin/upgrade.sh
+```
+
+Then complete the update and restart of the Streamis server through the following command:
+```shell script
+cd {Streamis_Install_HOME}/streamis-server
+sh bin/stop-streamis-server.sh
+sh bin/start-streamis-server.sh
+```
+
+## Installing the Streamis front end
+First delete the front-end directory folder of the old version, and then replace it with the new front-end installation package.
+```
+mkdir ${STREAMIS_FRONT_PATH}
+cd ${STREAMIS_FRONT_PATH}
+#1.Delete front-end directory folder
+#2.Place the front-end package
+unzip streamis-${streamis-version}.zip
+```
\ No newline at end of file
diff --git a/docs/en_US/userManual/StreamisUserManual.md b/docs/en_US/userManual/StreamisUserManual.md
new file mode 100644
index 000000000..86a65490b
--- /dev/null
+++ b/docs/en_US/userManual/StreamisUserManual.md
@@ -0,0 +1,205 @@
+# Streamis quick start
+
+## 1. Preface
+
+ This article is a quick start document for Streamis 0.2.4, which covers the basic usage process of Streamis. More details on operation and usage will be provided in the user documentation.
+
+
+## 2. Streamis entrance
+ For the convenience of users, **the Streamis system is embedded in the DSS system in the form of DSS components**
+
+The entry path is **Home-DSS component application-Enter Streamis**
+
+![Streamis entrance](../../images/create_stream_product_center_en.png)
+
+Picture 2.1 Streamis entrance
+
+## 3. Core indicators
+
+ Entering the homepage, the top half shows the core indicators.
+
+ The core indicator shows the status summary of the Flink tasks uploaded to the project for execution. There are temporarily 7 states, showing the state name and the number of tasks in that state. The specific content is as shown in the figure below.
+
+![Core indicators](../../images/home_page_en.png)
+Picture 3.1 Core indicators
+
+## 4. Job management
+
+  Support the release of Flink Jar and Flink SQL, and provide the development and debugging and production management capabilities of streaming applications, such as: start and stop, status monitoring, checkpoint, etc.
+
+## 4.1. Upload job
+
+ Click **"Upload"** and select the zip file to upload. The file is divided into two job types, Flink Jar and Flink SQL, and the format is as follows:
+
+### 4.1.1. The contents of the Flink Jar ZIP file are as follows:
+
+![jar zip](../../images/jarZip.png)
+Picture 4.1 Flink Jar ZIP
+
+
+
+#### Flink Jar meta.json:
+
+```json
+{
+ "projectName": "flinkJarTest3",
+ "jobName": "flinkJarTestc",
+ "jobType": "flink.jar",
+ "tags": "e,t,y,h,g",
+ "description": "test of FlinkJar Job3",
+ "jobContent": {
+ "main.class.jar": "frauddetection-0.1.jar",
+ "main.class": "spendreport.FraudDetectionJob",
+ "args": ["aa","bb"],
+ "hdfs.jars": [],
+ "dependency.jars": [],
+ "resources": []
+ }
+}
+```
+
+### 4.1.2. Flink SQL ZIP
+
+ The SQL type file is just a ZIP compressed package of the meta.json file, and its content is as follows:
+
+#### Flink SQL meta.json:
+
+```json
+{
+ "projectName": "flinkSqlTestD",
+ "jobName": "flinkSqlTesta",
+ "jobType": "flink.sql",
+ "tags": "a,b,c",
+ "description": "test FlinkSql JobD",
+ "jobContent": {
+ "type": "sql",
+ "sql": "select 1",
+ "file": "",
+ "resourceId": "",
+ "version": ""
+ }
+}
+
+```
+
+
+
+ After the ZIP file is uploaded successfully, the task is displayed in the task list in Figure 3.1, and operations such as start, stop, configuration and checkpoint are provided according to the task status;
+
+ The task list provides the function of **"query"**, where **job name** provides the function of fuzzy query.
+
+
+## 4.3. Management jobs
+
+
+
+ Click **"job name"** of a task in the task list to provide the function of managing the task, or click **"three dots"** to the left of the name to call up the specific function configuration entry, as shown below:
+
+![Configuration job](../../images/job_list_en.png)
+Picture 4.3 Configuration job
+
+
+
+ The configuration task provides five functions, namely:
+
+- Parameter configuration
+- alarm configuration
+- operation history
+- operation log
+- snapshot[savepoint]
+
+
+
+Click batch operation, and multiple job tasks can be restarted. Restart and snapshot will generate a snapshot and then restart. Restart directly will not generate a snapshot.
+
+![jobbulk_operate](../../images/jobbulk_operate_en.png)
+
+### 4.3.1. Job summary:
+
+![Operating condition](../../images/stream_job_detail_en.png)
+ Picture 4.4 Job summary
+
+
+
+ The running status summarizes the real-time traffic, total amount of data, and load status of the task.
+
+
+
+### 4.3.2. Job history:
+
+![Execution history](../../images/stream_job_history_en.png)
+Picture 4.5 Job history
+
+
+
+ The running history records the record of each run of the task.
+
+
+
+
+### 4.3.3. Job config:
+
+![Configuration](../../images/stream_job_config_en_1.png)
+![Configuration](../../images/stream_job_config_en_2.png)
+Picture 4.6 Job config
+
+
+
+ The configuration page is used to configure various parameters of the running task, and provides the following types of configuration:
+
+- Resource configuration
+- Production configuration
+- Flink parameters
+- Permission settings
+
+
+
+### 4.3.4. Job details:
+
+
+
+ The job details are divided into two display interfaces according to the task type Flink Jar and Flink SQL.
+
+
+
+**Flink Jar Job details**
+
+![Job details](../../images/stream_job_flinkjar_jobcontent_en.png)
+Picture 4.7 Flink Jar Job details
+
+
+
+ Flink Jar task details show the contents and parameters of the task Jar package, and provide the function of downloading the Jar package.
+
+
+
+
+**Flink SQL job details**
+
+![Job details](../../images/stream_job_flinksql_jobcontent_en.png)
+Picture 4.8 Flink SQL job details
+
+
+
+ The Flink SQL job details show the SQL statement of the task.
+
+
+
+
+
+## 5. Project resource file
+
+
+
+ **Page entry:** On the top right of the homepage-core indicators **"Project Resource File"**.
+
+ The project resource file provides the function of uploading and managing the resource files required by the project, as shown in the figure below:
+
+
+
+![Engineering Documents Home Page](../../images/project_source_file_list_en.png)
+Picture 5.1 Engineering Documents Home Page
+
+
+
+![Upload project file](../../images/project_source_file_import_en.png)
+Picture 5.2 Upload project file
diff --git a/docs/images/SQLDetail.png b/docs/images/SQLDetail.png
new file mode 100644
index 000000000..e847f9f1e
Binary files /dev/null and b/docs/images/SQLDetail.png differ
diff --git a/docs/images/SQLDetail_en.png b/docs/images/SQLDetail_en.png
new file mode 100644
index 000000000..3c0f6bb14
Binary files /dev/null and b/docs/images/SQLDetail_en.png differ
diff --git a/docs/images/config.png b/docs/images/config.png
new file mode 100644
index 000000000..04f9f406d
Binary files /dev/null and b/docs/images/config.png differ
diff --git a/docs/images/config_en.png b/docs/images/config_en.png
new file mode 100644
index 000000000..6c6f6f46e
Binary files /dev/null and b/docs/images/config_en.png differ
diff --git a/docs/images/create_script_file.png b/docs/images/create_script_file.png
new file mode 100644
index 000000000..ce3030df9
Binary files /dev/null and b/docs/images/create_script_file.png differ
diff --git a/docs/images/create_stream_product_center.png b/docs/images/create_stream_product_center.png
new file mode 100644
index 000000000..701b4c683
Binary files /dev/null and b/docs/images/create_stream_product_center.png differ
diff --git a/docs/images/create_stream_product_center_en.png b/docs/images/create_stream_product_center_en.png
new file mode 100644
index 000000000..952d11af9
Binary files /dev/null and b/docs/images/create_stream_product_center_en.png differ
diff --git a/docs/images/enter_flinksql.png b/docs/images/enter_flinksql.png
new file mode 100644
index 000000000..c360dbe28
Binary files /dev/null and b/docs/images/enter_flinksql.png differ
diff --git a/docs/images/entry.png b/docs/images/entry.png
new file mode 100644
index 000000000..259fd268f
Binary files /dev/null and b/docs/images/entry.png differ
diff --git a/docs/images/entry_en.png b/docs/images/entry_en.png
new file mode 100644
index 000000000..a4c5d0cc8
Binary files /dev/null and b/docs/images/entry_en.png differ
diff --git a/docs/images/fileHome.png b/docs/images/fileHome.png
new file mode 100644
index 000000000..b46c98dd3
Binary files /dev/null and b/docs/images/fileHome.png differ
diff --git a/docs/images/fileHome_en.png b/docs/images/fileHome_en.png
new file mode 100644
index 000000000..5c26bc03d
Binary files /dev/null and b/docs/images/fileHome_en.png differ
diff --git a/docs/images/fileUpload.png b/docs/images/fileUpload.png
new file mode 100644
index 000000000..198ff6b11
Binary files /dev/null and b/docs/images/fileUpload.png differ
diff --git a/docs/images/fileUpload_en.png b/docs/images/fileUpload_en.png
new file mode 100644
index 000000000..42e8e8cc4
Binary files /dev/null and b/docs/images/fileUpload_en.png differ
diff --git a/docs/images/flinksql_job_use_demo.png b/docs/images/flinksql_job_use_demo.png
new file mode 100644
index 000000000..7c8dd715d
Binary files /dev/null and b/docs/images/flinksql_job_use_demo.png differ
diff --git a/docs/images/flinksql_job_use_demo2.png b/docs/images/flinksql_job_use_demo2.png
new file mode 100644
index 000000000..606fe116f
Binary files /dev/null and b/docs/images/flinksql_job_use_demo2.png differ
diff --git a/docs/images/flinksql_script_file.png b/docs/images/flinksql_script_file.png
new file mode 100644
index 000000000..47229ab8c
Binary files /dev/null and b/docs/images/flinksql_script_file.png differ
diff --git a/docs/images/history.png b/docs/images/history.png
new file mode 100644
index 000000000..42004745e
Binary files /dev/null and b/docs/images/history.png differ
diff --git a/docs/images/history_en.png b/docs/images/history_en.png
new file mode 100644
index 000000000..30b891b0f
Binary files /dev/null and b/docs/images/history_en.png differ
diff --git a/docs/images/homePage.png b/docs/images/homePage.png
new file mode 100644
index 000000000..c1ca398e4
Binary files /dev/null and b/docs/images/homePage.png differ
diff --git a/docs/images/homePage_en.png b/docs/images/homePage_en.png
new file mode 100644
index 000000000..5e8797904
Binary files /dev/null and b/docs/images/homePage_en.png differ
diff --git a/docs/images/home_page.png b/docs/images/home_page.png
new file mode 100644
index 000000000..80c419d53
Binary files /dev/null and b/docs/images/home_page.png differ
diff --git a/docs/images/home_page_en.png b/docs/images/home_page_en.png
new file mode 100644
index 000000000..28b0399a1
Binary files /dev/null and b/docs/images/home_page_en.png differ
diff --git a/docs/images/image-20211230174445688.png b/docs/images/image-20211230174445688.png
new file mode 100644
index 000000000..066357b2f
Binary files /dev/null and b/docs/images/image-20211230174445688.png differ
diff --git a/docs/images/image-20211230174723105.png b/docs/images/image-20211230174723105.png
new file mode 100644
index 000000000..484c55797
Binary files /dev/null and b/docs/images/image-20211230174723105.png differ
diff --git a/docs/images/image-20211230175424588.png b/docs/images/image-20211230175424588.png
new file mode 100644
index 000000000..9fc8934d2
Binary files /dev/null and b/docs/images/image-20211230175424588.png differ
diff --git a/docs/images/image-20211231092431429.png b/docs/images/image-20211231092431429.png
new file mode 100644
index 000000000..ec7305b09
Binary files /dev/null and b/docs/images/image-20211231092431429.png differ
diff --git a/docs/images/image-20211231092520768.png b/docs/images/image-20211231092520768.png
new file mode 100644
index 000000000..95822b05e
Binary files /dev/null and b/docs/images/image-20211231092520768.png differ
diff --git a/docs/images/image-20211231092959561.png b/docs/images/image-20211231092959561.png
new file mode 100644
index 000000000..dcfbaf083
Binary files /dev/null and b/docs/images/image-20211231092959561.png differ
diff --git a/docs/images/image-20211231093901173.png b/docs/images/image-20211231093901173.png
new file mode 100644
index 000000000..d5effb0df
Binary files /dev/null and b/docs/images/image-20211231093901173.png differ
diff --git a/docs/images/image-20211231094103002.png b/docs/images/image-20211231094103002.png
new file mode 100644
index 000000000..0cd4c6872
Binary files /dev/null and b/docs/images/image-20211231094103002.png differ
diff --git a/docs/images/image-20211231101048962.png b/docs/images/image-20211231101048962.png
new file mode 100644
index 000000000..f6be574aa
Binary files /dev/null and b/docs/images/image-20211231101048962.png differ
diff --git a/docs/images/image-20211231102020703.png b/docs/images/image-20211231102020703.png
new file mode 100644
index 000000000..91b20ebfa
Binary files /dev/null and b/docs/images/image-20211231102020703.png differ
diff --git a/docs/images/jarDetail_en.png b/docs/images/jarDetail_en.png
new file mode 100644
index 000000000..2143eec38
Binary files /dev/null and b/docs/images/jarDetail_en.png differ
diff --git a/docs/images/jarZip.png b/docs/images/jarZip.png
new file mode 100644
index 000000000..3f395acbf
Binary files /dev/null and b/docs/images/jarZip.png differ
diff --git a/docs/images/jobNav.png b/docs/images/jobNav.png
new file mode 100644
index 000000000..f106ca1d1
Binary files /dev/null and b/docs/images/jobNav.png differ
diff --git a/docs/images/jobNav_en.png b/docs/images/jobNav_en.png
new file mode 100644
index 000000000..130d2aabc
Binary files /dev/null and b/docs/images/jobNav_en.png differ
diff --git a/docs/images/job_list.png b/docs/images/job_list.png
new file mode 100644
index 000000000..50fb4e4c9
Binary files /dev/null and b/docs/images/job_list.png differ
diff --git a/docs/images/job_list_en.png b/docs/images/job_list_en.png
new file mode 100644
index 000000000..487bf502d
Binary files /dev/null and b/docs/images/job_list_en.png differ
diff --git a/docs/images/jobbulk_operate.png b/docs/images/jobbulk_operate.png
new file mode 100644
index 000000000..38aabe699
Binary files /dev/null and b/docs/images/jobbulk_operate.png differ
diff --git a/docs/images/jobbulk_operate_en.png b/docs/images/jobbulk_operate_en.png
new file mode 100644
index 000000000..6c064cbf6
Binary files /dev/null and b/docs/images/jobbulk_operate_en.png differ
diff --git a/docs/images/project_source_file_import.png b/docs/images/project_source_file_import.png
new file mode 100644
index 000000000..a7b2404b8
Binary files /dev/null and b/docs/images/project_source_file_import.png differ
diff --git a/docs/images/project_source_file_import_en.png b/docs/images/project_source_file_import_en.png
new file mode 100644
index 000000000..01b3419ca
Binary files /dev/null and b/docs/images/project_source_file_import_en.png differ
diff --git a/docs/images/project_source_file_list.png b/docs/images/project_source_file_list.png
new file mode 100644
index 000000000..ebcf25982
Binary files /dev/null and b/docs/images/project_source_file_list.png differ
diff --git a/docs/images/project_source_file_list_en.png b/docs/images/project_source_file_list_en.png
new file mode 100644
index 000000000..84590e46e
Binary files /dev/null and b/docs/images/project_source_file_list_en.png differ
diff --git a/docs/images/statusDetail.png b/docs/images/statusDetail.png
new file mode 100644
index 000000000..6236aa578
Binary files /dev/null and b/docs/images/statusDetail.png differ
diff --git a/docs/images/statusDetail_en.png b/docs/images/statusDetail_en.png
new file mode 100644
index 000000000..120b2c415
Binary files /dev/null and b/docs/images/statusDetail_en.png differ
diff --git a/docs/images/stream_job_config_1.png b/docs/images/stream_job_config_1.png
new file mode 100644
index 000000000..5e391bf84
Binary files /dev/null and b/docs/images/stream_job_config_1.png differ
diff --git a/docs/images/stream_job_config_2.png b/docs/images/stream_job_config_2.png
new file mode 100644
index 000000000..a1ea87288
Binary files /dev/null and b/docs/images/stream_job_config_2.png differ
diff --git a/docs/images/stream_job_config_en_1.png b/docs/images/stream_job_config_en_1.png
new file mode 100644
index 000000000..40d403db5
Binary files /dev/null and b/docs/images/stream_job_config_en_1.png differ
diff --git a/docs/images/stream_job_config_en_2.png b/docs/images/stream_job_config_en_2.png
new file mode 100644
index 000000000..466f73ed2
Binary files /dev/null and b/docs/images/stream_job_config_en_2.png differ
diff --git a/docs/images/stream_job_detail.png b/docs/images/stream_job_detail.png
new file mode 100644
index 000000000..99dcb0a97
Binary files /dev/null and b/docs/images/stream_job_detail.png differ
diff --git a/docs/images/stream_job_detail_en.png b/docs/images/stream_job_detail_en.png
new file mode 100644
index 000000000..d7a7e3ba5
Binary files /dev/null and b/docs/images/stream_job_detail_en.png differ
diff --git a/docs/images/stream_job_flinkjar_jobcontent.png b/docs/images/stream_job_flinkjar_jobcontent.png
new file mode 100644
index 000000000..9b0063eb3
Binary files /dev/null and b/docs/images/stream_job_flinkjar_jobcontent.png differ
diff --git a/docs/images/stream_job_flinkjar_jobcontent_en.png b/docs/images/stream_job_flinkjar_jobcontent_en.png
new file mode 100644
index 000000000..6e02623ba
Binary files /dev/null and b/docs/images/stream_job_flinkjar_jobcontent_en.png differ
diff --git a/docs/images/stream_job_flinksql_jobcontent.png b/docs/images/stream_job_flinksql_jobcontent.png
new file mode 100644
index 000000000..cdc9eceae
Binary files /dev/null and b/docs/images/stream_job_flinksql_jobcontent.png differ
diff --git a/docs/images/stream_job_flinksql_jobcontent_en.png b/docs/images/stream_job_flinksql_jobcontent_en.png
new file mode 100644
index 000000000..16e5faeea
Binary files /dev/null and b/docs/images/stream_job_flinksql_jobcontent_en.png differ
diff --git a/docs/images/stream_job_history.png b/docs/images/stream_job_history.png
new file mode 100644
index 000000000..c90a486a1
Binary files /dev/null and b/docs/images/stream_job_history.png differ
diff --git a/docs/images/stream_job_history_en.png b/docs/images/stream_job_history_en.png
new file mode 100644
index 000000000..d6e075ad3
Binary files /dev/null and b/docs/images/stream_job_history_en.png differ
diff --git a/docs/images/stream_product_center.png b/docs/images/stream_product_center.png
new file mode 100644
index 000000000..a039e76c5
Binary files /dev/null and b/docs/images/stream_product_center.png differ
diff --git a/docs/images/stream_product_center_en.png b/docs/images/stream_product_center_en.png
new file mode 100644
index 000000000..477fea4fd
Binary files /dev/null and b/docs/images/stream_product_center_en.png differ
diff --git a/docs/images/streamis_appconn_en.png b/docs/images/streamis_appconn_en.png
new file mode 100644
index 000000000..8a96039f1
Binary files /dev/null and b/docs/images/streamis_appconn_en.png differ
diff --git a/docs/images/streamis_project_privilege_en.png b/docs/images/streamis_project_privilege_en.png
new file mode 100644
index 000000000..76f123fe2
Binary files /dev/null and b/docs/images/streamis_project_privilege_en.png differ
diff --git a/docs/images/upload_jobtask_error.png b/docs/images/upload_jobtask_error.png
new file mode 100644
index 000000000..ffa2033ae
Binary files /dev/null and b/docs/images/upload_jobtask_error.png differ
diff --git a/docs/images/upload_jobtask_error_solve.png b/docs/images/upload_jobtask_error_solve.png
new file mode 100644
index 000000000..72971929e
Binary files /dev/null and b/docs/images/upload_jobtask_error_solve.png differ
diff --git a/docs/images/versionDetail.png b/docs/images/versionDetail.png
new file mode 100644
index 000000000..1264b018b
Binary files /dev/null and b/docs/images/versionDetail.png differ
diff --git a/docs/images/versionDetail_en.png b/docs/images/versionDetail_en.png
new file mode 100644
index 000000000..8ad144b3a
Binary files /dev/null and b/docs/images/versionDetail_en.png differ
diff --git a/docs/images/versionHistory.png b/docs/images/versionHistory.png
new file mode 100644
index 000000000..9ec2505ff
Binary files /dev/null and b/docs/images/versionHistory.png differ
diff --git a/docs/images/versionHistory_en.png b/docs/images/versionHistory_en.png
new file mode 100644
index 000000000..16e1567c5
Binary files /dev/null and b/docs/images/versionHistory_en.png differ
diff --git a/docs/images/zh_CN/DSS_integration_Streamis.png b/docs/images/zh_CN/DSS_integration_Streamis.png
new file mode 100644
index 000000000..e568c8655
Binary files /dev/null and b/docs/images/zh_CN/DSS_integration_Streamis.png differ
diff --git a/docs/images/zh_CN/dss_streamis_project.png b/docs/images/zh_CN/dss_streamis_project.png
new file mode 100644
index 000000000..f8316a9a6
Binary files /dev/null and b/docs/images/zh_CN/dss_streamis_project.png differ
diff --git a/docs/images/zh_CN/eureka_streamis.png b/docs/images/zh_CN/eureka_streamis.png
new file mode 100644
index 000000000..4ca0d8495
Binary files /dev/null and b/docs/images/zh_CN/eureka_streamis.png differ
diff --git a/docs/images/zh_CN/meta_txt_demo.png b/docs/images/zh_CN/meta_txt_demo.png
new file mode 100644
index 000000000..7d46f229d
Binary files /dev/null and b/docs/images/zh_CN/meta_txt_demo.png differ
diff --git a/docs/images/zh_CN/start-app.png b/docs/images/zh_CN/start-app.png
new file mode 100644
index 000000000..1aafb49ae
Binary files /dev/null and b/docs/images/zh_CN/start-app.png differ
diff --git a/docs/images/zh_CN/streamis_appconn.png b/docs/images/zh_CN/streamis_appconn.png
new file mode 100644
index 000000000..34949619d
Binary files /dev/null and b/docs/images/zh_CN/streamis_appconn.png differ
diff --git a/docs/images/zh_CN/streamis_config.png b/docs/images/zh_CN/streamis_config.png
new file mode 100644
index 000000000..501bbfd53
Binary files /dev/null and b/docs/images/zh_CN/streamis_config.png differ
diff --git a/docs/images/zh_CN/streamis_project_privilege.png b/docs/images/zh_CN/streamis_project_privilege.png
new file mode 100644
index 000000000..9aa4c67fe
Binary files /dev/null and b/docs/images/zh_CN/streamis_project_privilege.png differ
diff --git a/docs/images/zh_CN/upload_zip.png b/docs/images/zh_CN/upload_zip.png
new file mode 100644
index 000000000..9c1efbacc
Binary files /dev/null and b/docs/images/zh_CN/upload_zip.png differ
diff --git "a/docs/images/\345\274\200\345\217\221\344\270\255\345\277\203.png" "b/docs/images/\345\274\200\345\217\221\344\270\255\345\277\203.png"
new file mode 100644
index 000000000..8a5d6619b
Binary files /dev/null and "b/docs/images/\345\274\200\345\217\221\344\270\255\345\277\203.png" differ
diff --git "a/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md" "b/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md"
new file mode 100644
index 000000000..588eb4c1a
--- /dev/null
+++ "b/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md"
@@ -0,0 +1,90 @@
+# Streamis系统的AppConn插件使用
+
+## 1.StreamisAppConn
+----------
+
+### 1.1介绍
+StreamisAppConn是Streamis用来与DSS集成的一个AppConn,功能包括
+
+|实现的规范和Service | 功能 | 作用微服务 |
+|---------------------|------------------------------------------------------|---------------------------------------------------------|
+| 二级规范 | 与DSS工程打通,支持工程内容同步 | DSS-Framework-Project-Server |
+| 三级规范-CRUDService | 支持流式编排创建、获取、更新、删除等操作 | DSS-Framework-Orchestrator-Server |
+| 三级规范-ExportService和ImportService | 支持流式编排的导入导出 | DSS-Framework-Orchestrator-Server |
+
+
+
+### 1.2部署
+
+1. 编译
+
+```bash
+#整体编译streamis代码
+cd ${STREAMIS_CODE_HOME}
+mvn -N install
+mvn clean install
+#单独编译appconn插件
+cd ${STREAMIS_CODE_HOME}/streamis-plugins/streamis-appconn
+mvn clean install
+```
+
+2. 部署
+1. 从 ${STREAMIS_CODE_HOME}/streamis-plugins/streamis-appconn/target 获取appconn的安装包
+2. 上传到DSS放置appconn的目录
+```bash
+cd ${DSS_HOME}/dss/dss-appconns
+unzip streamis-appconn.zip
+```
+3. 执行sql
+需要进入到 DSS 的数据库中执行以下 SQL:
+```roomsql
+SET @STREAMIS_INSTALL_IP_PORT='127.0.0.1:9003';
+SET @URL = replace('http://STREAMIS_IP_PORT', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT);
+SET @HOMEPAGE_URL = replace('http://STREAMIS_IP_PORT', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT);
+SET @PROJECT_URL = replace('http://STREAMIS_IP_PORT', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT);
+SET @REDIRECT_URL = replace('http://STREAMIS_IP_PORT/udes/auth', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT);
+
+delete from `dss_application` WHERE `name` = 'Streamis';
+INSERT INTO `dss_application`(`name`,`url`,`is_user_need_init`,`level`,`user_init_url`,`exists_project_service`,`project_url`,`enhance_json`,`if_iframe`,`homepage_url`,`redirect_url`) VALUES ('Streamis', @URL, 0, 1, NULL, 0, @PROJECT_URL, '', 1, @HOMEPAGE_URL, @REDIRECT_URL);
+
+select @dss_streamis_applicationId:=id from `dss_application` WHERE `name` = 'Streamis';
+
+delete from `dss_onestop_menu` WHERE `name` = '数据交换';
+select @dss_onestop_menu_id:=id from `dss_onestop_menu` where `name` = '数据交换';
+
+delete from `dss_onestop_menu_application` WHERE title_en = 'Streamis';
+```
+
+
+
+### 1.3使用
+
+## 2.Streamis DataSource AppConn
+----------
+### 2.1介绍
+|实现的规范和Service | 功能 | 作用微服务 |
+|---------------------|------------------------------------------------------|---------------------------------------------------------|
+| 三级规范的CRUDService | 支持数据源节点的创建、获取、更新、删除等操作 | DSS-Workflow-Server |
+| 三级规范的ExportService和ImportService | 支持数据源的导入导出 | DSS-Workflow-Server |
+| 三级规范的ExecutionService | 支持数据源的执行 | Linkis-AppConn-Engine |
+
+1. 实现三级规范的CRUDService,支持数据源节点的创建、获取、更新、删除等操作
+2. 实现三级规范的ExportService和ImportService,支持数据源的导入导出
+3. 实现三级规范的ExecutionService,支持数据源的执行
+### 2.2部署
+
+### 2.3使用
+
+
+## 3.Streamis JobManager AppConn
+
+### 3.1介绍
+StreamisJobManager AppConn与SchedulisAppConn的功能是类似的,主要是将DSS的工作流转换成Streamis能够提交执行的流式应用,并把此流式应用发布到Streamis-JobManager。
+
+|实现的规范和Service | 功能 | 作用微服务 |
+|---------------------|------------------------------------------------------|---------------------------------------------------------|
+| 工作流转换规范 | 支持将流式工作流转换成Linkis Flink引擎可以执行的流式应用 | DSS-Framework-Orchestrator-Server |
+| 工作流发布规范 | 支持将转换之后的流式应用发布到Streamis-JobManager | DSS-Framework-Orchestrator-Server |
+
+### 3.2部署
+
diff --git "a/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md"
new file mode 100644
index 000000000..f51c0cc43
--- /dev/null
+++ "b/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md"
@@ -0,0 +1,206 @@
+# Streamis安装部署文档
+
+## 1.组件介绍
+
+Streamis0.2.4 提供了 Streamis-JobManager 流式生产中心,其作用主要有:
+
+1. 上传/更新流式应用
+2. 配置流式应用参数,如 Flink 的 Slot 数量、checkpoint相关参数等
+3. 管理流式应用,如启停、savepoint等
+4. 流式应用监控告警
+
+
+## 2.代码编译
+
+**Streamis 无需手动编译,可以直接下载安装包进行部署,请 [点我下载安装包](https://github.com/WeBankFinTech/Streamis/releases)。**
+
+如果您想自己编译 Streamis,可参考如下步骤进行。
+
+- 后台编译方式如下:
+
+```shell script
+cd ${STREAMIS_CODE_HOME}
+mvn -N install
+mvn clean install
+```
+编译成功后将会在项目的 `assembly/target` 目录下生成安装包 `wedatasphere-streamis-${streamis-version}-dist.tar.gz`
+
+- 前端编译方式如下:
+
+前置依赖:nodejs、python 2.0
+
+```shell script
+cd ${STREAMIS_CODE_HOME}/web
+npm i
+npm run build
+```
+编译成功后,在 `${STREAMIS_CODE_HOME}/web` 目录下生成 `streamis-${streamis-version}-dist.zip`
+
+## 3.安装准备
+
+#### 3.1 基础环境安装
+
+ 下面的软件必须安装:
+
+- MySQL (5.5+),[如何安装MySQL](https://www.runoob.com/mysql/mysql-install.html)
+- JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html)
+
+### 3.2 Linkis 和 DSS 环境
+
+- Linkis (>=1.1.1),Streamis 的执行依赖于 Linkis 的 Flink 引擎,并且依赖 **Linkis-1.1.1** 及以上版本,部分功能需要Linkis-1.1.2支持。
+- DataSphere Studio (>=1.1.0),Streamis 流式作业的开发和调试,依赖于 DSS-Scriptis,Streamis 流式生产中心则需嵌入到 DSS 工程框架体系之中,所以依赖于 **DSS-1.1.0** 及以上版本。
+
+在正式安装 Streamis 之前,请先安装 Linkis1.1.1 和 DSS1.1.0 及以上版本,并且保证 Linkis Flink 引擎 和 DSS 可以正常使用,DSS 和 Linkis 安装,可参照 [DSS & Linkis 一键安装部署文档](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md)。
+
+如何验证 DSS 和 Linkis 已基本可用?您可以在 DSS-Scriptis 上新建一个 flinksql 脚本并执行,如果 flinksql 能正确执行并返回结果集,表示 DSS 和 linkis 环境是可用的。
+
+
+## 4.安装和启动
+
+### 后台安装
+
+1.安装包准备
+
+将安装包上传到 Linux 服务器(目前只支持 Linux 环境部署)的安装目录,如 /appcom/Install/streamis,然后进行解压:
+
+```shell script
+cd /appcom/Install/streamis
+tar -xvf wedatasphere-streamis-${streamis-version}-dist.tar.gz
+```
+
+2.修改数据库配置
+
+```shell script
+vi conf/db.sh
+#配置基础的数据库信息
+
+```
+
+3.修改基础配置文件
+
+```shell script
+ vi conf/config.sh
+```
+
+```shell script
+### deploy user
+deployUser=hadoop
+
+### ssh port
+SSH_PORT=22
+
+##The Port of Streamis
+STREAMIS_PORT=9400
+
+### The install home path of STREAMIS,Must provided
+STREAMIS_INSTALL_HOME=/appcom/Install/streamis
+
+### Linkis EUREKA information. # Microservices Service Registration Discovery Center
+EUREKA_INSTALL_IP=127.0.0.1
+EUREKA_PORT=20303
+
+### Linkis Gateway information
+GATEWAY_INSTALL_IP=127.0.0.1
+GATEWAY_PORT=9001
+
+```
+
+4.执行安装脚本
+
+```shell script
+sh bin/install.sh
+```
+
+- install.sh脚本会询问您是否需要初始化数据库并导入元数据。
+
+ 因为担心用户重复执行install.sh脚本,把数据库中的用户数据清空,所以在install.sh执行时,会询问用户是否需要初始化数据库并导入元数据。
+
+ **第一次安装**必须选是。
+
+
+5.启动
+
+```shell script
+sh bin/start.sh
+```
+
+- 启动验证
+
+验证方式,因为 Streamis 与 Linkis 同用一套 Eureka,所以您需要检查 Linkis 的 Eureka 页面是否已经包含了 Streamis 的服务,如图:
+
+![components](../../images/zh_CN/eureka_streamis.png)
+
+
+### 前端部署
+
+1.安装nginx
+
+```bash
+sudo yum install -y nginx
+```
+
+2.部署前端包
+
+```
+mkdir ${STREAMIS_FRONT_PATH}
+cd ${STREAMIS_FRONT_PATH}
+#放置前端包
+unzip streamis-${streamis-version}.zip
+```
+
+3.修改nginx配置文件
+
+```bash
+cd /etc/nginx/conf.d
+vi streamis.conf
+# 复制下面的模板并根据实际情况进行修改
+```
+
+```
+server {
+ listen 9088;# 访问端口
+ server_name localhost;
+ location / {
+ root ${STREAMIS_FRONT_PATH}/dist; # 请修改成Streamis前端的静态文件目录
+        index  index.html index.htm;
+ }
+ location /api {
+            proxy_pass http://${LINKIS_GATEWAY_IP}:${LINKIS_GATEWAY_PORT}; #后端Linkis的地址,请修改成Linkis网关的ip和端口
+ proxy_set_header Host $host;
+ proxy_set_header X-Real-IP $remote_addr;
+ proxy_set_header x_real_ipP $remote_addr;
+ proxy_set_header remote_addr $remote_addr;
+ proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+ proxy_http_version 1.1;
+ proxy_connect_timeout 4s;
+ proxy_read_timeout 600s;
+ proxy_send_timeout 12s;
+ proxy_set_header Upgrade $http_upgrade;
+ proxy_set_header Connection upgrade;
+ }
+
+ #error_page 404 /404.html;
+ # redirect server error pages to the static page /50x.html
+ #
+ error_page 500 502 503 504 /50x.html;
+ location = /50x.html {
+ root /usr/share/nginx/html;
+ }
+}
+```
+
+4.加载nginx配置
+
+```bash
+sudo nginx -s reload
+```
+
+## 5. 接入DSS
+
+如您想正常使用 Streamis0.2.4 前端,还需安装 DSS StreamisAppConn 插件,请参考: [StreamisAppConn 插件安装文档](development/StreamisAppConn安装文档.md)
+
+## 6.Linkis Flink引擎编译安装
+如您想正常执行 Streamis0.2.4,还需安装 Linkis Flink 引擎,请参考: [Linkis Flink 引擎安装文档](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/)
+
+## 7.Streamis组件升级文档/脚本
+如您想从Streamis较低版本升级到 Streamis0.2.4 ,请参考:[Streamis升级文档](development/Streamis升级文档.md)
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/architecture/README.md b/docs/zh_CN/0.2.0/architecture/README.md
new file mode 100644
index 000000000..e69de29bb
diff --git a/docs/zh_CN/0.2.0/architecture/SUMMARY.md b/docs/zh_CN/0.2.0/architecture/SUMMARY.md
new file mode 100644
index 000000000..9aed8f45f
--- /dev/null
+++ b/docs/zh_CN/0.2.0/architecture/SUMMARY.md
@@ -0,0 +1,9 @@
+* [Streamis架构设计](README.md)
+ * [StreamDataSource 架构设计]()
+ * [StreamJobManager 架构设计]()
+ * [StreamWorkflow 架构设计]()
+ * [Stream Plugins 架构设计]()
+ * [DataSourceAppConn 介绍]()
+ * [StreamisAppConn 介绍]()
+ * [StreamJobManagerAppConn 介绍]()
+ * [FlinkAppConn 介绍]()
\ No newline at end of file
diff --git "a/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md"
new file mode 100644
index 000000000..d8bf3122c
--- /dev/null
+++ "b/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md"
@@ -0,0 +1,178 @@
+# Streamis接入AppConn
+
+## 总体流程图
+![Streamis接入DSS](../../../images/zh_CN/streamis_appconn.png)
+
+## DSS项目appconn插件streamis-appconn
+
+### 配置表
+配置下面4张表dss_workspace_dictionary、dss_appconn、dss_workspace_menu_appconn、dss_appconn_instance,appconn_name为realTimeJobCenter是界面访问的appconn,appconn_name为streamis是接口访问的appconn,dss启动时会根据表中配置信息实例化StreamisAppConn对象。下面sql中APPCONN_INSTALL_IP和APPCONN_INSTALL_PORT在执行DSS安装脚本进行自动化安装时会通过交互式命令的方式输入。
+```roomsql
+delete from `dss_workspace_dictionary` WHERE `appconn_name` = 'streamis';
+
+INSERT INTO `dss_workspace_dictionary` ( `workspace_id`, `parent_key`, `dic_name`, `dic_name_en`, `dic_key`, `dic_value`, `dic_value_en`, `title`, `title_en`, `url`, `url_type`,`icon`, `order_num`, `remark`, `create_user`, `create_time`, `update_user`, `update_time`, appconn_name)
+VALUES ('0','p_develop_process','流式生产中心','Streamis Product Center','pdp_streamis_product_center','streamis_prod',NULL,NULL,NULL,
+'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realtimeJobCenter?projectName=${projectName}&workspaceName=${workspaceName}','0','kaifa-icon','1','工程开发流程-流式生产中心','SYSTEM','2020-12-28 17:32:35',NULL,'2022-06-30 17:49:02','streamis');
+
+select @old_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis';
+
+delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_dss_appconn_id;
+delete from `dss_appconn_instance` where `appconn_id` = @old_dss_appconn_id;
+delete from `dss_appconn` where `appconn_name`='streamis';
+
+select @old_jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter';
+
+delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_jobcenter_dss_appconn_id;
+delete from `dss_appconn_instance` where `appconn_id` = @old_jobcenter_dss_appconn_id;
+delete from `dss_appconn` where `appconn_name`='realTimeJobCenter';
+
+INSERT INTO dss_appconn
+(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource)
+VALUES('streamis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.streamis.dss.appconn.StreamisAppConn', NULL, NULL);
+INSERT INTO dss_appconn
+(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource)
+VALUES('realTimeJobCenter', 0, 1, 1, 1, 'sso', '', NULL, NULL);
+
+select @dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis';
+select @jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter';
+
+INSERT INTO dss_workspace_menu_appconn
+(appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
+VALUES(@jobcenter_dss_appconn_id, 1, 'StreamSQL development', 'StreamSQL开发', 'Real-time application development is a streaming solution jointly built by WeDataSphere, Boss big data team and China Telecom ctcloud Big data team.', '实时应用开发是微众银行微数域(WeDataSphere)、Boss直聘大数据团队 和 中国电信天翼云大数据团队 社区联合共建的流式解决方案,以 Linkis 做为内核,基于 Flink Engine 构建的批流统一的 Flink SQL,助力实时化转型。',
+'streaming, realtime', '流式,实时', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'http://127.0.0.1:8088/wiki/scriptis/manual/workspace_cn.html', 'shujukaifa-logo', NULL, NULL, NULL, NULL, NULL, 'shujukaifa-icon');
+
+INSERT INTO dss_appconn_instance
+(appconn_id, label, url, enhance_json, homepage_uri)
+VALUES(@dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter');
+
+INSERT INTO dss_appconn_instance
+(appconn_id, label, url, enhance_json, homepage_uri)
+VALUES(@jobcenter_dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter', NULL, NULL);
+```
+
+### 具体实现说明
+StreamisAppConn继承AbstractOnlySSOAppConn实现SecondlyAppConn,重写方法创建一个StreamisStructureIntegrationStandard,目前StreamisAppConn只实现了免密登录和组织结构规范功能。
+
+通过重写StreamisStructureIntegrationStandard中方法创建StreamisProjectService,其内部需要重写4个方法,会创建4个操作类StreamisProjectSearchOperation、StreamisProjectCreationOperation、StreamisProjectUpdateOperation、StreamisProjectDeleteOperation。Operation类通过http调用streamis应用,分别实现查询、创建、修改、删除项目,同步dss项目信息到Streamis。
+
+- dss创建项目时,会先调用查询操作,查询streamis中是否已存在相同的项目名称,如果存在则弹出提示信息,不存在则继续调用创建操作去streamis中新建项目,
+- dss修改项目时,会调用修改操作去streamis中更新项目信息,
+- dss删除项目时,会调用删除操作去streamis中删除项目信息,更改删除标记。
+
+## 接口
+1 接口名称:查询项目
+- 接口路径:GET /streamis/project/searchProject
+- 请求参数
+
+|参数名称 |是否必须 |示例 |备注 |
+|-------------|---------|-------|--------|
+|projectName |是 | | |
+
+- 返回数据
+
+|名称 |类型 |是否必须 |默认值 |备注 |
+|-------------|--------|---------|---------|--------|
+|method |string |否 |
+|status |number |是 |
+|message |string |否 |
+|data |object |是 |
+|- projectId |number |是 |
+
+2 接口名称:创建项目
+- 接口路径:POST /streamis/project/createProject
+- 请求参数
+
+|参数名称 |是否必须 |示例 |备注 |
+|-------------|---------|-------|--------|
+|projectName |是 | | |
+|workspaceId |否 | | |
+|releaseUsers |否 | | |
+|editUsers |否 | | |
+|accessUsers |否 | | |
+
+- 返回数据
+
+|名称 |类型 |是否必须 |默认值 |备注 |
+|----------|--------|---------|---------|--------|
+|method |string |否 |
+|status |number |是 |
+|message |string |否 |
+|data |object |是 |
+|- projectId |number |是 |
+|- projectName |string |否 |
+
+3 接口名称:修改项目
+- 接口路径:PUT /streamis/project/updateProject
+- 请求参数
+
+|参数名称 |是否必须 |示例 |备注 |
+|-------------|---------|-------|--------|
+|projectId |是 | | |
+|projectName |是 | | |
+|workspaceId |否 | | |
+|releaseUsers |否 | | |
+|editUsers |否 | | |
+|accessUsers |否 | | |
+
+- 返回数据
+
+|名称 |类型 |是否必须 |默认值 |备注 |
+|----------|--------|---------|---------|--------|
+|method |string |否 |
+|status |number |是 |
+|message |string |否 |
+|data |object |否 |
+
+4 接口名称:删除项目
+- 接口路径:DELETE /streamis/project/deleteProject
+- 请求参数
+
+|参数名称 |是否必须 |示例 |备注 |
+|-------------|---------|-------|--------|
+|projectId |是 | | |
+|projectName |否 | | |
+
+- 返回数据
+
+|名称 |类型 |是否必须 |默认值 |备注 |
+|----------|--------|---------|---------|--------|
+|method |string |否 |
+|status |number |是 |
+|message |string |否 |
+|data |object |否 |
+
+## Streamis项目streamis-project-server
+
+### 相关表操作
+Streamis涉及到的表共2张,项目表linkis_stream_project和项目权限表linkis_stream_project_privilege。
+```roomsql
+--已存在表
+CREATE TABLE `linkis_stream_project` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `workspace_id` bigint(20) DEFAULT NULL,
+ `name` varchar(100) DEFAULT NULL,
+ `create_by` varchar(50) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表';
+
+--新增加表
+CREATE TABLE `linkis_stream_project_privilege` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `project_id` bigint(20) NOT NULL,
+ `user_name` varchar(100) NOT NULL,
+ `privilege` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:发布权限 ,2:编辑权限 ,3:查看权限',
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='项目权限表';
+
+--新增加字段
+ALTER TABLE `linkis_stream_project` ADD create_time datetime DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD last_update_by varchar(50) DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD last_update_time datetime DEFAULT NULL;
+ALTER TABLE `linkis_stream_project` ADD is_deleted tinyint unsigned DEFAULT 0;
+```
+
+### 具体实现说明
+- 查询操作,会获取请求参数projectName,在表linkis_stream_project中查询对应id,查询操作成功返回status为0,projectId为查询出来的id,如果查询结果为空则projectId为null;
+- 创建操作,会将请求参数中项目信息(projectName、workspaceId)插入项目表linkis_stream_project并自增生成id,将请求参数中权限信息数据(releaseUsers、editUsers、accessUsers)中user和项目表生成id关联,插入表linkis_stream_project_privilege,创建操作成功返回status值为0,会将项目表生成id作为projectId的值返回;
+- 修改操作,会根据请求参数projectId将请求信息数据更新到表linkis_stream_project和 linkis_stream_project_privilege,成功直接返回status值为0;
+- 删除操作,会根据projectId将表linkis_stream_project的is_deleted字段标记为1,linkis_stream_project_privilege中相关数据删除,成功直接返回status值为0。
diff --git "a/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md"
new file mode 100644
index 000000000..02514a3e8
--- /dev/null
+++ "b/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md"
@@ -0,0 +1,43 @@
+# 鉴权
+
+## 鉴权流程图
+streamis存在需要鉴权的模块没有依赖streamis-project-server模块的情况,使用rest接口调用来处理鉴权。
+![Streamis项目鉴权操作](../../../images/zh_CN/streamis_project_privilege.png)
+
+## 具体实现说明
+根据当前用户名和项目ID/名称获取到所有权限的集合,如果权限集合中包含RELEASE权限,则拥有发布/编辑/查看的权限,如果权限集合中包含EDIT权限,则拥有编辑/查看的权限,如果权限集合中包含ACCESS权限,则拥有查看的权限;
+权限包含关系:RELEASE权限包含EDIT权限、ACCESS权限;EDIT权限包含ACCESS权限。
+
+## 接口
+### edit权限接口:
+
+|RequestMethod |接口路径 |名称 |
+|------|----------------------------------------------------------|-----------------|
+|POST |/streamis/streamProjectManager/project/files/upload |工程资源文件-导入 |
+|GET |/streamis/streamProjectManager/project/files/delete |删除项目下该文件所有版本 |
+|GET |/streamis/streamProjectManager/project/files/version/delete |删除版本文件 |
+|GET |/streamis/streamProjectManager/project/files/download |任务详情-下载 |
+|POST |/streamis/streamJobManager/job/createOrUpdate |创建或更新streamis-job|
+|POST |/streamis/streamJobManager/job/upload |上传文件 |
+|POST |/streamis/streamJobManager/job/execute |启动 |
+|GET |/streamis/streamJobManager/job/stop |停止 |
+|PUT |/streamis/streamJobManager/job/snapshot/{jobId:\w+} |快照生成 |
+|GET |/streamis/streamJobManager/config/json/{jobId:\w+} |配置-保存 |
+|POST |/streamis/streamJobManager/job/bulk/execution |批量启动 |
+|POST |/streamis/streamJobManager/job/bulk/pause |批量停止 |
+
+
+### access权限接口:
+
+|RequestMethod |接口路径 |名称 |
+|------|----------------------------------------------------------|-------------|
+|GET |/streamis/streamJobManager/job/list |查询当前用户可查看job |
+|GET |/streamis/streamProjectManager/project/files/list |工程资源文件 |
+|GET |/streamis/streamProjectManager/project/files/version/list |获取工程下该文件所有版本 |
+|GET |/streamis/streamJobManager/job/version |查询job版本 |
+|GET |/streamis/streamJobManager/job/execute/history |job执行历史 |
+|GET |/streamis/streamJobManager/job/progress |获取job当前版本最新task状态 |
+|GET |/streamis/streamJobManager/job/jobContent |任务详情 |
+|GET |/streamis/streamJobManager/job/logs |获取日志 |
+|POST |/streamis/streamJobManager/config/json/{jobId:\w+} |获得任务配置 |
+|GET |/streamis/streamJobManager/config/view |查询当前job配置信息 |
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/Interface_documentation/README.md b/docs/zh_CN/0.2.0/development/Interface_documentation/README.md
new file mode 100644
index 000000000..05897a731
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/Interface_documentation/README.md
@@ -0,0 +1,63 @@
+## 1. 目录
+
+* [StreamDataSource 接口文档]()
+* [StreamJobManager 接口文档]()
+* [StreamWorkflow 接口文档]()
+
+## 2. URL规范
+
+```
+/api/rest_j/v1/streamis/{moduleName}/.+
+```
+
+**约定**:
+
+ - rest_j表示接口符合Jersey规范
+ - v1为服务的版本号,**版本号会随着 Linkis 版本进行升级**
+ - streamis为微服务名
+ - {moduleName}为模块名,其中:
+ * StreamDataSource 模块名命名为 streamDataSource;
+ * StreamJobManager 模块名命名为 streamJobManager;
+ * StreamWorkflow 模块名命名为 streamWorkflow;
+
+## 3. 接口请求格式
+
+
+```json
+{
+ "method": "/api/rest_j/v1/streamis/.+",
+ "data": {}
+}
+```
+
+**约定**:
+
+ - method:请求的Restful API URL。
+ - data:请求的具体数据。
+
+## 4. 接口返回格式
+
+```json
+{
+ "method": "/api/rest_j/v1/streamis/.+",
+ "status": 0,
+ "message": "创建成功!",
+ "data": {}
+}
+```
+
+**约定**:
+
+ - method:返回请求的Restful API URL,主要是websocket模式需要使用。
+ - status:返回状态信息,其中:-1表示没有登录,0表示成功,1表示错误,2表示验证失败,3表示没该接口的访问权限。
+ - data:返回具体的数据。
+ - message:返回请求的提示信息。如果status非0时,message返回的是错误信息,其中data有可能存在stack字段,返回具体的堆栈信息。
+
+另:根据status的不同,HTTP请求的状态码也不一样,一般情况下:
+
+ - 当status为0时,HTTP的状态码为200
+ - 当status为-1时,HTTP的状态码为401
+ - 当status为1时,HTTP的状态码为400
+ - 当status为2时,HTTP的状态码为412
+ - 当status为3时,HTTP的状态码为403
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/Requirements_documentation/README.md b/docs/zh_CN/0.2.0/development/Requirements_documentation/README.md
new file mode 100644
index 000000000..9991b7b0c
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/Requirements_documentation/README.md
@@ -0,0 +1,4 @@
+## 1. 目录
+
+* [StreamDataSource 需求文档]()
+* [StreamJobManager 需求文档]()
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/SUMMARY.md b/docs/zh_CN/0.2.0/development/SUMMARY.md
new file mode 100644
index 000000000..7b9a53b5b
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/SUMMARY.md
@@ -0,0 +1,13 @@
+* [开发规范](Specification_documentation/README.md)
+* [接口文档](Interface_documentation/README.md)
+ * [StreamDataSource 接口文档]()
+ * [StreamJobManager 接口文档]()
+ * [StreamWorkflow 接口文档]()
+* [表结构设计文档](Table_Structure_documentation/README.md)
+ * [StreamDataSource 表结构设计文档]()
+ * [StreamJobManager 表结构设计文档]()
+ * [StreamWorkflow 表结构设计文档]()
+* [需求文档](Requirements_documentation/README.md)
+ * [StreamDataSource 需求文档]()
+ * [StreamJobManager 需求文档]()
+* [UI交互稿](UI_draft/Streamis交互稿V4.zip)
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md b/docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md
new file mode 100644
index 000000000..8598b026b
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md
@@ -0,0 +1,31 @@
+## 如何定义新异常?
+
+- 自定义的异常都必须继承自WarnException、ErrorException或FatalException之一
+
+- 自定义的异常必须包含错误码和错误描述,如有必要,也可将发生异常的ip地址和进程端口封装到异常当中
+
+- 慎用WarnException!WarnException抛出来的异常,如果在Restful和RPC的Receiver端被捕获,不会给前端或sender端抛出执行失败,而是只返回一条警告信息!
+
+- WARNException的异常级别为1,ErrorException的异常级别为2,FatalException的异常级别为3
+
+
+|异常类| 所在服务| 错误码| 错误描述|
+|:---- |:--- |:--- |:--- |
+|DWCException| common| 无| 顶级父类,继承自Exception,不允许直接继承|
+|DWCRuntimeException| common| 无| 顶级父类,继承自RuntimeException,不允许直接继承|
+|WarnException| common| 无| 次级父类,继承自DWCRuntimeException。提示级的异常,必须直接或间接继承该类|
+|ErrorException| common| 无| 次级父类,继承自DWCException。错误级的异常,必须直接或间接继承该类|
+|FatalException| common| 无| 次级父类,继承自DWCException。致命级的异常,必须直接或间接继承该类|
+
+
+## 模块异常规范
+
+Streamis 架构错误码范围为:30000~30999,其中:
+
+- StreamDataSource 错误码范围为:30000~30099
+
+- StreamJobManager 错误码范围为:30100~30199
+
+- StreamWorkflow 错误码范围为:30200~30299
+
+- Stream Plugins 错误码范围为:30300~30499
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md b/docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md
new file mode 100644
index 000000000..693ab7c93
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md
@@ -0,0 +1,23 @@
+## 日志规范
+
+1. 【**约定**】Streamis 选择引用 Linkis Commons 通用模块,其中已包含了日志框架,主要以 **slf4j** 和 **Log4j2** 作为日志打印框架,去除了Spring-Cloud包中自带的logback。
+由于Slf4j会随机选择一个日志框架进行绑定,所以以后在引入新maven包的时候,需要将诸如slf4j-log4j等桥接包exclude掉,不然日志打印会出现问题。但是如果新引入的maven包依赖log4j等包,不要进行exclude,不然代码运行可能会报错。
+
+2. 【**配置**】log4j2的配置文件默认为 log4j2.xml ,需要放置在 classpath 中。如果需要和 springcloud 结合,可以在 application.yml 中加上 logging:config:classpath:log4j2-spring.xml (配置文件的位置)。
+
+3. 【**强制**】类中不可直接使用日志系统(log4j2、Log4j、Logback)中的API。
+
+ * 如果是Scala代码,强制继承Logging trait
+ * java采用 LoggerFactory.getLogger(getClass)。
+
+4. 【**强制**】严格区分日志级别。其中:
+
+ * Fatal级别的日志,在初始化的时候,就应该抛出来,并使用System.exit(-1)退出。
+ * ERROR级别的异常为开发人员必须关注和处理的异常,不要随便用ERROR级别的。
+ * Warn级别是用户操作异常日志和一些方便日后排除BUG的日志。
+ * INFO为关键的流程日志。
+ * DEBUG为调试日志,非必要尽量少写。
+
+5. 【**强制**】要求:INFO级别的日志,每个小模块都必须有,关键的流程、跨模块级的调用,都至少有INFO级别的日志。守护线程清理资源等必须有WARN级别的日志。
+
+6. 【**强制**】异常信息应该包括两类信息:案发现场信息和异常堆栈信息。如果不处理,那么通过关键字throws往上抛出。 正例:logger.error(各类参数或者对象toString + "_" + e.getMessage(), e);
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/README.md b/docs/zh_CN/0.2.0/development/Specification_documentation/README.md
new file mode 100644
index 000000000..d2e847b1c
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/Specification_documentation/README.md
@@ -0,0 +1,22 @@
+## 1. 说明
+
+为了规范 Streamis 社区的联合共建开发环境,提高 Streamis 版本开发迭代的产出质量,规范 Streamis 的整个开发设计流程,强烈建议各位Contributor遵守以下开发规范:
+
+说明:Streamis 初始版本的开发规范较为精简,后续会随着 Streamis 的版本迭代不断补充和完善,欢迎各位 Contributor 提出自己的见解和意见。
+
+## 2. 代码提交规范
+
+但是在正式发布Release Notes时,为了保证Release Notes的完整性,请各位模块负责人按照需求文档,先提好各个 issue,并将 issue 加入到 [Project-0.2.4](https://github.com/WeBankFinTech/Streamis/projects/2)。
+
+请注意:Streamis-0.2.4 使用 [Project-0.2.4](https://github.com/WeBankFinTech/Streamis/projects/2) 作为 DPMS 工具,来全程追踪和管理版本的进度。
+
+## 3. 后台开发规范
+
+* [接口开发规范](../Interface_documentation/README.md)
+* [Dao层开发规范](../Table_Structure_documentation/README.md)
+* [异常规范](Exception_Throws.md)
+* [日志规范](Log_out.md)
+
+## 4. 前端开发规范
+
+* [前端开发规范](../../../../../web/README.md)
\ No newline at end of file
diff --git "a/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md"
new file mode 100644
index 000000000..9bc8dfe00
--- /dev/null
+++ "b/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md"
@@ -0,0 +1,71 @@
+StreamisAppConn安装文档 本文主要介绍在DSS(DataSphere Studio)1.1.0中StreamisAppConn的部署、配置以及安装
+
+# 1.部署StreamisAppConn的准备工作
+您在部署StreamisAppConn之前,请安装完成Streamis0.2.4及其他相关组件的安装,并确保工程基本功能可用。
+
+# 2.StreamisAppConn插件的下载和编译
+1)下载二进制包
+我们提供StreamisAppconn的物料包,您可直接下载使用。[点击跳转 Release 界面](https://github.com/WeBankFinTech/Streamis/releases)
+
+2) 编译打包
+如果您想自己开发和编译StreamisAppConn,具体编译步骤如下: 1.clone Streamis的代码 2.找到streamis-appconn模块,单独编译streamis-appconn
+```shell script
+cd {STREAMIS_CODE_HOME}/streamis-appconn
+mvn clean install
+```
+会在该路径下找到streamis.zip安装包
+```shell script
+{STREAMIS_CODE_HOME}\streamis-appconn\target\streamis.zip
+```
+
+# 3.StreamisAppConn插件的部署和配置总体步骤
+ 1.拿到打包出来的streamis.zip物料包
+
+ 2.放置到如下目录并进行解压
+
+注意:第一次解压streamis appconn后,确保当前文件夹下没有index_v0000XX.index文件,该文件在后面才会生成
+```shell script
+cd {DSS_Install_HOME}/dss/dss-appconns
+unzip streamis.zip
+```
+解压出来的目录结构为:
+```shell script
+conf
+db
+lib
+```
+ 3.执行脚本进行自动化安装
+ ```shell script
+cd {DSS_INSTALL_HOME}/dss/bin
+sh ./appconn-install.sh
+# 脚本是交互式的安装方案,您需要输入字符串streamis以及streamis服务的ip和端口,即可以完成安装
+# 这里的streamis端口是指前端端口,在nginx进行配置。而不是后端的服务端口
+```
+
+## 4.完成streamis-appconn的安装后,需要重启dss服务,才能最终完成插件的更新
+### 4.1)使部署好的APPCONN生效
+使用DSS启停脚本使APPCONN生效,进入到脚本所在目录{DSS_INSTALL_HOME}/dss/sbin中,依次使用如下命令执行脚本:
+```shell script
+sh ./dss-stop-all.sh
+sh ./dss-start-all.sh
+```
+### 4.2)验证streamis-appconn是否生效
+在安装部署完成streamis-appconn之后,可通过以下步骤初步验证streamis-appconn是否安装成功。
+
+在DSS工作空间创建一个新的项目
+![DSS工作空间Streamis项目](../../../images/zh_CN/dss_streamis_project.png)
+
+在streamis数据库查看是否同步创建项目,查询有记录说明appconn安装成功
+```roomsql
+SELECT * FROM linkis_stream_project WHERE name = '项目名称';
+```
+
+# 5.Streamis AppConn安装原理
+Streamis 的相关配置信息会插入到以下表中,通过配置下表,可以完成 Streamis 的使用配置。(注:如果仅仅需要快速安装APPCONN,无需过分关注以下字段,提供的init.sql中大多已进行默认配置。重点关注以上操作即可)
+
+|表名 |表作用 |备注 |
+|-------------------|-----------------------------------------|------|
+|dss_workspace_dictionary |配置流式生产中心 |必须|
+|dss_appconn |AppConn的基本信息,用于加载AppConn |必须|
+|dss_workspace_menu_appconn |AppConn菜单,前端连接Streamis |必须|
+|dss_appconn_instance |AppConn的实例的信息,包括自身的url信息 |必须|
diff --git "a/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md"
new file mode 100644
index 000000000..9935164fe
--- /dev/null
+++ "b/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md"
@@ -0,0 +1,45 @@
+Streamis 升级文档,本文主要介绍在原有安装Streamis服务的基础上适配DSS1.1.0和Linkis1.1.1的升级步骤,Streamis0.2.4相对于Streamis0.1.0版本最大的区别在于接入了DSS AppConn,对job的启停做了优化。
+
+# 1.升级Streamis前的工作
+您在升级Streamis之前,请先安装 Linkis1.1.1 和 DSS1.1.0 及以上版本,并且保证 Linkis Flink 引擎 和 DSS 可以正常使用,DSS 和 Linkis 安装,可参照 [DSS & Linkis 一键安装部署文档](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md)。
+
+# 2.Streamis升级步骤
+
+## 安装StreamisAppConn
+
+1)删除旧版本StreamisAppConn包
+
+进入下列目录,找到streamis的appconn文件夹并删除,如果存在的话:
+```shell script
+{DSS_Install_HOME}/dss/dss-appconns
+```
+
+2)StreamisAppConn安装部署
+
+安装 DSS StreamisAppConn 插件,请参考: [StreamisAppConn 插件安装文档](development/StreamisAppConn安装文档.md)
+
+## 安装Streamis后端
+将获取到的安装包中lib更新到Streamis安装目录下的路径 `streamis-server/lib` 中,`streamis-server/conf`下的文件内容可根据需要进行更新。
+
+进入安装目录下,执行更新脚本,完成对数据库表结构和数据的更新:
+```shell script
+cd {Streamis_Install_HOME}
+sh bin/upgrade.sh
+```
+
+再通过以下命令完成 Streamis Server 的更新重启:
+```shell script
+cd {Streamis_Install_HOME}/streamis-server
+sh bin/stop-streamis-server.sh
+sh bin/start-streamis-server.sh
+```
+
+## 安装Streamis前端
+先删除旧版本前端目录文件夹,再替换为新的前端安装包
+```
+mkdir ${STREAMIS_FRONT_PATH}
+cd ${STREAMIS_FRONT_PATH}
+#1.删除前端目录文件夹
+#2.放置前端包
+unzip streamis-${streamis-version}.zip
+```
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md
new file mode 100644
index 000000000..ccf939700
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md
@@ -0,0 +1,9 @@
+## 1. 目录
+
+* [StreamDataSource 表结构设计文档]()
+* [StreamJobManager 表结构设计文档]()
+* [StreamWorkflow 表结构设计文档]()
+
+## 2. Dao层开发规范
+
+统一引入 Linkis 的 linkis-commons/linkis-mybatis 模块,使用标准的 Dao 层接口 + Dao 层xml 形式
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql
new file mode 100644
index 000000000..f5a64f2c4
--- /dev/null
+++ b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql
@@ -0,0 +1,342 @@
+
+SET NAMES utf8mb4;
+SET FOREIGN_KEY_CHECKS = 0;
+
+-- ----------------------------
+-- Table structure for linkis_stream_bml
+-- ----------------------------
+-- Fix: column name previously contained a leading space inside the backticks
+-- (` latest_version`), which MySQL accepts but creates a column literally named
+-- " latest_version" — unreferencable by normal queries. Renamed to `latest_version`.
+DROP TABLE IF EXISTS `linkis_stream_bml`;
+CREATE TABLE `linkis_stream_bml` (
+ `id` bigint(20) NOT NULL,
+ `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `bml_type` tinyint(1) NULL DEFAULT NULL,
+ `org_identification` bigint(20) NULL DEFAULT NULL,
+ `latest_version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_bml
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_bml_version
+-- ----------------------------
+-- Fix: column name previously contained a leading space (` attribute`),
+-- creating a column literally named " attribute". Renamed to `attribute`.
+DROP TABLE IF EXISTS `linkis_stream_bml_version`;
+CREATE TABLE `linkis_stream_bml_version` (
+ `id` bigint(20) NOT NULL,
+ `bml_id` bigint(20) NULL DEFAULT NULL,
+ `version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `storage_path` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `attribute` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '物料版本' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_bml_version
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_cluster
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_cluster`;
+CREATE TABLE `linkis_stream_cluster` (
+ `id` int(11) NOT NULL,
+ `yarn_conf_dir` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `hdfs_conf_dir` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `resource_manager_url` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `savepoint_dir` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = 'flink 集群信息' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_cluster
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_configuration_config_key
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_configuration_config_key`;
+CREATE TABLE `linkis_stream_configuration_config_key` (
+ `id` bigint(20) NOT NULL,
+ `key` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `default_value` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `validate_type` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `validate_range` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `is_hidden` tinyint(1) NULL DEFAULT NULL,
+ `is_advanced` tinyint(1) NULL DEFAULT NULL,
+ `level` tinyint(1) NULL DEFAULT NULL,
+ `treename` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `type` int(10) NULL DEFAULT NULL,
+ `sort` int(10) NULL DEFAULT NULL,
+ `status` tinyint(10) NULL DEFAULT NULL COMMENT '1 custom , 2 selected ',
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE INDEX `key_index`(`key`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '配置信息' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_configuration_config_key
+-- ----------------------------
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (1, 'wds.linkis.flink.resource', '资源配置', '资源配置', NULL, 'None', NULL, 0, 0, 1, '资源配置', 1, 0, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (2, 'wds.linkis.flink.taskmanager.num', 'Task Managers数量', 'Task Managers数量', '4', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', 0, 0, 2, '资源配置', 1, 1, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (3, 'wds.linkis.flink.jobmanager.memory', 'JobManager Memory', 'JobManager Memory', '1.5', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', 0, 0, 2, '资源配置', 1, 2, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (4, 'wds.linkis.flink.taskmanager.memory', 'TaskManager Memory', 'TaskManager Memory', '1.5', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', 0, 0, 2, '资源配置', 1, 3, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (5, 'wds.linkis.flink.jobmanager.cpus', 'JobManager CPUs', 'JobManager CPUs', '1', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', 0, 0, 2, '资源配置', 1, 4, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (6, 'wds.linkis.flink.taskManager.cpus', 'TaskManager CPUs', 'TaskManager CPUs', '1', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', 0, 0, 2, '资源配置', 1, 5, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (7, 'wds.linkis.flink.custom', '自定义参数', '自定义参数', NULL, 'None', NULL, 0, 0, 1, '自定义参数', 2, 0, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (8, 'wds.linkis.flink.produce', '生产配置', '生产配置', NULL, 'None', NULL, 0, 0, 1, '生产配置', 3, 0, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (9, 'wds.linkis.flink.checkpoint.interval', 'Checkpoint间隔', 'Checkpoint间隔', NULL, NULL, NULL, 0, 0, 2, '生产配置', 3, 1, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (10, 'wds.linkis.flink.reboot.strategy', '重启策略', '重启策略', '不重启,基于Checkpoint自动重启,无Checkpoint不重启', 'None', NULL, 0, 0, 2, '重启策略', 3, 2, 2);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (11, 'wds.linkis.flink.alert', '告警设置', '告警设置', NULL, 'None', NULL, 0, 0, 1, '告警设置', 4, 0, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (12, 'wds.linkis.flink.alert.rule', '告警规则', '告警规则', '任务日志中出现ERROR/EXCEPTION,任务核心指标出现异常', 'None', NULL, 0, 0, 2, '告警规则', 4, 1, 2);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (13, 'wds.linkis.flink.alert.user', '告警用户', '告警用户', NULL, NULL, NULL, 0, 0, 2, '告警用户', 4, 3, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (14, 'wds.linkis.flink.alert.leve', '告警级别', '告警级别', 'CLEARED,INDETERMINATE,WARNING,MINOR,MAJOR,CRITICAL', 'None', NULL, 0, 0, 2, '告警级别', 4, 2, 2);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (15, 'wds.linkis.flink.alert.failure.level', '失败时告警级别', '失败时告警级别', 'CLEARED,INDETERMINATE,WARNING,MINOR,MAJOR,CRITICAL', 'None', NULL, 0, 0, 2, '失败时告警级别', 4, 4, 2);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (16, 'wds.linkis.flink.alert.failure.user', '失败时告警用户', '失败时告警用户', NULL, 'None', NULL, 0, 0, 2, '失败时告警用户', 4, 5, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (17, 'wds.linkis.flink.authority', '权限设置', '权限设置', NULL, 'None', NULL, 0, 0, 1, '权限设置', 5, 0, 1);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (18, 'wds.linkis.flink.authority.author', '授权模式', '授权模式', '私密,指定全员可见,指定人员可见', 'None', NULL, 0, 0, 2, '授权模式', 5, 1, 2);
+INSERT INTO `linkis_stream_configuration_config_key` VALUES (19, 'wds.linkis.flink.authority.visible', '可见人员', '可见人员', NULL, 'None', NULL, 0, 0, 2, '可见人员', 5, 2, 1);
+
+-- ----------------------------
+-- Table structure for linkis_stream_configuration_config_value
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_configuration_config_value`;
+CREATE TABLE `linkis_stream_configuration_config_value` (
+ `id` bigint(20) NOT NULL,
+ `configkey_id` bigint(20) NULL DEFAULT NULL,
+ `config_value` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `type` int(10) NULL DEFAULT NULL,
+ `job_id` bigint(20) NULL DEFAULT NULL,
+ `job_name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `config_key` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ INDEX `key`(`config_key`) USING BTREE,
+ INDEX `keyid`(`configkey_id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '配置信息' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_configuration_config_value
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_frame_version
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_frame_version`;
+CREATE TABLE `linkis_stream_frame_version` (
+ `id` bigint(20) NOT NULL,
+ `frame` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `java_version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '框架信息' ROW_FORMAT = COMPACT;
+
+-- ----------------------------
+-- Records of linkis_stream_frame_version
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job`;
+CREATE TABLE `linkis_stream_job` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `project_id` bigint(20) NULL DEFAULT NULL,
+ `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `type` tinyint(1) NULL DEFAULT NULL,
+ `current_task_id` bigint(20) NULL DEFAULT NULL,
+ `current_version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `current_release_time` datetime NULL DEFAULT NULL,
+ `status` tinyint(1) NULL DEFAULT NULL COMMENT '1:已完成 ,2:等待重启 ,3:告警 ,4:慢任务 ,5:运行中 ,6:失败任务',
+ `org_identification` bigint(20) NULL DEFAULT NULL,
+ `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `label` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `current_released` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '作业表' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_alarm_send_history
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_alarm_send_history`;
+CREATE TABLE `linkis_stream_job_alarm_send_history` (
+ `id` bigint(20) NOT NULL,
+ `job_id` bigint(20) NULL DEFAULT NULL,
+ `task_id` bigint(20) NULL DEFAULT NULL,
+ `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `type` tinyint(1) NULL DEFAULT NULL,
+ `rule_type` tinyint(1) NULL DEFAULT NULL,
+ `content` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '报警历史信息' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_alarm_send_history
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_checkpoints
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_checkpoints`;
+CREATE TABLE `linkis_stream_job_checkpoints` (
+ `id` bigint(20) NOT NULL,
+ `config_value_id` bigint(20) NULL DEFAULT NULL,
+ `path` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `size` int(20) NULL DEFAULT NULL,
+ `status` tinyint(1) NULL DEFAULT NULL,
+ `trigger_timestamp` datetime NULL DEFAULT NULL,
+ `latest_ack_timestamp` datetime NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_checkpoints
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_code_resource
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_code_resource`;
+CREATE TABLE `linkis_stream_job_code_resource` (
+ `id` bigint(20) NOT NULL,
+ `job_version_id` bigint(20) NULL DEFAULT NULL,
+ `bml_version_id` bigint(20) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '其他代码' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_code_resource
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_role
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_role`;
+CREATE TABLE `linkis_stream_job_role` (
+ `id` bigint(20) NOT NULL,
+ `job_id` bigint(20) NULL DEFAULT NULL,
+ `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `front_name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `update_time` datetime NULL DEFAULT NULL,
+ `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_role
+-- ----------------------------
+INSERT INTO `linkis_stream_job_role` VALUES (1, -1, '管理员', '管理员', '2021-04-07 20:57:09', NULL);
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_sql_resource
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_sql_resource`;
+CREATE TABLE `linkis_stream_job_sql_resource` (
+ `id` bigint(20) NOT NULL,
+ `job_version_id` bigint(20) NULL DEFAULT NULL,
+ `execute_sql` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_sql_resource
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_user_role
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_job_user_role`;
+CREATE TABLE `linkis_stream_job_user_role` (
+ `id` bigint(20) NOT NULL,
+ `job_id` bigint(20) NULL DEFAULT NULL,
+ `user_id` bigint(20) NULL DEFAULT NULL,
+ `role_id` bigint(20) NULL DEFAULT NULL,
+ `type` tinyint(1) NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '作业角色关系' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_user_role
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_job_version
+-- ----------------------------
+-- Fix: column name previously contained a leading space (` job_id`), creating a
+-- column literally named " job_id" — every query/join against `job_id` would fail
+-- with "unknown column". Renamed to `job_id`, matching the sibling tables
+-- (linkis_stream_job_user_role, linkis_stream_job_alarm_send_history, ...).
+DROP TABLE IF EXISTS `linkis_stream_job_version`;
+CREATE TABLE `linkis_stream_job_version` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `job_id` bigint(20) NULL DEFAULT NULL,
+ `version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `program_arguments` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `bml_version` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `bml_id` bigint(20) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_job_version
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_project
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_project`;
+CREATE TABLE `linkis_stream_project` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `workspace_id` bigint(20) NULL DEFAULT NULL,
+ `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '项目表' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_project
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_task
+-- ----------------------------
+-- Fix: the CREATE TABLE statement was missing its terminating semicolon, so the
+-- next DROP TABLE statement would be swallowed into it and the whole script fail.
+DROP TABLE IF EXISTS `linkis_stream_task`;
+CREATE TABLE `linkis_stream_task` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `job_version_id` bigint(20) NOT NULL,
+ `job_id` varchar(50) DEFAULT NULL,
+ `version` varchar(50) DEFAULT NULL,
+ `status` int(3) DEFAULT NULL,
+ `start_time` datetime DEFAULT NULL,
+ `last_update_time` datetime DEFAULT NULL,
+ `end_time` datetime DEFAULT NULL,
+ `err_desc` varchar(10240) DEFAULT NULL,
+ `submit_user` varchar(50) DEFAULT NULL,
+ `linkis_job_id` varchar(50) DEFAULT NULL,
+ `linkis_job_info` mediumtext,
+ PRIMARY KEY (`id`) USING BTREE
+ ) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='任务表';
+
+-- ----------------------------
+-- Records of linkis_stream_task
+-- ----------------------------
+
+-- ----------------------------
+-- Table structure for linkis_stream_user
+-- ----------------------------
+DROP TABLE IF EXISTS `linkis_stream_user`;
+CREATE TABLE `linkis_stream_user` (
+ `id` bigint(20) NOT NULL,
+ `username` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE
+) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '用户表' ROW_FORMAT = Compact;
+
+-- ----------------------------
+-- Records of linkis_stream_user
+-- ----------------------------
+INSERT INTO `linkis_stream_user` VALUES (1, 'hdfs', 'hdfs');
+
+SET FOREIGN_KEY_CHECKS = 1;
diff --git "a/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md" "b/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md"
new file mode 100644
index 000000000..6281caee2
--- /dev/null
+++ "b/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md"
@@ -0,0 +1,209 @@
+# Streamis快速入门
+
+## 1. 前言
+
+    本文是Streamis0.2.4的快速入门文档,涵盖了Streamis的基本使用流程,更多的操作使用细节,将会在用户使用文档中提供。
+
+
+## 2. Streamis整合至DSS
+
+ 为了方便用户使用,**Streamis系统以DSS组件的形式嵌入DSS系统中**
+
+##### 2.1 **如何接入?**
+
+按照 [StreamisAppConn安装文档](../development/StreamisAppConn安装文档.md) 安装部署StreamisAppConn成功后,Streamis系统会自动嵌入DSS系统中。
+
+##### 2.2 如何验证 DSS 已经成功集成了 Streamis?
+
+请进入 DSS 的工程首页,创建一个工程
+
+![image-20211230173334826](../../../images/create_stream_product_center.png)
+
+进入到工程里面,点击左上角按钮切换到”流式生产中心“,如果出现streamis的首页,则表示 DSS 已经成功集成了 Streamis。如下图:
+
+![image-20211230173839138](../../../images/stream_product_center.png)
+
+
+## 3. 核心指标
+
+进入到streamis首页,上半部显示的是核心指标。
+
+核心指标显示当前用户可查看到的上传到该项目执行的Flink任务的状态汇总,状态暂时有9种,显示状态名称和处于该状态的任务数量,具体内容如下图。
+
+![核心指标](../../../images/home_page.png)
+
+图 3.1 首页核心指标
+
+# 4. 任务示例
+
+ 主要演示案例从Script FlinkSQL开发,调试到Streamis发布的整个流程。
+
+## 4.1. Script开发SQL
+
+ 顶部Scriptis菜单创建一个脚本文件,脚本类型选择Flink,如下图所示:
+
+![进入FlinkSQL](../../../images/enter_flinksql.png)
+
+![create_script_file.png](../../../images/create_script_file.png)
+
+编写FlinkSQL,source,sink,transform等。
+
+![flinksql_script_file](../../../images/flinksql_script_file.png)
+
+点击运行后,即可调试该脚本
+
+## 4.2. 发布至Streamis
+
+### 4.2.1 打包Streamis Job任务
+
+ 流式应用物料包是指的按照Streamis打包规范,将元数据信息(流式应用描述信息),流式应用代码,流式应用使用到的物料等内容打包成zip包。zip具体格式如下:
+
+ xxx.zip
+ ├── meta.json
+ ├── test.sql
+ ├── test.jar
+ ├── file3
+
+其中,meta.json是StreamisJob的元数据信息,其格式为:
+
+```
+{
+ "projectName": "", # 项目名
+ "jobName": "", # 作业名
+ "jobType": "flink.sql", # 目前只支持flink.sql、flink.jar
+ "tags": "", # 应用标签
+    "description": "",   # 作业描述
+ "jobContent": {
+ # 不同的jobType,其内容各不相同,具体请往下看
+ }
+}
+```
+
+!!!!!**特别需要注意的是:**
+
+    **此处的projectName需要和您dss工程中创建的工程名一致,不然在streamis页面导入ZIP包时,刷新列表后不会显示,因为两者的projectName不一致**
+
+如果jobType为"flink.sql",则jobContent为:
+
+```
+{
+    "type": "",          # file, bml or sql
+ "sql": "select 1",
+ "file": "test.sql",
+ "resourceId": "",
+ "version": ""
+}
+其中,如果type为"file",则只识别file字段;如果type为"sql",则只识别sql字段;如果type为"bml",则只识别resourceId和version字段。
+```
+
+如果jobType为"flink.jar",则jobContent为:
+
+```
+{
+ "main.class.jar": "", # string。main class的jar,如:test.jar
+ "main.class": "", # main class,如 com.webank.Test
+ "args": "", # main class 的入参,即main函数的args,请以空格为分隔符
+    "hdfs.jars": [],        # 依赖的HDFS jars,如:hdfs:///user/hadoop/test1.jar
+ "dependency.jars": [], # 依赖的jars,如:test2.jar
+ "resources": [] # 依赖的资源文件,如:test.properties
+}
+```
+
+### 4.2.2 示例
+
+ streamisjobtest为flinksql文件,meta.json是该任务的元数据信息。
+
+![flinksql_job_use_demo](../../../images/flinksql_job_use_demo.png)
+
+
+
+![flinksql_job_use_demo2](../../../images/flinksql_job_use_demo2.png)
+
+将SQL文件和meta.json文件打包成一个zip文件,注意:只能打包成zip文件,其他格式如rar、7z等格式无法识别。
+
+如果上传zip文件出现下面错误,请调整下nginx的配置`vi /etc/nginx/conf.d/streamis.conf`,添加属性`client_max_body_size`,如下图所示。
+![upload_jobtask_error](../../../images/upload_jobtask_error.png)
+![upload_jobtask_error_solve](../../../images/upload_jobtask_error_solve.png)
+-----
+
+在streamis中将该zip包导入,导入任务后,任务的运行状态变成"未启动",版本会+1(导入新的job任务版本从1开始),最新发布时间会更新至最新时间。
+
+点击相应的作业名称、配置或左边3个竖点中(参数配置/告警配置/运行历史/运行日志)可进入job任务详情,点击 启动 可执行作业。
+
+点击左边3个竖点中 快照【savepoint】 可保存快照。
+
+![job_list](../../../images/job_list.png)
+
+点击批量操作,可选中多个作业任务重启,快照重启会先生成快照再重新启动,直接重启不会生成快照
+
+![jobbulk_operate](../../../images/jobbulk_operate.png)
+
+####
+
+
+# 5、Streamis任务介绍
+
+点击”作业名称“,可查看任务的详情,包括,运行情况、执行历史、配置、任务详情、告警等。
+
+## 5.1 运行情况
+
+![stream_job_detail](../../../images/stream_job_detail.png)
+
+## 5.2 执行历史
+
+打开执行历史可以查看该任务的历史运行情况,
+
+历史日志:只有正在运行的任务才能查看历史日志。
+
+历史日志中可以查看当前任务启动的flink引擎的日志,可以根据关键字等查看关键日志,点击查看最新日志,可以查看当前引擎的最新日志。
+
+![stream_job_history](../../../images/stream_job_history.png)
+
+## 5.3 配置
+
+给Streamis任务配置一些flink资源参数以及checkpoint的参数
+
+![image-20211231101503678](../../../images/stream_job_config_1.png)
+![image-20211231101503678](../../../images/stream_job_config_2.png)
+
+
+
+## 5.4任务详情
+
+
+
+ 任务详情根据任务类型Flink Jar 和 Flink SQL分为两种显示界面。
+
+
+
+- **Flink Jar任务详情**
+
+![任务详情](../../../images/stream_job_flinkjar_jobcontent.png)
+
+ Flink Jar任务详情展示了任务Jar包的内容和参数, 同时提供下载该Jar包的功能。
+
+
+
+- **Flink SQL任务详情**
+
+![任务详情](../../../images/stream_job_flinksql_jobcontent.png)
+
+ Flink SQL任务详情展示了该任务的SQL语句。
+
+
+
+## 5.5 进入Yarn页面
+
+正在运行的Streamis任务可以通过该按钮进入到yarn管理界面上的查看flink任务运行情况。
+
+![image-20211231102020703](../../../images/image-20211231102020703.png)
+
+## 6 工程资源文件
+Streamis首页-核心指标右上角-工程资源文件。
+工程资源文件提供了上传和管理项目所需资源文件的功能,如下图所示:
+
+![project_source_file_list](../../../images/project_source_file_list.png)
+
+上传项目文件
+
+![project_source_file_import](../../../images/project_source_file_import.png)
diff --git a/images/en_US/readme/architecture.png b/images/en_US/readme/architecture.png
new file mode 100644
index 000000000..36a4a1f66
Binary files /dev/null and b/images/en_US/readme/architecture.png differ
diff --git a/images/zh_CN/readme/architecture.png b/images/zh_CN/readme/architecture.png
new file mode 100644
index 000000000..872891e43
Binary files /dev/null and b/images/zh_CN/readme/architecture.png differ
diff --git a/images/zh_CN/readme/communication.png b/images/zh_CN/readme/communication.png
new file mode 100644
index 000000000..12e86727d
Binary files /dev/null and b/images/zh_CN/readme/communication.png differ
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 000000000..48e341a09
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,3 @@
+{
+ "lockfileVersion": 1
+}
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 000000000..d35eae47f
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,323 @@
+
+
+
+
+ 4.0.0
+
+ com.webank.wedatasphere.streamis
+ streamis
+ 0.2.4
+ pom
+
+ Streamis Project Parent POM
+ https://github.com/WeBankFinTech/Streamis
+
+
+
+ Apache 2.0 License
+ http://www.apache.org/licenses/LICENSE-2.0.html
+ repo
+
+
+
+
+
+ streamis-jobmanager
+ streamis-project
+ streamis-server
+ assembly
+ streamis-appconn
+
+
+
+ 1.1.3
+ 4.12
+ 1.1.0
+ 0.2.4
+ 2.11.12
+ 1.8
+ 3.3.3
+ 2.8.5
+ 2.13.2
+ 3.1.1
+ 4.5.4
+ 4.5.4
+ 1.9.4
+ UTF-8
+ 5.2.12.RELEASE
+ 2.1.2
+ 2.3.7.RELEASE
+ 2.2.6.RELEASE
+ 3.1.1
+ 3.8.1
+ 2.6
+ 0.9.10
+ 2.21
+ 1.9.5
+ 1.4.19
+ 0.2.4
+ 5.1.47
+ 2.0.1.Final
+
+
+
+
+
+
+
+ org.scala-lang
+ scala-library
+ ${scala.version}
+
+
+ org.scala-lang
+ scala-compiler
+ ${scala.version}
+
+
+ org.scala-lang
+ scala-reflect
+ ${scala.version}
+
+
+ org.scala-lang
+ scalap
+ ${scala.version}
+
+
+ commons-lang
+ commons-lang
+ ${commons.lang.version}
+
+
+ org.apache.linkis
+ linkis-mybatis
+ ${linkis.version}
+
+
+
+ org.apache.linkis
+ linkis-scheduler
+ ${linkis.version}
+
+
+ org.apache.linkis
+ linkis-module
+
+
+ org.springframework.boot
+ spring-boot-starter-tomcat
+
+
+ hibernate-validator
+ org.hibernate.validator
+
+
+ ${linkis.version}
+
+
+ org.apache.linkis
+ linkis-common
+ ${linkis.version}
+
+
+
+ org.apache.linkis
+ linkis-protocol
+ ${linkis.version}
+
+
+
+ com.google.code.gson
+ gson
+ ${gson.version}
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+ ${fasterxml.jackson.version}
+
+
+ org.apache.commons
+ commons-math3
+ ${commons.math.version}
+
+
+ xstream
+ com.thoughtworks.xstream
+ ${xstream.version}
+
+
+ javax.validation
+ validation-api
+ ${validation.api.version}
+
+
+
+
+
+
+ release
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+ 3.1.0
+
+ true
+
+
+
+ create-source-jar
+
+ jar-no-fork
+ test-jar-no-fork
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+ 3.0.0-M1
+
+
+ org.apache.maven.plugins
+ maven-gpg-plugin
+ 1.5
+
+
+ sign-artifacts
+ verify
+
+ sign
+
+
+
+
+
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+ 2.8.2
+
+
+ org.apache.maven.plugins
+ maven-enforcer-plugin
+ 1.4.1
+
+
+ enforce-versions
+
+ enforce
+
+
+
+
+ ${maven.version}
+
+
+ ${java.version}
+
+
+
+ org.jboss.netty
+
+ true
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 2.5.1
+
+ ${jdk.compile.version}
+ ${jdk.compile.version}
+
+
+
+ org.apache.maven.plugins
+ maven-site-plugin
+ 3.3
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+ 3.2.2
+
+
+ eclipse-add-source
+
+ add-source
+
+
+
+ scala-compile-first
+ process-resources
+
+ compile
+
+
+
+ scala-test-compile-first
+ process-test-resources
+
+ testCompile
+
+
+
+ attach-scaladocs
+ verify
+
+ doc-jar
+
+
+
+
+ ${scala.version}
+ incremental
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+ 2.6
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-appconn/pom.xml b/streamis-appconn/pom.xml
new file mode 100644
index 000000000..bf07ee0dc
--- /dev/null
+++ b/streamis-appconn/pom.xml
@@ -0,0 +1,135 @@
+
+
+
+ streamis
+ com.webank.wedatasphere.streamis
+ 0.2.4
+
+ 4.0.0
+
+ streamis-appconn
+
+
+ 8
+ 8
+
+
+
+
+ com.webank.wedatasphere.dss
+ dss-appconn-core
+ ${dss.version}
+
+
+ linkis-common
+ org.apache.linkis
+
+
+ json4s-jackson_2.11
+ org.json4s
+
+
+ scala-compiler
+ org.scala-lang
+
+
+ scala-library
+ org.scala-lang
+
+
+ scala-reflect
+ org.scala-lang
+
+
+ scalap
+ org.scala-lang
+
+
+ dss-common
+ com.webank.wedatasphere.dss
+
+
+
+
+ org.apache.linkis
+ linkis-common
+ ${linkis.version}
+ provided
+
+
+ org.apache.linkis
+ linkis-module
+ ${linkis.version}
+ provided
+
+
+ com.webank.wedatasphere.dss
+ dss-common
+ ${dss.version}
+ provided
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+ org.apache.maven.plugins
+ maven-assembly-plugin
+ 2.3
+ false
+
+
+ make-assembly
+ package
+
+ single
+
+
+
+ src/main/assembly/distribution.xml
+
+
+
+
+
+ false
+ streamis
+ false
+ false
+
+ src/main/assembly/distribution.xml
+
+
+
+
+
+
+ src/main/java
+
+ **/*.xml
+
+
+
+ src/main/resources
+
+ **/application.yml
+ **/bootstrap.yml
+ **/log4j2.xml
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-appconn/src/main/assembly/distribution.xml b/streamis-appconn/src/main/assembly/distribution.xml
new file mode 100644
index 000000000..b82589846
--- /dev/null
+++ b/streamis-appconn/src/main/assembly/distribution.xml
@@ -0,0 +1,62 @@
+
+
+
+ dss-streamis-appconn
+
+ zip
+
+ true
+ streamis
+
+
+
+ lib
+ true
+ true
+ false
+ true
+ true
+
+
+
+
+
+
+ ${basedir}/src/main/resources
+
+ appconn.properties
+
+ 0777
+ conf
+ unix
+
+
+
+ ${basedir}/src/main/resources
+
+ init.sql
+
+ 0777
+ db
+
+
+
\ No newline at end of file
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/StreamisAppConn.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/StreamisAppConn.java
new file mode 100644
index 000000000..c28dc6d7c
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/StreamisAppConn.java
@@ -0,0 +1,21 @@
+package com.webank.wedatasphere.streamis.dss.appconn;
+
+import com.webank.wedatasphere.dss.appconn.core.ext.SecondlyAppConn;
+import com.webank.wedatasphere.dss.appconn.core.impl.AbstractOnlySSOAppConn;
+import com.webank.wedatasphere.dss.standard.app.structure.StructureIntegrationStandard;
+import com.webank.wedatasphere.streamis.dss.appconn.structure.StreamisStructureIntegrationStandard;
+
+/**
+ * DSS AppConn entry point for Streamis. Extends the SSO-only base AppConn and
+ * additionally exposes a structure integration standard (project lifecycle
+ * operations) via {@link SecondlyAppConn}.
+ */
+public class StreamisAppConn extends AbstractOnlySSOAppConn implements SecondlyAppConn {
+
+    // Created once in initialize(); holds the Streamis project service factory.
+    private StreamisStructureIntegrationStandard structureIntegrationStandard;
+
+    /**
+     * Returns the structure standard built in {@link #initialize()}.
+     * NOTE(review): returns null if called before initialize() — assumes the DSS
+     * framework always runs initialize() first; confirm against the AppConn loader.
+     */
+    @Override
+    public StructureIntegrationStandard getOrCreateStructureStandard() {
+        return structureIntegrationStandard;
+    }
+
+    /** Framework lifecycle hook: builds the Streamis structure standard. */
+    @Override
+    protected void initialize() {
+        structureIntegrationStandard = new StreamisStructureIntegrationStandard();
+    }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java
new file mode 100644
index 000000000..46507753f
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java
@@ -0,0 +1,18 @@
+package com.webank.wedatasphere.streamis.dss.appconn.constraints;
+
+import org.apache.linkis.common.conf.CommonVars;
+
+/**
+ * use Constraints class to manage the constant value
+ */
+public class Constraints {
+
+ // AppConn name
+ public static final String STREAMIS_APPCONN_NAME = CommonVars.apply("wds.dss.appconn.streamis.name", "Streamis").getValue();
+
+ public static final String STREAMIS_SERVER_VERSION = CommonVars.apply("wds.dss.appconn.streamis.server.version", "v1").getValue();
+
+ public static final String API_REQUEST_PREFIX = CommonVars.apply("wds.dss.appconn.streamis.api.request-prefix", "/api/rest_j/"+STREAMIS_SERVER_VERSION+"/streamis/project").getValue();
+
+
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/exception/StreamisAppConnErrorException.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/exception/StreamisAppConnErrorException.java
new file mode 100644
index 000000000..6765b3d89
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/exception/StreamisAppConnErrorException.java
@@ -0,0 +1,14 @@
+package com.webank.wedatasphere.streamis.dss.appconn.exception;
+
+import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException;
+
+/**
+ * Failure raised by Streamis AppConn operations; carries an error code and is a
+ * subtype of ExternalOperationFailedException so DSS handles it like any other
+ * external-operation failure.
+ */
+public class StreamisAppConnErrorException extends ExternalOperationFailedException {
+
+ public StreamisAppConnErrorException(int errorCode, String message) {
+ super(errorCode, message);
+ }
+
+ // Cause-preserving variant: keeps the underlying exception attached.
+ public StreamisAppConnErrorException(int errorCode, String message, Throwable cause) {
+ super(errorCode, message, cause);
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/StreamisStructureIntegrationStandard.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/StreamisStructureIntegrationStandard.java
new file mode 100644
index 000000000..43e44c8b4
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/StreamisStructureIntegrationStandard.java
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure;
+
+import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureIntegrationStandard;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectService;
+import com.webank.wedatasphere.streamis.dss.appconn.structure.project.StreamisProjectService;
+
+/**
+ * Structure integration standard
+ */
+public class StreamisStructureIntegrationStandard extends AbstractStructureIntegrationStandard {
+
+ /**
+ * Factory hook invoked by the parent standard. Each call constructs a new
+ * StreamisProjectService; the original comment said "Singleton", which
+ * presumably means the parent caches the result — confirm in
+ * AbstractStructureIntegrationStandard.
+ * @return project service handling create/update/delete/search operations
+ */
+ @Override
+ protected ProjectService createProjectService() {
+ return new StreamisProjectService();
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisPrejectDeleteOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisPrejectDeleteOperation.java
new file mode 100644
index 000000000..bfbb8a36a
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisPrejectDeleteOperation.java
@@ -0,0 +1,38 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.project;
+
+import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSDeleteAction;
+import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectDeletionOperation;
+import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef;
+import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException;
+import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectContentReqRef;
+import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil;
+
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX;
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME;
+
+/**
+ * Deletes the Streamis project bound to a DSS project via the
+ * {API_REQUEST_PREFIX}/deleteProject endpoint.
+ * NOTE(review): class name contains a typo ("Preject" for "Project"); renaming
+ * would also touch the file name and StreamisProjectService, so it is only
+ * flagged here.
+ */
+public class StreamisPrejectDeleteOperation extends AbstractStructureOperation
+ implements ProjectDeletionOperation {
+
+ // Full deleteProject endpoint URL, resolved once in init()
+ private String projectUrl;
+
+ @Override
+ protected String getAppConnName() {
+ return STREAMIS_APPCONN_NAME;
+ }
+
+ @Override
+ public ResponseRef deleteProject(StreamisProjectContentReqRef refProjectContentRequestRef) throws ExternalOperationFailedException {
+ DSSDeleteAction deleteAction = new DSSDeleteAction();
+ // Issued as the DSS user; Streamis identifies the project by its ref project id.
+ deleteAction.setUser(refProjectContentRequestRef.getUserName());
+ deleteAction.setParameter("projectId", refProjectContentRequestRef.getRefProjectId());
+ return StreamisCommonUtil.getInternalResponseRef(refProjectContentRequestRef, ssoRequestOperation, projectUrl, deleteAction);
+ }
+
+ @Override
+ public void init() {
+ super.init();
+ projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "deleteProject"));
+ }
+
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java
new file mode 100644
index 000000000..e8d000c2b
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java
@@ -0,0 +1,57 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.project;
+
+import com.webank.wedatasphere.dss.common.entity.project.DSSProject;
+import com.webank.wedatasphere.dss.common.utils.DSSCommonUtils;
+import com.webank.wedatasphere.dss.standard.app.sso.Workspace;
+import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction;
+import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectCreationOperation;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectContentRequestRef;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectPrivilege;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectResponseRef;
+import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef;
+import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException;
+import com.webank.wedatasphere.streamis.dss.appconn.exception.StreamisAppConnErrorException;
+import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil;
+
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX;
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME;
+
+/**
+ * Creates a Streamis project for a DSS project via POST
+ * {API_REQUEST_PREFIX}/createProject and returns the new ref project id.
+ */
+public class StreamisProjectCreationOperation extends AbstractStructureOperation
+ implements ProjectCreationOperation {
+
+ // Full createProject endpoint URL, resolved once in init()
+ private String projectUrl;
+
+ @Override
+ protected String getAppConnName() {
+ return STREAMIS_APPCONN_NAME;
+ }
+
+ @Override
+ public ProjectResponseRef createProject(DSSProjectContentRequestRef.DSSProjectContentRequestRefImpl dssProjectContentRequestRef) throws ExternalOperationFailedException {
+ DSSPostAction streamisPostAction = new DSSPostAction();
+ streamisPostAction.setUser(dssProjectContentRequestRef.getUserName());
+ DSSProject dssProject = dssProjectContentRequestRef.getDSSProject();
+ Workspace workspace = dssProjectContentRequestRef.getWorkspace();
+ DSSProjectPrivilege dssProjectPrivilege = dssProjectContentRequestRef.getDSSProjectPrivilege();
+ if(dssProject == null || dssProjectPrivilege == null){
+ //TODO error code need to amend
+ // NOTE(review): -1 here vs 600500 in StreamisProjectUpdateOperation for the
+ // same condition — unify the error code when amending.
+ throw new StreamisAppConnErrorException(-1, "the dssProject or dssProjectPrivilege is null");
+ }
+ // workspace is deliberately not null-checked above, so workspaceId may be sent as null.
+ streamisPostAction.addRequestPayload("projectName",dssProject.getName());
+ streamisPostAction.addRequestPayload("workspaceId", workspace==null?null:workspace.getWorkspaceId());
+ streamisPostAction.addRequestPayload("releaseUsers",dssProjectPrivilege.getReleaseUsers());
+ streamisPostAction.addRequestPayload("editUsers",dssProjectPrivilege.getEditUsers());
+ streamisPostAction.addRequestPayload("accessUsers",dssProjectPrivilege.getAccessUsers());
+ InternalResponseRef responseRef = StreamisCommonUtil.getInternalResponseRef(dssProjectContentRequestRef, ssoRequestOperation, projectUrl, streamisPostAction);
+ // Streamis returns the created project's id in the response data under "projectId".
+ Long projectId = DSSCommonUtils.parseToLong(responseRef.getData().get("projectId"));
+ return ProjectResponseRef.newExternalBuilder()
+ .setRefProjectId(projectId).success();
+ }
+
+ @Override
+ public void init() {
+ super.init();
+ projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "createProject"));
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectSearchOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectSearchOperation.java
new file mode 100644
index 000000000..097bd484b
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectSearchOperation.java
@@ -0,0 +1,45 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.project;
+
+import com.webank.wedatasphere.dss.common.utils.DSSCommonUtils;
+import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSGetAction;
+import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectSearchOperation;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectResponseRef;
+import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef;
+import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException;
+import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectContentReqRef;
+import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil;
+
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX;
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME;
+
+/**
+ * Looks up a Streamis project by name via GET {API_REQUEST_PREFIX}/searchProject.
+ */
+public class StreamisProjectSearchOperation extends AbstractStructureOperation
+ implements ProjectSearchOperation {
+
+ // Full searchProject endpoint URL, resolved once in init()
+ private String projectUrl;
+
+ @Override
+ protected String getAppConnName() {
+ return STREAMIS_APPCONN_NAME;
+ }
+
+ @Override
+ public ProjectResponseRef searchProject(StreamisProjectContentReqRef streamisProjectContentReqRef) throws ExternalOperationFailedException {
+ DSSGetAction getAction = new DSSGetAction();
+ getAction.setUser(streamisProjectContentReqRef.getUserName());
+ getAction.setParameter("projectName",streamisProjectContentReqRef.getProjectName());
+ InternalResponseRef responseRef = StreamisCommonUtil.getInternalResponseRef(streamisProjectContentReqRef, ssoRequestOperation, projectUrl, getAction);
+ // A missing "projectId" in the response data presumably means no matching
+ // project: return a success ref WITHOUT a ref project id so the caller can
+ // distinguish "not found" from a real hit.
+ if(responseRef.getData().get("projectId")==null){
+ return ProjectResponseRef.newExternalBuilder().success();
+ }
+ Long projectId = DSSCommonUtils.parseToLong(responseRef.getData().get("projectId"));
+ return ProjectResponseRef.newExternalBuilder()
+ .setRefProjectId(projectId).success();
+ }
+
+ @Override
+ public void init() {
+ super.init();
+ projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "searchProject"));
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectService.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectService.java
new file mode 100644
index 000000000..ce5a7fa7d
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectService.java
@@ -0,0 +1,32 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.project;
+
+import com.webank.wedatasphere.dss.standard.app.structure.project.*;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectContentRequestRef;
+import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectContentReqRef;
+import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectUpdateReqRef;
+
+/**
+ * Streamis project service: wires the four project operations
+ * (create / update / delete / search) into the DSS structure standard.
+ */
+public class StreamisProjectService extends ProjectService {
+
+ @Override
+ protected ProjectCreationOperation createProjectCreationOperation() {
+ return new StreamisProjectCreationOperation();
+ }
+
+ @Override
+ protected ProjectUpdateOperation createProjectUpdateOperation() {
+ return new StreamisProjectUpdateOperation();
+ }
+
+ @Override
+ protected ProjectDeletionOperation createProjectDeletionOperation() {
+ // "Preject" typo belongs to the delete operation's own class/file name; kept as-is here.
+ return new StreamisPrejectDeleteOperation();
+ }
+
+ @Override
+ protected ProjectSearchOperation createProjectSearchOperation() {
+ return new StreamisProjectSearchOperation();
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectUpdateOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectUpdateOperation.java
new file mode 100644
index 000000000..fe4ca033d
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectUpdateOperation.java
@@ -0,0 +1,56 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.project;
+
+import com.webank.wedatasphere.dss.common.entity.project.DSSProject;
+import com.webank.wedatasphere.dss.standard.app.sso.Workspace;
+import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPutAction;
+import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectUpdateOperation;
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectPrivilege;
+import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef;
+import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException;
+import com.webank.wedatasphere.streamis.dss.appconn.exception.StreamisAppConnErrorException;
+import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectUpdateReqRef;
+import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil;
+
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX;
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME;
+
+/**
+ * Updates the Streamis project bound to a DSS project via PUT
+ * {API_REQUEST_PREFIX}/updateProject (project name, workspace, privilege users).
+ */
+public class StreamisProjectUpdateOperation extends AbstractStructureOperation
+ implements ProjectUpdateOperation {
+
+ // Full updateProject endpoint URL, resolved once in init()
+ private String projectUrl;
+
+ @Override
+ protected String getAppConnName() {
+ return STREAMIS_APPCONN_NAME;
+ }
+
+ @Override
+ public ResponseRef updateProject(StreamisProjectUpdateReqRef projectUpdateRequestRef) throws ExternalOperationFailedException {
+ DSSPutAction updateAction = new DSSPutAction();
+ updateAction.setUser(projectUpdateRequestRef.getUserName());
+ DSSProject dssProject = projectUpdateRequestRef.getDSSProject();
+ DSSProjectPrivilege dssProjectPrivilege = projectUpdateRequestRef.getDSSProjectPrivilege();
+ Workspace workspace = projectUpdateRequestRef.getWorkspace();
+ if(dssProject == null || dssProjectPrivilege == null){
+ // 600500 here vs -1 in the creation operation for the same condition — see its TODO.
+ throw new StreamisAppConnErrorException(600500, "the dssProject or dssProjectPrivilege is null");
+ }
+ // projectId identifies the existing Streamis project; workspace is not
+ // null-checked, so workspaceId may be sent as null.
+ updateAction.addRequestPayload("projectId",projectUpdateRequestRef.getRefProjectId());
+ updateAction.addRequestPayload("projectName",dssProject.getName());
+ updateAction.addRequestPayload("workspaceId", workspace==null?null:workspace.getWorkspaceId());
+ updateAction.addRequestPayload("releaseUsers",dssProjectPrivilege.getReleaseUsers());
+ updateAction.addRequestPayload("editUsers",dssProjectPrivilege.getEditUsers());
+ updateAction.addRequestPayload("accessUsers",dssProjectPrivilege.getAccessUsers());
+ return StreamisCommonUtil.getInternalResponseRef(projectUpdateRequestRef, ssoRequestOperation, projectUrl, updateAction);
+ }
+
+
+ @Override
+ public void init() {
+ super.init();
+ projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "updateProject"));
+ }
+
+}
+
+
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java
new file mode 100644
index 000000000..3f3421948
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java
@@ -0,0 +1,13 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.ref;
+
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.RefProjectContentRequestRef;
+
+/**
+ * Request ref type consumed by the Streamis project search and delete
+ * operations; content accessors (user name, project name, ref project id)
+ * come from the inherited/implemented DSS types.
+ */
+public class StreamisProjectContentReqRef extends StreamisStructureReqRef
+ implements RefProjectContentRequestRef {
+ public StreamisProjectContentReqRef(){
+
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java
new file mode 100644
index 000000000..3bd209dc2
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java
@@ -0,0 +1,12 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.ref;
+
+import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectUpdateRequestRef;
+
+/**
+ * Request ref type consumed by the Streamis project update operation; update
+ * payload accessors come from the inherited/implemented DSS types.
+ */
+public class StreamisProjectUpdateReqRef extends StreamisStructureReqRef implements ProjectUpdateRequestRef {
+ public StreamisProjectUpdateReqRef(){
+
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisStructureReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisStructureReqRef.java
new file mode 100644
index 000000000..b5ab1f330
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisStructureReqRef.java
@@ -0,0 +1,7 @@
+package com.webank.wedatasphere.streamis.dss.appconn.structure.ref;
+
+import com.webank.wedatasphere.dss.standard.app.structure.StructureRequestRefImpl;
+
+/**
+ * Common base for Streamis structure request refs.
+ * NOTE(review): the self-bounded generic parameter was evidently stripped by
+ * markup mangling (the original declaration ended in a stray ">" followed by
+ * "StructureRequestRefImpl{", which is not valid Java). Reconstructed here in
+ * the standard DSS curiously-recurring form — confirm against the DSS
+ * standard's StructureRequestRefImpl signature.
+ */
+public abstract class StreamisStructureReqRef<R extends StreamisStructureReqRef<R>> extends StructureRequestRefImpl<R> {
+ // Extend the structure request ref
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java
new file mode 100644
index 000000000..db22cf871
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java
@@ -0,0 +1,11 @@
+package com.webank.wedatasphere.streamis.dss.appconn.utils;
+
+public class NumberUtils {
+
+ /**
+ * Converts a loosely-typed numeric value (e.g. parsed from JSON, where
+ * numbers typically arrive as Double) to an Integer.
+ *
+ * Generalized from the original Double-only branch: any java.lang.Number
+ * (Long, Float, BigDecimal, ...) is now accepted via Number.intValue(),
+ * which previously threw ClassCastException. Behavior for Double and
+ * Integer inputs is unchanged, as is the null passthrough and the
+ * ClassCastException for non-numeric input.
+ *
+ * @param original nullable numeric value
+ * @return the int value, or null when original is null
+ */
+ public static Integer getInt(Object original) {
+ if (original instanceof Integer) {
+ return (Integer) original;
+ }
+ if (original instanceof Number) {
+ // Covers Double (original behavior) plus Long/Float/BigDecimal etc.
+ return ((Number) original).intValue();
+ }
+ // Preserve original behavior: null stays null; anything else throws ClassCastException.
+ return (Integer) original;
+ }
+}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java
new file mode 100644
index 000000000..2db1d396e
--- /dev/null
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java
@@ -0,0 +1,61 @@
+package com.webank.wedatasphere.streamis.dss.appconn.utils;
+
+import com.webank.wedatasphere.dss.standard.app.sso.builder.SSOUrlBuilderOperation;
+import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSHttpAction;
+import com.webank.wedatasphere.dss.standard.app.sso.ref.WorkspaceRequestRef;
+import com.webank.wedatasphere.dss.standard.app.sso.request.SSORequestOperation;
+import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef;
+import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef;
+import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRefBuilder;
+import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException;
+import com.webank.wedatasphere.dss.standard.sso.utils.SSOHelper;
+import org.apache.linkis.httpclient.request.HttpAction;
+import org.apache.linkis.httpclient.response.HttpResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME;
+
+/**
+ * Shared helpers for issuing SSO-authenticated HTTP requests from the Streamis
+ * AppConn and validating the responses.
+ */
+public class StreamisCommonUtil {
+
+ private final static Logger logger = LoggerFactory.getLogger(StreamisCommonUtil.class);
+
+ /**
+ * Builds an SSO URL-builder operation bound to the request's workspace, the
+ * Streamis AppConn name, and the target url.
+ */
+ public static SSOUrlBuilderOperation getSSOUrlBuilderOperation(WorkspaceRequestRef requestRef, String url) {
+ SSOUrlBuilderOperation ssoUrlBuilderOperation = SSOHelper.createSSOUrlBuilderOperation(requestRef.getWorkspace());
+ ssoUrlBuilderOperation.setAppName(STREAMIS_APPCONN_NAME);
+ ssoUrlBuilderOperation.setReqUrl(url);
+ return ssoUrlBuilderOperation;
+ }
+
+ /**
+ * Executes the given HTTP action against url through the SSO request operation.
+ * Any failure is wrapped (cause preserved) in ExternalOperationFailedException 90177.
+ * NOTE(review): the message says "Create streamis node" but this helper serves
+ * create/update/delete/search alike — consider a more generic message.
+ */
+ public static HttpResult getHttpResult(WorkspaceRequestRef requestRef,
+ SSORequestOperation ssoRequestOperation,
+ String url,
+ DSSHttpAction streamisHttpAction) throws ExternalOperationFailedException {
+
+ try {
+ SSOUrlBuilderOperation ssoUrlBuilderOperation = getSSOUrlBuilderOperation(requestRef, url);
+ streamisHttpAction.setUrl(ssoUrlBuilderOperation.getBuiltUrl());
+ return ssoRequestOperation.requestWithSSO(ssoUrlBuilderOperation, streamisHttpAction);
+ } catch (Exception e) {
+ throw new ExternalOperationFailedException(90177, "Create streamis node Exception", e);
+ }
+ }
+
+ /**
+ * Executes the request, logs the raw body, parses it into an
+ * InternalResponseRef, and fails fast (via checkResponseRef) on status != 0.
+ */
+ public static InternalResponseRef getInternalResponseRef(WorkspaceRequestRef requestRef,
+ SSORequestOperation ssoRequestOperation,
+ String url,
+ DSSHttpAction streamisHttpAction) throws ExternalOperationFailedException {
+ HttpResult httpResult = getHttpResult(requestRef, ssoRequestOperation, url, streamisHttpAction);
+ logger.info("responseBody from streamis is {}",httpResult.getResponseBody());
+ InternalResponseRef responseRef = new ResponseRefBuilder.InternalResponseRefBuilder().setResponseBody(httpResult.getResponseBody()).build();
+ checkResponseRef(responseRef);
+ return responseRef;
+ }
+
+ // A non-zero status marks a server-side failure: log the full body, raise 90177
+ // with the server's error message.
+ public static void checkResponseRef(ResponseRef responseRef) throws ExternalOperationFailedException {
+ if (responseRef.getStatus() != 0 ) {
+ logger.error(responseRef.getResponseBody());
+ throw new ExternalOperationFailedException(90177, responseRef.getErrorMsg(), null);
+ }
+ }
+}
diff --git a/streamis-appconn/src/main/resources/appconn.properties b/streamis-appconn/src/main/resources/appconn.properties
new file mode 100644
index 000000000..a9a9181b9
--- /dev/null
+++ b/streamis-appconn/src/main/resources/appconn.properties
@@ -0,0 +1,14 @@
+#/*
+#* Copyright 2021 WeBank
+#* Licensed under the Apache License, Version 2.0 (the "License");
+#* you may not use this file except in compliance with the License.
+#* You may obtain a copy of the License at
+#*
+#* http://www.apache.org/licenses/LICENSE-2.0
+#*
+#* Unless required by applicable law or agreed to in writing, software
+#* distributed under the License is distributed on an "AS IS" BASIS,
+#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#* See the License for the specific language governing permissions and
+#* limitations under the License.
+#*/
\ No newline at end of file
diff --git a/streamis-appconn/src/main/resources/init.sql b/streamis-appconn/src/main/resources/init.sql
new file mode 100644
index 000000000..6d9224ba6
--- /dev/null
+++ b/streamis-appconn/src/main/resources/init.sql
@@ -0,0 +1,40 @@
+-- Streamis AppConn bootstrap script for DSS. Re-runnable: existing rows for the
+-- 'streamis' and 'realTimeJobCenter' appconns are deleted before re-insertion.
+-- APPCONN_INSTALL_IP / APPCONN_INSTALL_PORT are install-time placeholders.
+-- Uses MySQL session variables (select @var := ...) to carry ids between statements.
+delete from `dss_workspace_dictionary` WHERE `workspace_id` = '0' and `dic_key` = 'pdp_streamis_product_center';
+
+-- Workspace dictionary entry exposing the "Streamis Product Center" menu item.
+INSERT INTO `dss_workspace_dictionary` ( `workspace_id`, `parent_key`, `dic_name`, `dic_name_en`, `dic_key`, `dic_value`, `dic_value_en`, `title`, `title_en`, `url`, `url_type`,`icon`, `order_num`, `remark`, `create_user`, `create_time`, `update_user`, `update_time`)
+VALUES ('0','p_develop_process','流式生产中心','Streamis Product Center','pdp_streamis_product_center','streamis_prod',NULL,NULL,NULL,
+'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realtimeJobCenter?projectName=${projectName}&workspaceName=${workspaceName}','0','kaifa-icon','1','工程开发流程-流式生产中心','SYSTEM','2020-12-28 17:32:35',NULL,'2022-06-30 17:49:02');
+
+-- Remove any previous 'streamis' appconn registration (menu rows first, then
+-- instances, then the appconn itself).
+select @old_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis';
+
+delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_dss_appconn_id;
+delete from `dss_appconn_instance` where `appconn_id` = @old_dss_appconn_id;
+delete from `dss_appconn` where `appconn_name`='streamis';
+
+-- Same cleanup for the 'realTimeJobCenter' appconn.
+select @old_jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter';
+
+delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_jobcenter_dss_appconn_id;
+delete from `dss_appconn_instance` where `appconn_id` = @old_jobcenter_dss_appconn_id;
+delete from `dss_appconn` where `appconn_name`='realTimeJobCenter';
+
+-- Register both appconns; 'streamis' carries the AppConn implementation class,
+-- 'realTimeJobCenter' is an SSO-only iframe entry (reference = 'sso').
+INSERT INTO dss_appconn
+(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource)
+VALUES('streamis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.streamis.dss.appconn.StreamisAppConn', NULL, NULL);
+INSERT INTO dss_appconn
+(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource)
+VALUES('realTimeJobCenter', 0, 1, 1, 1, 'sso', '', NULL, NULL);
+
+-- Capture the freshly generated ids for the menu/instance inserts below.
+select @dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis';
+select @jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter';
+
+INSERT INTO dss_workspace_menu_appconn
+(appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image)
+VALUES(@jobcenter_dss_appconn_id, 1, 'StreamSQL development', 'StreamSQL开发', 'Real-time application development is a streaming solution jointly built by WeDataSphere, Boss big data team and China Telecom ctcloud Big data team.', '实时应用开发是微众银行微数域(WeDataSphere)、Boss直聘大数据团队 和 中国电信天翼云大数据团队 社区联合共建的流式解决方案,以 Linkis 做为内核,基于 Flink Engine 构建的批流统一的 Flink SQL,助力实时化转型。',
+'streaming, realtime', '流式,实时', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'http://127.0.0.1:8088/wiki/scriptis/manual/workspace_cn.html', 'shujukaifa-logo', NULL, NULL, NULL, NULL, NULL, 'shujukaifa-icon');
+
+INSERT INTO dss_appconn_instance
+(appconn_id, label, url, enhance_json, homepage_uri)
+VALUES(@dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter');
+
+INSERT INTO dss_appconn_instance
+(appconn_id, label, url, enhance_json, homepage_uri)
+VALUES(@jobcenter_dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter', NULL, NULL);
\ No newline at end of file
diff --git a/streamis-jobmanager/pom.xml b/streamis-jobmanager/pom.xml
new file mode 100644
index 000000000..72529fb40
--- /dev/null
+++ b/streamis-jobmanager/pom.xml
@@ -0,0 +1,39 @@
+
+
+
+
+
+ streamis
+ com.webank.wedatasphere.streamis
+ 0.2.4
+
+ 4.0.0
+
+ streamis-jobmanager
+ pom
+
+ streamis-jobmanager-common
+ streamis-job-launcher
+ streamis-job-manager
+ streamis-jobmanager-server
+ streamis-projectmanager-server
+ streamis-job-log
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/pom.xml b/streamis-jobmanager/streamis-job-launcher/pom.xml
new file mode 100755
index 000000000..b26fa26b7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/pom.xml
@@ -0,0 +1,35 @@
+
+
+
+
+
+ streamis-jobmanager
+ com.webank.wedatasphere.streamis
+ 0.2.4
+
+ 4.0.0
+
+ streamis-job-launcher
+ pom
+
+
+ streamis-job-launcher-base
+ streamis-job-launcher-service
+ streamis-job-launcher-linkis
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml
new file mode 100755
index 000000000..9d95cfbd0
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml
@@ -0,0 +1,61 @@
+
+
+
+
+
+ streamis-jobmanager
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ streamis-job-launcher-base
+
+
+
+ 8
+ 8
+
+
+
+
+ org.apache.linkis
+ linkis-common
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+ ${project.artifactId}-${project.version}
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java
new file mode 100644
index 000000000..3f6e2ce4e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java
@@ -0,0 +1,251 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+/**
+ * Job configuration definition
+ */
+public class JobConfDefinition {
+ /**
+ * Id
+ */
+ private Long id;
+
+ /**
+ * keyword
+ */
+ private String key;
+
+ /**
+ * Display name equals 'option'
+ */
+ private String name;
+
+ /**
+ * Type: NONE: 0, INPUT: 1, SELECT: 2, NUMBER: 3
+ */
+ private String type;
+
+ /**
+ * Sort
+ */
+ private Integer sort;
+
+ /**
+ * Description
+ */
+ private String description;
+
+ /**
+ * Validate type
+ */
+ private String validateType;
+
+ /**
+ * Validate rule
+ */
+ private String validateRule;
+
+ /**
+ * Style (Json/html/css)
+ */
+ private String style;
+
+ /**
+ * Visibility flag, defaults to visible (1). NOTE(review): field/getter keep
+ * the original 'visiable' spelling because the accessors are public API.
+ */
+ private int visiable = 1;
+
+ /**
+ * Level
+ */
+ private int level = 1;
+
+ /**
+ * Unit symbol
+ */
+ private String unit;
+
+ /**
+ * Default value
+ */
+ private String defaultValue;
+
+ /**
+ * Refer values
+ */
+ private String refValues;
+
+ /**
+ * Parent ref
+ */
+ private Long parentRef;
+
+ /**
+ * Is required
+ */
+ private boolean required;
+
+ // Flag field; semantics not evident from this class — TODO confirm what 'mark' denotes.
+ private boolean mark;
+
+ public JobConfDefinition(){
+
+ }
+
+ // Convenience constructor for the identifying subset of fields.
+ // NOTE(review): 'level' is unboxed into an int field, so passing null throws NPE.
+ public JobConfDefinition(Long id, String key,
+ String type, Long parentRef, Integer level){
+ this.id = id;
+ this.key = key;
+ this.type = type;
+ this.parentRef = parentRef;
+ this.level = level;
+ }
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public void setKey(String key) {
+ this.key = key;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public Integer getSort() {
+ return sort;
+ }
+
+ public void setSort(Integer sort) {
+ this.sort = sort;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getValidateType() {
+ return validateType;
+ }
+
+ public void setValidateType(String validateType) {
+ this.validateType = validateType;
+ }
+
+ public String getValidateRule() {
+ return validateRule;
+ }
+
+ public void setValidateRule(String validateRule) {
+ this.validateRule = validateRule;
+ }
+
+ public String getStyle() {
+ return style;
+ }
+
+ public void setStyle(String style) {
+ this.style = style;
+ }
+
+ public int getVisiable() {
+ return visiable;
+ }
+
+ public void setVisiable(int visiable) {
+ this.visiable = visiable;
+ }
+
+ public int getLevel() {
+ return level;
+ }
+
+ public void setLevel(int level) {
+ this.level = level;
+ }
+
+ public String getDefaultValue() {
+ return defaultValue;
+ }
+
+ public void setDefaultValue(String defaultValue) {
+ this.defaultValue = defaultValue;
+ }
+
+ public String getRefValues() {
+ return refValues;
+ }
+
+ public void setRefValues(String refValues) {
+ this.refValues = refValues;
+ }
+
+ public Long getParentRef() {
+ return parentRef;
+ }
+
+ public void setParentRef(Long parentRef) {
+ this.parentRef = parentRef;
+ }
+
+ public String getUnit() {
+ return unit;
+ }
+
+ public void setUnit(String unit) {
+ this.unit = unit;
+ }
+
+ public boolean isRequired() {
+ return required;
+ }
+
+ public void setRequired(boolean required) {
+ this.required = required;
+ }
+
+ public boolean isMark() {
+ return mark;
+ }
+
+ public void setMark(boolean mark) {
+ this.mark = mark;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfValue.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfValue.java
new file mode 100644
index 000000000..208ddf35a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfValue.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+/**
+ * Job conf value
+ */
+public class JobConfValue {
+
+ /**
+ * Job id
+ */
+ private Long jobId;
+
+ /**
+ * Job name
+ */
+ private String jobName;
+
+ /**
+ * Keyword refer to 'JobConfDefinition'
+ */
+ private String key;
+
+ /**
+ * Actual value
+ */
+ private String value;
+
+ /**
+ * Id refer to 'JobConfDefinition'
+ */
+ private Long referDefId;
+
+ /** No-arg constructor (presumably required by the persistence/serialization framework — confirm). */
+ public JobConfValue(){
+
+ }
+
+ /**
+ * Builds a config value bound to a definition.
+ * @param key keyword of the related JobConfDefinition
+ * @param value actual config value
+ * @param referDefId id of the related JobConfDefinition
+ */
+ public JobConfValue(String key, String value, Long referDefId){
+ this.key = key;
+ this.value = value;
+ this.referDefId = referDefId;
+ }
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public String getJobName() {
+ return jobName;
+ }
+
+ public void setJobName(String jobName) {
+ this.jobName = jobName;
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public void setKey(String key) {
+ this.key = key;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public void setValue(String value) {
+ this.value = value;
+ }
+
+ public Long getReferDefId() {
+ return referDefId;
+ }
+
+ public void setReferDefId(Long referDefId) {
+ this.referDefId = referDefId;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobRole.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobRole.java
new file mode 100644
index 000000000..f88be08f4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobRole.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+import java.util.Date;
+
+public class JobRole {
+ // Primary key
+ private Long id;
+ // Id of the associated job
+ private Long jobId;
+ // Id of the associated role
+ private Long roleId;
+ // Last update time
+ private Date updateTime;
+ // Free-form description
+ private String description;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Long getRoleId() {
+ return roleId;
+ }
+
+ public void setRoleId(Long roleId) {
+ this.roleId = roleId;
+ }
+
+ // NOTE(review): java.util.Date is mutable; getter/setter share the reference, so callers can mutate internal state.
+ public Date getUpdateTime() {
+ return updateTime;
+ }
+
+ public void setUpdateTime(Date updateTime) {
+ this.updateTime = updateTime;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java
new file mode 100644
index 000000000..acb9cfafa
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+public class JobUser {
+ // Primary key
+ private Long id;
+ // Id of the associated job
+ private Long jobId;
+ // Id of the associated user
+ private Long userId;
+ // Login name of the user
+ private String username;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Long getUserId() {
+ return userId;
+ }
+
+ public void setUserId(Long userId) {
+ this.userId = userId;
+ }
+
+ public String getUsername() {
+ return username;
+ }
+
+ public void setUsername(String username) {
+ this.username = username;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java
new file mode 100755
index 000000000..616bdd04b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+
+public class JobUserRole {
+ // Primary key
+ private Long id;
+ // Id of the associated job
+ private Long jobId;
+ // Id of the associated user
+ private Long userId;
+ // Id of the role granted to the user on this job
+ private Long roleId;
+ // Login name of the user
+ private String username;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Long getUserId() {
+ return userId;
+ }
+
+ public void setUserId(Long userId) {
+ this.userId = userId;
+ }
+
+ public Long getRoleId() {
+ return roleId;
+ }
+
+ public void setRoleId(Long roleId) {
+ this.roleId = roleId;
+ }
+
+ public String getUsername() {
+ return username;
+ }
+
+ public void setUsername(String username) {
+ this.username = username;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/User.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/User.java
new file mode 100755
index 000000000..b4f8dc2ec
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/User.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+
+public class User {
+ // Primary key
+ private Long id;
+ // Login name
+ private String username;
+ // Display name
+ private String name;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getUsername() {
+ return username;
+ }
+
+ public void setUsername(String username) {
+ this.username = username;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java
new file mode 100644
index 000000000..1529a326f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java
@@ -0,0 +1,238 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * According to JobConfDefinition
+ */
+@JsonInclude(JsonInclude.Include.NON_NULL)
+public class JobConfDefinitionVo {
+
+ /**
+ * keyword
+ */
+ private String key;
+
+ /**
+ * Display name equals 'option'
+ */
+ private String name;
+
+ /**
+ * Type: NONE: 0, INPUT: 1, SELECT: 2, NUMBER: 3
+ */
+ private String type;
+
+ /**
+ * Sort
+ */
+ private Integer sort;
+
+ /**
+ * Description
+ */
+ private String description;
+
+ /**
+ * Validate type
+ */
+ @JsonProperty("validate_type")
+ private String validateType;
+
+ /**
+ * Validate rule
+ */
+ @JsonProperty("validate_rule")
+ private String validateRule;
+
+ /**
+ * Style (Json/html/css)
+ */
+ private String style;
+
+ /**
+ * Visiable (NOTE(review): misspelling of "visible", kept for API/JSON compatibility)
+ */
+ private int visiable = 1;
+
+ /**
+ * Level
+ */
+ private int level = 1;
+
+ /**
+ * Unit symbol
+ */
+ private String unit;
+
+ /**
+ * Default value
+ */
+ @JsonProperty("default_value")
+ private String defaultValue;
+
+ /**
+ * Refer values (candidate values; was a raw List — parameterized as List<String>)
+ */
+ @JsonProperty("ref_values")
+ private List<String> refValues = new ArrayList<>();
+
+ /**
+ * Children definition (was a raw List — parameterized as List<JobConfDefinitionVo>)
+ */
+ @JsonProperty("child_def")
+ private List<JobConfDefinitionVo> childDef;
+
+ private boolean required;
+
+ /** No-arg constructor (presumably required by Jackson — confirm). */
+ public JobConfDefinitionVo(){
+
+ }
+
+ /**
+ * Builds a view object from the persistent definition.
+ * @param definition source definition entity
+ */
+ public JobConfDefinitionVo(JobConfDefinition definition){
+ this.key = definition.getKey();
+ this.name = definition.getName();
+ this.type = definition.getType();
+ this.sort = definition.getSort();
+ this.description = definition.getDescription();
+ this.validateType = definition.getValidateType();
+ this.validateRule = definition.getValidateRule();
+ this.style = definition.getStyle();
+ this.visiable = definition.getVisiable();
+ this.level = definition.getLevel();
+ this.defaultValue = definition.getDefaultValue();
+ if (StringUtils.isNotBlank(definition.getRefValues())){
+ // Wrap in ArrayList: Arrays.asList is a fixed-size view, which would make later mutation fail,
+ // unlike the mutable ArrayList the field is initialized with.
+ this.refValues = new ArrayList<>(Arrays.asList(definition.getRefValues().split(",")));
+ }
+ this.required = definition.isRequired();
+ this.unit = definition.getUnit();
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public void setKey(String key) {
+ this.key = key;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public Integer getSort() {
+ return sort;
+ }
+
+ public void setSort(Integer sort) {
+ this.sort = sort;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getValidateType() {
+ return validateType;
+ }
+
+ public void setValidateType(String validateType) {
+ this.validateType = validateType;
+ }
+
+ public String getValidateRule() {
+ return validateRule;
+ }
+
+ public void setValidateRule(String validateRule) {
+ this.validateRule = validateRule;
+ }
+
+ public String getStyle() {
+ return style;
+ }
+
+ public void setStyle(String style) {
+ this.style = style;
+ }
+
+ public int getVisiable() {
+ return visiable;
+ }
+
+ public void setVisiable(int visiable) {
+ this.visiable = visiable;
+ }
+
+ public int getLevel() {
+ return level;
+ }
+
+ public void setLevel(int level) {
+ this.level = level;
+ }
+
+ public String getDefaultValue() {
+ return defaultValue;
+ }
+
+ public void setDefaultValue(String defaultValue) {
+ this.defaultValue = defaultValue;
+ }
+
+ public List<String> getRefValues() {
+ return refValues;
+ }
+
+ public void setRefValues(List<String> refValues) {
+ this.refValues = refValues;
+ }
+
+ public List<JobConfDefinitionVo> getChildDef() {
+ return childDef;
+ }
+
+ public void setChildDef(List<JobConfDefinitionVo> childDef) {
+ this.childDef = childDef;
+ }
+
+ public String getUnit() {
+ return unit;
+ }
+
+ public void setUnit(String unit) {
+ this.unit = unit;
+ }
+
+ public boolean isRequired() {
+ return required;
+ }
+
+ public void setRequired(boolean required) {
+ this.required = required;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueSet.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueSet.java
new file mode 100755
index 000000000..67c59ee7d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueSet.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo;
+
+import java.util.List;
+
+/**
+ * Config value set
+ */
+public class JobConfValueSet {
+
+ /**
+ * Job id
+ */
+ private Long jobId;
+
+ /**
+ * Resource config (was a raw List — parameterized as List<JobConfValueVo>)
+ */
+ private List<JobConfValueVo> resourceConfig;
+
+ /**
+ * Produce config
+ */
+ private List<JobConfValueVo> produceConfig;
+
+ /**
+ * Parameter config
+ */
+ private List<JobConfValueVo> parameterConfig;
+
+ /**
+ * Alarm config
+ */
+ private List<JobConfValueVo> alarmConfig;
+
+ /**
+ * Permission config
+ */
+ private List<JobConfValueVo> permissionConfig;
+
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public List<JobConfValueVo> getResourceConfig() {
+ return resourceConfig;
+ }
+
+ public void setResourceConfig(List<JobConfValueVo> resourceConfig) {
+ this.resourceConfig = resourceConfig;
+ }
+
+ public List<JobConfValueVo> getProduceConfig() {
+ return produceConfig;
+ }
+
+ public void setProduceConfig(List<JobConfValueVo> produceConfig) {
+ this.produceConfig = produceConfig;
+ }
+
+ public List<JobConfValueVo> getParameterConfig() {
+ return parameterConfig;
+ }
+
+ public void setParameterConfig(List<JobConfValueVo> parameterConfig) {
+ this.parameterConfig = parameterConfig;
+ }
+
+ public List<JobConfValueVo> getAlarmConfig() {
+ return alarmConfig;
+ }
+
+ public void setAlarmConfig(List<JobConfValueVo> alarmConfig) {
+ this.alarmConfig = alarmConfig;
+ }
+
+ public List<JobConfValueVo> getPermissionConfig() {
+ return permissionConfig;
+ }
+
+ public void setPermissionConfig(List<JobConfValueVo> permissionConfig) {
+ this.permissionConfig = permissionConfig;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueVo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueVo.java
new file mode 100755
index 000000000..7b42b07fb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueVo.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo;
+
+import java.util.List;
+
+/**
+ * View object
+ */
+public class JobConfValueVo {
+ /**
+ * Config key id
+ */
+ private Long configkeyId;
+ /**
+ * Key
+ */
+ private String key;
+ /**
+ * Name
+ */
+ private String name;
+ /**
+ * Value
+ */
+ private String value;
+ /**
+ * Value list (was a raw List — parameterized with the nested ValueList type)
+ */
+ private List<ValueList> valueLists;
+
+ /** No-arg constructor (presumably required by serialization frameworks — confirm). */
+ public JobConfValueVo(){
+
+ }
+
+ /**
+ * Builds a key/value pair view object.
+ * @param key config keyword
+ * @param value config value
+ */
+ public JobConfValueVo(String key, String value){
+ this.key = key;
+ this.value = value;
+ }
+
+ /** One selectable candidate value with its selection state. */
+ public static class ValueList {
+ private String value;
+ private Boolean selected;
+
+ public ValueList() {
+ }
+
+ public ValueList(String value, Boolean selected) {
+ this.value = value;
+ this.selected = selected;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public void setValue(String value) {
+ this.value = value;
+ }
+
+ public Boolean getSelected() {
+ return selected;
+ }
+
+ public void setSelected(Boolean selected) {
+ this.selected = selected;
+ }
+ }
+
+ public String getKey() {
+ return key;
+ }
+
+ public void setKey(String key) {
+ this.key = key;
+ }
+
+ public String getValue() {
+ return value;
+ }
+
+ public void setValue(String value) {
+ this.value = value;
+ }
+
+ public List<ValueList> getValueLists() {
+ return valueLists;
+ }
+
+ public void setValueLists(List<ValueList> valueLists) {
+ this.valueLists = valueLists;
+ }
+
+ public Long getConfigkeyId() {
+ return configkeyId;
+ }
+
+ public void setConfigkeyId(Long configkeyId) {
+ this.configkeyId = configkeyId;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala
new file mode 100644
index 000000000..bcb1dfbbc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala
@@ -0,0 +1,33 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
+
+/**
+ * Job client
+ *
+ * @tparam T job info type
+ */
+trait JobClient[T <: JobInfo] {
+
+ /**
+ * Get the (possibly cached) job info without refreshing.
+ * @return job info
+ */
+ def getJobInfo: T
+
+ /**
+ * Get the job info, optionally refreshing it from the remote side first.
+ * @param refresh whether to refresh before returning
+ * @return job info
+ */
+ def getJobInfo(refresh: Boolean): T
+ /**
+ * Stop the remotely connected job.
+ * @param snapshot whether to take a snapshot saving the job state before stopping
+ * @return the job state info when a snapshot was taken, otherwise null
+ */
+ def stop(snapshot: Boolean): JobStateInfo
+
+ /**
+ * Stop directly, without taking a snapshot.
+ */
+ def stop(): Unit
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala
new file mode 100644
index 000000000..6cde5ba12
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo}
+
+/**
+ * Basic job information
+ */
+trait JobInfo {
+
+ /**
+ * Job name
+ * @return name
+ */
+ def getName: String
+ /**
+ * Job Id
+ * @return id
+ */
+ def getId: String
+
+ /**
+ * Creator
+ * @return submit user
+ */
+ def getUser: String
+
+ /**
+ * Job status
+ * @return status string
+ */
+ def getStatus: String
+
+ /**
+ * Update the job status.
+ * @param status new status string
+ */
+ def setStatus(status: String): Unit
+
+ /**
+ * Job log path
+ * @return log path
+ */
+ def getLogPath: String
+
+ /**
+ * Resources used by the job (key -> resource object; exact schema not shown here).
+ */
+ def getResources: java.util.Map[String, Object]
+
+ /**
+ * Message describing how the job completed.
+ */
+ def getCompletedMsg: String
+
+ /**
+ * Contains the check point and save points
+ * @return state info array
+ */
+ def getJobStates: Array[JobStateInfo]
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala
new file mode 100644
index 000000000..b18d56091
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job
+
+import java.util
+
+
+trait LaunchJob {
+
+ /**
+ * Job name
+ * @return name
+ */
+ def getJobName: String
+
+ /** User submitting the job. */
+ def getSubmitUser: String
+
+ /** Labels attached to the launch request. */
+ def getLabels: util.Map[String, Any]
+
+ /** Job content (executable payload description). */
+ def getJobContent: util.Map[String, Any]
+
+ /** Runtime parameters. */
+ def getParams: util.Map[String, Any]
+
+ /** Source metadata of the job. */
+ def getSource: util.Map[String, Any]
+
+ /** Launch configs (see the LAUNCH_CONFIG_* keys on the companion object). */
+ def getLaunchConfigs: util.Map[String, Any]
+
+}
+
+object LaunchJob {
+
+ // Well-known keys for the launchConfigs map
+ val LAUNCH_CONFIG_CREATE_SERVICE = "createService"
+ val LAUNCH_CONFIG_DESCRIPTION = "description"
+ val LAUNCH_CONFIG_MAX_SUBMIT_TIME = "maxSubmitTime"
+
+ /** Create a fresh builder for an anonymous LaunchJob. */
+ def builder(): Builder = new Builder
+
+ /** Mutable builder assembling an immutable, anonymous LaunchJob view of its fields. */
+ class Builder {
+ private var submitUser: String = _
+ private var jobName: String = _
+ private var labels: util.Map[String, Any] = _
+ private var jobContent: util.Map[String, Any] = _
+ private var params: util.Map[String, Any] = _
+ private var source: util.Map[String, Any] = _
+ private var launchConfigs: util.Map[String, Any] = _
+
+ def setJobName(jobName: String): this.type = {
+ this.jobName = jobName
+ this
+ }
+
+ def setSubmitUser(submitUser: String): this.type = {
+ this.submitUser = submitUser
+ this
+ }
+
+ def setLabels(labels: util.Map[String, Any]): this.type = {
+ this.labels = labels
+ this
+ }
+
+ def setJobContent(jobContent: util.Map[String, Any]): this.type = {
+ this.jobContent = jobContent
+ this
+ }
+
+ def setParams(param: util.Map[String, Any]): this.type = {
+ this.params = param
+ this
+ }
+
+ def setSource(source: util.Map[String, Any]): this.type = {
+ this.source = source
+ this
+ }
+
+ def setLaunchConfigs(launchConfigs: util.Map[String, Any]): this.type = {
+ this.launchConfigs = launchConfigs
+ this
+ }
+
+ /** Copy every field from an existing LaunchJob into this builder. */
+ def setLaunchJob(launchJob: LaunchJob): this.type = {
+ setSubmitUser(launchJob.getSubmitUser).setLabels(launchJob.getLabels)
+ .setJobContent(launchJob.getJobContent).setParams(launchJob.getParams)
+ .setSource(launchJob.getSource).setLaunchConfigs(launchJob.getLaunchConfigs).setJobName(launchJob.getJobName)
+ }
+
+ /** Build an anonymous LaunchJob capturing the builder's current field values. */
+ def build(): LaunchJob = new LaunchJob {
+ override def getSubmitUser: String = submitUser
+
+ override def getLabels: util.Map[String, Any] = labels
+
+ override def getJobContent: util.Map[String, Any] = jobContent
+
+ override def getParams: util.Map[String, Any] = params
+
+ override def getSource: util.Map[String, Any] = source
+
+ override def getLaunchConfigs: util.Map[String, Any] = launchConfigs
+
+ override def toString: String = s"LaunchJob(submitUser: $submitUser, labels: $labels, jobContent: $jobContent, params: $params, source: $source)"
+
+ /**
+ * Job name
+ *
+ * @return
+ */
+ override def getJobName: String = jobName
+ }
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala
new file mode 100644
index 000000000..1a1365dfc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala
@@ -0,0 +1,69 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo, LaunchJob}
+
+import java.util.concurrent.ConcurrentHashMap
+
+/**
+ * Basic job manager interface for launching job
+ */
+trait JobLaunchManager[T <: JobInfo] {
+
+ /**
+ * Init method
+ */
+ def init(): Unit
+
+ /**
+ * Destroy method
+ */
+ def destroy(): Unit
+
+ /**
+ * Manager name
+ * @return name
+ */
+ def getName: String
+
+ /**
+ * Launch a new job without a prior job state.
+ * @param job a StreamisJob wanted to be launched
+ * @return client handle of the launched job
+ */
+ def launch(job: LaunchJob): JobClient[T]
+
+ /**
+ * This method is used to launch a new job.
+ * @param job a StreamisJob wanted to be launched.
+ * @param jobState job state used to launch
+ * @return the job id.
+ */
+ def launch(job: LaunchJob, jobState: JobState): JobClient[T]
+ /**
+ * Connect the job which already launched in another process,
+ * if the job has been stored in process, just return the job info
+ * @param id id
+ * @param jobInfo serialized job info
+ * @return client handle
+ */
+ def connect(id: String, jobInfo: String): JobClient[T]
+
+ /** Same as above, but with an already-deserialized job info object. */
+ def connect(id: String, jobInfo: T): JobClient[T]
+ /**
+ * Job state manager(store the state information, example: Checkpoint/Savepoint)
+ * @return state manager instance
+ */
+ def getJobStateManager: JobStateManager
+
+}
+object JobLaunchManager{
+
+ /**
+ * Store the job launch managers
+ */
+ private val launchManagers = new ConcurrentHashMap[String, JobLaunchManager[_ <: JobInfo]]()
+
+ /** Register a launch manager under the given name (overwrites any existing entry for that name). */
+ def registerJobManager(name: String, jobLaunchManager: JobLaunchManager[_ <: JobInfo]): Unit = {
+ launchManagers.put(name, jobLaunchManager)
+ }
+
+ /** Look up a launch manager by name; returns null when no manager was registered under it. */
+ def getJobManager(name: String): JobLaunchManager[_ <: JobInfo] = {
+ launchManagers.get(name)
+ }
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobStateManager.scala
new file mode 100644
index 000000000..4d0e7d051
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobStateManager.scala
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateFetcher}
+
+import java.net.URI
+
+/**
+ * Job state manager
+ */
+trait JobStateManager {
+
+ /**
+ * Init method
+ */
+ def init(): Unit
+
+ /**
+ * Destroy method
+ */
+ def destroy(): Unit
+
+ /**
+ * Register job state fetcher
+ * @param clazz state class the fetcher produces
+ * @param builder factory creating the job state fetcher
+ * @tparam T state type produced by the fetcher
+ */
+ def registerJobStateFetcher(clazz: Class[_], builder: () => JobStateFetcher[_ <: JobState]): Unit
+ /**
+ * Get (or lazily create via the registered builder) the fetcher for a state class.
+ * @param clazz state class
+ * @tparam T state type
+ * @return fetcher instance
+ */
+ def getOrCreateJobStateFetcher[T <: JobState](clazz: Class[_]): JobStateFetcher[T]
+
+ /**
+ * Get job state
+ * @param jobInfo job info
+ * @tparam T state type
+ * @return state for the given job
+ */
+ def getJobState[T <: JobState](clazz: Class[_], jobInfo: JobInfo): T
+
+ /** State directory URI with an explicit scheme. */
+ def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, relativePath: String): URI
+
+ /** State directory URI using the default scheme. */
+ def getJobStateDir[T <: JobState](clazz: Class[_], relativePath: String): URI
+ /**
+ * Get job state directory uri
+ * @param clazz clazz
+ * @param scheme scheme
+ * @param authority authority
+ * @param relativePath relative path
+ * @tparam T state type
+ * @return directory uri
+ */
+ def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, authority: String, relativePath: String): URI
+}
+
+
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala
new file mode 100644
index 000000000..b05df81c2
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala
@@ -0,0 +1,33 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state
+
+import java.net.URI
+
+/**
+ * Job state: a persisted snapshot of a job identified by id, addressed by a location URI,
+ * carrying opaque metadata and the timestamp at which it was saved.
+ */
+trait JobState {
+
+ /**
+ * Job state id
+ * @return identifier of this state snapshot
+ */
+ def getId: String
+
+ /**
+ * Location URI where the state is stored (e.g. a filesystem path URI)
+ * @return state location
+ */
+ def getLocation: URI
+
+ /**
+ * Metadata info (opaque; concrete type depends on the implementation)
+ * @return metadata payload
+ */
+ def getMetadataInfo: Any
+
+ /**
+ * Timestamp to save the state (epoch millis — TODO confirm unit against producers)
+ * @return save timestamp
+ */
+ def getTimestamp: Long
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateFetcher.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateFetcher.scala
new file mode 100644
index 000000000..41f47fd75
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateFetcher.scala
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+
+/**
+ * Fetches the persisted [[JobState]] of a job. Implementations own whatever client or
+ * connection they need between `init()` and `destroy()`.
+ * @tparam T concrete job state type returned by this fetcher
+ */
+trait JobStateFetcher[T <: JobState] {
+
+ /**
+ * Init method (build clients/resources before first use)
+ */
+ def init(): Unit
+
+ /**
+ * Get state information for the given job
+ * @param jobInfo JobInfo
+ * @return the job state, or null-equivalent when absent (implementation-defined)
+ */
+ def getState(jobInfo: JobInfo): T
+
+ /**
+ * Destroy method (release clients/resources)
+ */
+ def destroy(): Unit
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala
new file mode 100644
index 000000000..84e29bf73
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state
+
+/**
+ * Basic job state info bean: the storage location string and the save timestamp.
+ * Plain mutable holder with Java-style accessors for interop.
+ */
+class JobStateInfo {
+
+ /**
+ * Location
+ */
+ private var location: String = _
+
+ /**
+ * Timestamp (-1 while unset)
+ */
+ private var timestamp: Long = -1
+
+ def getLocation: String = location
+
+ def setLocation(location: String): Unit = this.location = location
+
+ def getTimestamp: Long = timestamp
+
+ def setTimestamp(timestamp: Long): Unit = this.timestamp = timestamp
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml
new file mode 100644
index 000000000..1e266bedb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml
@@ -0,0 +1,65 @@
+
+
+
+
+
+ streamis-job-launcher
+ com.webank.wedatasphere.streamis
+ 0.2.4
+
+ 4.0.0
+
+ streamis-job-launcher-linkis
+
+
+ 8
+ 8
+
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-launcher-base
+ ${jobmanager.version}
+
+
+ org.apache.linkis
+ linkis-computation-client
+ ${linkis.version}
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+
+ ${project.artifactId}-${project.version}
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/StreamisJobLaunchException.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/StreamisJobLaunchException.java
new file mode 100644
index 000000000..aee2c5570
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/StreamisJobLaunchException.java
@@ -0,0 +1,24 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception;
+
+import org.apache.linkis.common.exception.ExceptionLevel;
+import org.apache.linkis.common.exception.LinkisRuntimeException;
+
+/**
+ * Launch exception for Streamis jobs, specializing FlinkJobLaunchErrorException.
+ */
+public class StreamisJobLaunchException extends FlinkJobLaunchErrorException{
+
+ public StreamisJobLaunchException(int errorCode, String errorMsg, Throwable t) {
+ super(errorCode, errorMsg, t);
+ }
+
+ /**
+ * Unchecked variant, thrown from contexts that cannot declare a checked exception
+ * (e.g. functional interfaces and override points).
+ * NOTE(review): the nested name shadows java.lang.Runtime within importing files — confirm this is intentional.
+ */
+ public static class Runtime extends LinkisRuntimeException {
+
+ public Runtime(int errCode, String desc,Throwable t) {
+ super(errCode, desc);
+ // LinkisRuntimeException offers no (code, msg, cause) constructor here, so attach the cause explicitly
+ super.initCause(t);
+ }
+
+ @Override
+ public ExceptionLevel getLevel() {
+ return ExceptionLevel.ERROR;
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java
new file mode 100644
index 000000000..4a423326a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java
@@ -0,0 +1,236 @@
+
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateFetcher;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.StreamisJobLaunchException;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client.StateFileTree;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client.LinkisJobStateGetAction;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client.LinkisJobStateResult;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.linkis.computation.client.LinkisJobBuilder$;
+import org.apache.linkis.httpclient.Client;
+import org.apache.linkis.httpclient.dws.DWSHttpClient;
+import org.apache.linkis.httpclient.dws.response.DWSResult;
+import org.apache.linkis.httpclient.response.Result;
+import org.apache.linkis.ujes.client.response.ResultSetListResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URI;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Function;
+
+
+/**
+ * Linkis Job state fetcher
+ * 1) Init to build http client
+ * 2) Invoke the getState method to fetch form /api/rest_j/v1/filesystem/getDirFileTrees, the new JobState info
+ * (Note: linkis doesn't support to fetch the file tree recursively, so should invoke several times)
+ * 3) Destroy to close the http client when the system is closed
+ * @param
+ */
+
+public abstract class AbstractLinkisJobStateFetcher implements JobStateFetcher {
+
+ private static final Logger LOG = LoggerFactory.getLogger(AbstractLinkisJobStateFetcher.class);
+
+ /**
+ * Modify time properties name
+ */
+ private static final String PROPS_MODIFY_TIME = "modifytime";
+
+ /**
+ * Size properties name
+ */
+ private static final String PROPS_SIZE = "size";
+ /**
+ * Http Client
+ */
+ Client client;
+
+ private final Class stateClass;
+
+ private final JobStateManager jobStateManager;
+
+ public AbstractLinkisJobStateFetcher(Class stateClass, JobStateManager jobStateManager){
+ this.stateClass = stateClass;
+ this.jobStateManager = jobStateManager;
+ }
+
+ /**
+ * Init method
+ */
+ @Override
+ public void init() {
+ String fetcherName = this.getClass().getSimpleName();
+ LOG.info("Initialize httpClient in JobStateFetcher for [{}] start", fetcherName);
+ client = new DWSHttpClient(LinkisJobBuilder$.MODULE$.getDefaultClientConfig(), fetcherName + "-Client");
+ LOG.info("Initialize httpClient in JobStateFetcher for [{}] finished", fetcherName);
+ }
+
+ /**
+ * Main entrance
+ * @param jobInfo job info
+ * @return
+ */
+ @Override
+ public T getState(JobInfo jobInfo) {
+ String treeDir = this.jobStateManager.getJobStateDir(stateClass, jobInfo.getName()).toString();
+ StateFileTree stateFileTree = traverseFileTreeToFind(jobInfo, getDirFileTree(jobInfo, treeDir), this::isMatch, false);
+ if (Objects.nonNull(stateFileTree) && StringUtils.isNotBlank(stateFileTree.getPath())){
+ JobStateFileInfo stateFileInfo = new JobStateFileInfo(stateFileTree.getName(),
+ stateFileTree.getPath(), stateFileTree.getParentPath(),
+ Long.parseLong(stateFileTree.getProperties().getOrDefault(PROPS_SIZE, "0")),
+ Long.parseLong(stateFileTree.getProperties().getOrDefault(PROPS_MODIFY_TIME, "0")));
+ return getState(stateFileInfo);
+ }
+ return null;
+ }
+
+
+ @Override
+ public void destroy() {
+ try {
+ client.close();
+ } catch (IOException e) {
+ throw new StreamisJobLaunchException.Runtime(-1,
+ "Fail to destroy httpClient in JobStateFetcher[" + this.getClass().getSimpleName() + "]",e);
+ }
+ }
+
+ /**
+ * Traverse the file tree to find the suitable state file
+ * @param jobInfo job info
+ * @param stateFileTree state file tree
+ * @param matcher matcher
+ * @param resolved resolved
+ * @return
+ */
+ private StateFileTree traverseFileTreeToFind(JobInfo jobInfo, StateFileTree stateFileTree, Function matcher,
+ boolean resolved){
+ AtomicReference latestFileTree = new AtomicReference<>(new StateFileTree());
+ if (Objects.nonNull(stateFileTree)){
+ if (!resolved && stateFileTree.getIsLeaf()){
+ if (matcher.apply(stateFileTree.getPath()) && compareTime(stateFileTree, latestFileTree.get()) > 0){
+ latestFileTree.set(stateFileTree);
+ }
+ } else if (!stateFileTree.getIsLeaf()){
+ Optional.ofNullable(stateFileTree.getChildren()).ifPresent(children -> children.forEach(childStateFileTree -> {
+ StateFileTree candidateFileTree = childStateFileTree.getIsLeaf() ? childStateFileTree :
+ traverseFileTreeToFind(jobInfo,
+ Objects.nonNull(childStateFileTree.getChildren())? childStateFileTree : getDirFileTree(jobInfo, childStateFileTree.getPath()),
+ matcher,
+ true);
+ if (compareTime(candidateFileTree, latestFileTree.get()) > 0 && matcher.apply(candidateFileTree.getPath())){
+ latestFileTree.set(candidateFileTree);
+ }
+ }));
+ }
+ }
+ return latestFileTree.get();
+ }
+
+ /**
+ * Fetch the File tree form directory
+ * @param jobInfo job info
+ * @param dirPath directory path
+ * @return state file tree
+ */
+ private StateFileTree getDirFileTree(JobInfo jobInfo, String dirPath){
+ try {
+ LinkisJobStateGetAction getAction = new LinkisJobStateGetAction(jobInfo.getUser(), dirPath);
+ Result result = client.execute(getAction);
+ String responseBody = Optional.ofNullable(result.getResponseBody()).orElse("");
+ LOG.trace("JobState FileTree => [responseBody: {}]",
+ responseBody.length() > 100? responseBody.substring(0, 100) + "..." : responseBody);
+ StateFileTree stateFileTree;
+ if (result instanceof ResultSetListResult){
+ ResultSetListResult setListResult = (ResultSetListResult)result;
+ checkFetchStateResult(setListResult);
+ stateFileTree = DWSHttpClient.jacksonJson().convertValue(setListResult.getDirFileTrees(), StateFileTree.class);
+ } else if(result instanceof LinkisJobStateResult){
+ LinkisJobStateResult stateResult = (LinkisJobStateResult) result;
+ checkFetchStateResult(stateResult);
+ stateFileTree = stateResult.getStateFileTree();
+ }else {
+ throw new FlinkJobStateFetchException(-1, "JobState FileTree result is not a unrecognized type: " +
+ "[" + result.getClass().getCanonicalName() + "]",null);
+ }
+ if(stateFileTree == null){
+ LOG.warn("'StateFileTree' for path [{}] is null/empty, just return the null FileTree", dirPath);
+ return null;
+ }
+ LOG.trace(stateFileTree.getChildren() + "");
+ return stateFileTree;
+ } catch (FlinkJobStateFetchException e) {
+ throw new StreamisJobLaunchException.Runtime(e.getErrCode(),e.getMessage(),e);
+ } catch (Exception e) {
+ throw new StreamisJobLaunchException.Runtime(-1,"Unexpected exception in fetching JobState FileTree",e);
+ }
+ }
+
+ private void checkFetchStateResult(DWSResult result) throws FlinkJobStateFetchException {
+ if(result.getStatus()!= 0) {
+ String errMsg = result.getMessage();
+ throw new FlinkJobStateFetchException(-1, "Fail to fetch JobState FileTree, message: " + errMsg, null);
+ }
+ }
+ /**
+ * Compare timestamp value in file trees
+ * @param leftTree left
+ * @param rightTree right
+ * @return size
+ */
+ private long compareTime(StateFileTree leftTree, StateFileTree rightTree){
+ long leftTime = 0L,rightTime = 0L;
+ try {
+ leftTime = Long.parseLong(Optional.ofNullable(leftTree.getProperties()).orElse(new HashMap<>()).getOrDefault(PROPS_MODIFY_TIME, "0"));
+ } catch (NumberFormatException e){
+ LOG.warn("Illegal format value for property '{}' in FilePath [{}]", PROPS_MODIFY_TIME, leftTree.getPath(), e);
+ }
+ try {
+ rightTime = Long.parseLong(Optional.ofNullable(rightTree.getProperties()).orElse(new HashMap<>()).getOrDefault(PROPS_MODIFY_TIME, "0"));
+ } catch (NumberFormatException e){
+ LOG.warn("Illegal format value for property '{}' in FilePath [{}]", PROPS_MODIFY_TIME, rightTree.getPath(), e);
+ }
+ return leftTime - rightTime;
+ }
+ /**
+ * Is the path is match
+ * @param path path
+ * @return boolean
+ */
+ protected abstract boolean isMatch(String path);
+
+ /**
+ * Get the concrete JobState entity from FileInfo
+ * @param fileInfo file info
+ * @return JobState
+ */
+ protected abstract T getState(JobStateFileInfo fileInfo);
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java
new file mode 100644
index 000000000..377c525c3
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.StreamisJobLaunchException;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import static com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.JobStateConf.CHECKPOINT_PATH_PATTERN;
+
+/**
+ * Checkpoint JobState Fetcher: matches Flink checkpoint metadata paths (.../chk-N/_metadata)
+ */
+public class CheckpointJobStateFetcher extends AbstractLinkisJobStateFetcher<Checkpoint> {
+
+ private static final Logger LOG = LoggerFactory.getLogger(CheckpointJobStateFetcher.class);
+
+ private static final Pattern PATH_PATTERN = Pattern.compile(CHECKPOINT_PATH_PATTERN.getValue());
+
+ public CheckpointJobStateFetcher(Class<Checkpoint> stateClass, JobStateManager jobStateManager) {
+ super(stateClass, jobStateManager);
+ }
+
+ @Override
+ protected boolean isMatch(String path) {
+ return PATH_PATTERN.matcher(path).matches();
+ }
+
+ /**
+ * Build a Checkpoint from the matched metadata file info
+ * @param fileInfo file info of the checkpoint _metadata file
+ * @return checkpoint state
+ */
+ @Override
+ public Checkpoint getState(JobStateFileInfo fileInfo) {
+ // TODO from linkis will lost the authority info
+ URI location = URI.create(fileInfo.getPath());
+ if (StringUtils.isBlank(location.getAuthority()) &&
+ StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue())){
+ try {
+ // Re-attach the configured default authority that linkis stripped from the path
+ location = new URI(location.getScheme(), JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue(),
+ location.getPath(), null, null);
+ } catch (URISyntaxException e) {
+ throw new StreamisJobLaunchException.Runtime(-1, "Fail to resolve checkpoint location, message: " + e.getMessage(), e);
+ }
+ }
+ Checkpoint checkpoint = new Checkpoint(location.toString());
+ checkpoint.setMetadataInfo(fileInfo);
+ checkpoint.setTimestamp(fileInfo.getModifytime());
+ LOG.info("Checkpoint info is [path: {}, timestamp: {}]" ,checkpoint.getLocation(), checkpoint.getTimestamp());
+ return checkpoint;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java
new file mode 100644
index 000000000..be4f27ee0
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state;
+
+import org.apache.linkis.common.conf.CommonVars;
+
+/**
+ * JobState configuration
+ */
+public class JobStateConf {
+
+ public static final CommonVars CHECKPOINT_PATH_PATTERN = CommonVars.apply("wds.streamis.job.state.checkpoint.path-pattern", "^[\\s\\S]+?/\\w+?/chk-\\d+/_metadata$");
+
+ public static final CommonVars SAVEPOINT_PATH_PATTERN = CommonVars.apply("wds.streamis.job.state.savepoint.path-pattern", "^[\\s\\S]+?/savepoint-[\\w-]+/_metadata$");
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateFileInfo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateFileInfo.java
new file mode 100644
index 000000000..d0b235217
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateFileInfo.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state;
+
+/**
+ * JobState File info: descriptor of one state file (name, full path, parent directory,
+ * byte size and last-modified timestamp) as reported by the filesystem API.
+ */
+public class JobStateFileInfo {
+
+ /** File name */
+ private String name;
+ /** Full file path */
+ private String path;
+ /** Parent directory path */
+ private String parentPath;
+ /** File size in bytes */
+ private long size;
+ /** Last-modified timestamp */
+ private long modifytime;
+
+ public JobStateFileInfo(String name, String path, String parentPath, long size, long modifytime) {
+ this.name = name;
+ this.path = path;
+ this.parentPath = parentPath;
+ this.size = size;
+ this.modifytime = modifytime;
+ }
+
+ public String getName() { return this.name; }
+
+ public String getPath() { return this.path; }
+
+ public String getParentPath() { return this.parentPath; }
+
+ public long getSize() { return this.size; }
+
+ public long getModifytime() { return this.modifytime; }
+
+ public void setName(String name) { this.name = name; }
+
+ public void setPath(String path) { this.path = path; }
+
+ public void setParentPath(String parentPath) { this.parentPath = parentPath; }
+
+ public void setSize(long size) { this.size = size; }
+
+ public void setModifytime(long modifytime) { this.modifytime = modifytime; }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java
new file mode 100644
index 000000000..69dbb51d0
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.StreamisJobLaunchException;
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.regex.Pattern;
+
+import static com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.JobStateConf.SAVEPOINT_PATH_PATTERN;
+
+/**
+ * Savepoint JobState Fetcher: matches Flink savepoint metadata paths (.../savepoint-xxx/_metadata)
+ */
+public class SavepointJobStateFetcher extends AbstractLinkisJobStateFetcher<Savepoint>{
+
+ // BUGFIX: logger was bound to CheckpointJobStateFetcher.class, mislabeling log lines
+ private static final Logger LOG = LoggerFactory.getLogger(SavepointJobStateFetcher.class);
+
+ private static final Pattern PATH_PATTERN = Pattern.compile(SAVEPOINT_PATH_PATTERN.getValue());
+
+ public SavepointJobStateFetcher(Class<Savepoint> stateClass, JobStateManager jobStateManager) {
+ super(stateClass, jobStateManager);
+ }
+
+ @Override
+ protected boolean isMatch(String path) {
+ return PATH_PATTERN.matcher(path).matches();
+ }
+
+ /**
+ * Build a Savepoint from the matched metadata file info
+ * @param fileInfo file info of the savepoint _metadata file
+ * @return savepoint state
+ */
+ @Override
+ protected Savepoint getState(JobStateFileInfo fileInfo) {
+ // TODO from linkis will lost the authority info
+ URI location = URI.create(fileInfo.getPath());
+ if (StringUtils.isBlank(location.getAuthority()) &&
+ StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue())){
+ try {
+ // Re-attach the configured default authority that linkis stripped from the path
+ location = new URI(location.getScheme(), JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue(),
+ location.getPath(), null, null);
+ } catch (URISyntaxException e) {
+ // BUGFIX: message said "checkpoint location" in the savepoint fetcher
+ throw new StreamisJobLaunchException.Runtime(-1, "Fail to resolve savepoint location, message: " + e.getMessage(), e);
+ }
+ }
+ Savepoint savepoint = new Savepoint(location.toString());
+ savepoint.setMetadataInfo(fileInfo);
+ savepoint.setTimestamp(fileInfo.getModifytime());
+ LOG.info("Savepoint info is [path: {}, timestamp: {}]", savepoint.getLocation(), savepoint.getTimestamp());
+ return savepoint;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java
new file mode 100644
index 000000000..48c6d7a6a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.AbstractJobStateResult;
+import org.apache.linkis.httpclient.dws.DWSHttpClient;
+import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * JobState result for the filesystem getDirFileTrees endpoint
+ */
+@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/getDirFileTrees")
+public class LinkisJobStateResult extends AbstractJobStateResult {
+
+ private Map<String, Object> dirFileTrees = new HashMap<>();
+
+ // Accessors for the result-mapping layer — presumably DWS populates this field
+ // via the setter; TODO confirm against DWSHttpMessageResult binding rules
+ public Map<String, Object> getDirFileTrees() {
+ return dirFileTrees;
+ }
+
+ public void setDirFileTrees(Map<String, Object> dirFileTrees) {
+ this.dirFileTrees = dirFileTrees;
+ }
+
+ /**
+ * Convert the result data to state file tree
+ * @return state file tree
+ * @throws FlinkJobStateFetchException when the raw map cannot be converted
+ */
+ public StateFileTree getStateFileTree() throws FlinkJobStateFetchException {
+ try {
+ return DWSHttpClient.jacksonJson().convertValue(dirFileTrees, StateFileTree.class);
+ }catch(Exception e){
+ throw new FlinkJobStateFetchException(-1, "Fail to parse JobState result data, message: " + e.getMessage(), e);
+ }
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java
new file mode 100644
index 000000000..3844970c4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java
@@ -0,0 +1,62 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client;
+
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * One node of the filesystem tree returned by linkis: a file (leaf) or directory
+ * with its properties (string-valued, e.g. "size", "modifytime") and children.
+ */
+public class StateFileTree {
+
+ private String name;
+ private String path;
+ // Property values arrive as strings and are parsed by callers (e.g. Long.parseLong)
+ private HashMap<String, String> properties;
+ private List<StateFileTree> children;
+ private Boolean isLeaf = false;
+ private String parentPath;
+
+ public Boolean getIsLeaf() {
+ return isLeaf;
+ }
+
+ public void setIsLeaf(Boolean isLeaf) {
+ this.isLeaf = isLeaf;
+ }
+
+ public String getParentPath() {
+ return parentPath;
+ }
+
+ public void setParentPath(String parentPath) {
+ this.parentPath = parentPath;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ public void setPath(String path) {
+ this.path = path;
+ }
+
+ public HashMap<String, String> getProperties() {
+ return properties;
+ }
+
+ public void setProperties(HashMap<String, String> properties) {
+ this.properties = properties;
+ }
+
+ public List<StateFileTree> getChildren() {
+ return children;
+ }
+
+ public void setChildren(List<StateFileTree> children) {
+ this.children = children;
+ }
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandler.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandler.java
new file mode 100644
index 000000000..8394b9d72
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandler.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url;
+
+import java.io.IOException;
+import java.net.URL;
+import java.net.URLConnection;
+import java.net.URLStreamHandler;
+
+/**
+ * URL stream handler for linkis client (cannot open connection): these URLs only
+ * identify state locations and must never be dereferenced.
+ */
+public class LinkisURLStreamHandler extends URLStreamHandler {
+ @Override
+ protected URLConnection openConnection(URL url) throws IOException {
+ // NOTE(review): throws unchecked IllegalArgumentException although IOException is declared —
+ // confirm callers expect the unchecked type before changing it.
+ throw new IllegalArgumentException("Cannot open connection for url [" + url.getPath() + "]");
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java
new file mode 100644
index 000000000..1352c8b28
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url;
+
+
+import java.net.URLStreamHandler;
+import java.net.URLStreamHandlerFactory;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Default linkis stream handler factory (support specific schemas)
+ */
+public class LinkisURLStreamHandlerFactory implements URLStreamHandlerFactory {
+
+ /**
+ * Support schemas
+ */
+ private final List supportSchemas = new ArrayList<>();
+
+ /**
+ * Stream handler
+ */
+ private final URLStreamHandler defaultStreamHandler;
+
+ public LinkisURLStreamHandlerFactory(String... schemas){
+ supportSchemas.addAll(Arrays.asList(schemas));
+ this.defaultStreamHandler = new LinkisURLStreamHandler();
+ }
+
+ @Override
+ public URLStreamHandler createURLStreamHandler(String protocol) {
+ if (supportSchemas.stream().anyMatch( schema -> schema.equals(protocol))){
+ return this.defaultStreamHandler;
+ }
+ return null;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala
new file mode 100644
index 000000000..812475016
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala
@@ -0,0 +1,52 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf
+
+import org.apache.linkis.common.conf.CommonVars
+
/**
 * Job Launcher configuration.
 * Each entry is a Linkis CommonVars: the first argument is the configuration
 * key and the second is the default used when the key is not set.
 */
object JobLauncherConfiguration {

  /**
   * Max number of attempts to fetch the application info of a launched job
   */
  val FLINK_FETCH_APPLICATION_INFO_MAX_TIMES: CommonVars[Int] = CommonVars("wds.streamis.application.info.fetch.max", 6)

  /**
   * Default URI scheme used to store flink job states
   */
  val FLINK_STATE_DEFAULT_SCHEME: CommonVars[String] = CommonVars("wds.streamis.launch.flink.state.default.scheme", "hdfs")
  /**
   * Support schema protocols to store flink job states
   * NOTE(review): the key ends in ".schemas" while the default-scheme key uses
   * ".scheme" — inconsistent spelling, but both keys are public configuration
   * surface and must not be renamed silently.
   */
  val FLINK_STATE_SUPPORT_SCHEMES: CommonVars[String] = CommonVars("wds.streamis.launch.flink.state.support.schemas", "hdfs,file,viewfs,s3")

  /**
   * Authority(host) value to store flink job states
   */
  val FLINK_STATE_DEFAULT_AUTHORITY: CommonVars[String] = CommonVars("wds.streamis.launch.flink.state.authority", "")
  /**
   * Savepoint mode passed to the trigger-savepoint operator
   */
  val FLINK_TRIGGER_SAVEPOINT_MODE: CommonVars[String] = CommonVars("wds.streamis.launch.flink.savepoint.mode", "trigger")

  /**
   * Savepoint dir (root directory for triggered savepoints)
   */
  val FLINK_SAVEPOINT_PATH: CommonVars[String] = CommonVars("wds.streamis.launch.flink.savepoint.dir", "/flink/flink-savepoints")

  /**
   * Checkpoint dir (root directory for periodic checkpoints)
   */
  val FLINK_CHECKPOINT_PATH: CommonVars[String] = CommonVars("wds.streamis.launch.flink.checkpoint.dir", "/flink/flink-checkpoints")

  /**
   * Linkis release version
   */
  val FLINK_LINKIS_RELEASE_VERSION: CommonVars[String] = CommonVars("wds.streamis.launch.flink.linkis.release.version", "")
  /**
   * Variable: savepoint path (variable name injected into job startup params)
   */
  val VAR_FLINK_SAVEPOINT_PATH: CommonVars[String] = CommonVars("wds.streamis.launch.variable.flink.savepoint.path", "flink.app.savePointPath")

  /**
   * Variable: flink app name (variable name injected into job startup params)
   */
  val VAR_FLINK_APP_NAME: CommonVars[String] = CommonVars("wds.streamis.launch.variable.flink.app.name", "flink.app.name")

}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala
new file mode 100644
index 000000000..d6fb5c661
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala
@@ -0,0 +1,78 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core
+
+import java.io.Closeable
+import java.util
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator
+
/**
 * Iterator over the log lines of a Flink job, fetched through a Linkis
 * EngineConnLogOperator. Implementations poll the operator for more pages
 * and must be closed to stop iteration.
 *
 * @date 2021-11-10
 * @author enjoyyin
 * @since 0.5.0
 */
trait FlinkLogIterator extends Iterator[String] with Closeable {
  // Payload describing which page/lines/keywords of the log to fetch
  val requestPayload: LogRequestPayload
  // Underlying Linkis operator that actually pulls the log content
  val engineConnLogOperator: EngineConnLogOperator
  // Configure the operator from the payload and fetch the first page
  def init(): Unit
  // Path of the log file on the engine side (known after init())
  def getLogPath: String
  // Directory suffix used to locate historical logs
  def getLogDirSuffix: String
  // The most recently fetched page of log lines
  def getLogs: util.ArrayList[String]
  // Last line number reached by the fetch
  def getEndLine: Long
}
+
/**
 * Default FlinkLogIterator: configures the EngineConnLogOperator from the
 * request payload and pages through the log, sleeping between polls when a
 * page comes back empty.
 */
class SimpleFlinkJobLogIterator(override val requestPayload: LogRequestPayload,
                                override val engineConnLogOperator: EngineConnLogOperator) extends FlinkLogIterator {

  // Current page of log lines (replaced on every fetch)
  private var logs: util.ArrayList[String] = _
  // Read cursor into the current page
  private var index = 0
  // Engine-side log file path, set by init()
  private var logPath: String = _
  // Historical-log directory suffix, injected by the caller
  private var logDirSuffix: String = _
  // NOTE(review): starts as true and is never set to false anywhere (init()
  // does not touch it, close() sets it to true again), so hasNext always
  // short-circuits to false — looks like it should start as false or be
  // cleared in init(); confirm intended behavior before changing.
  private var isClosed = true
  // Last fetched line number (Int here, widened to Long by getEndLine)
  private var endLine = 0

  /**
   * Push the payload settings into the operator and fetch the first page,
   * recording the log path and end line from the response.
   */
  override def init(): Unit = {
    engineConnLogOperator.setPageSize(requestPayload.getPageSize)
    engineConnLogOperator.setFromLine(requestPayload.getFromLine)
    engineConnLogOperator.setIgnoreKeywords(requestPayload.getIgnoreKeywords)
    engineConnLogOperator.setOnlyKeywords(requestPayload.getOnlyKeywords)
    engineConnLogOperator.setLastRows(requestPayload.getLastRows)
    val engineConnLog = engineConnLogOperator()
    logs = engineConnLog.logs
    logPath = engineConnLog.logPath
    endLine = engineConnLog.endLine
  }

  // Closing only flips the flag; the poll loop in hasNext checks it to stop
  override def close(): Unit = isClosed = true

  /**
   * True when another line is available. When the current page is exhausted,
   * blocks polling the operator every 2s until a non-empty page arrives or
   * the iterator is closed (allows close() from another thread to break out).
   */
  override def hasNext: Boolean = {
    if(isClosed) return false
    else if(index < logs.size()) return true
    logs = engineConnLogOperator().logs
    while (logs == null || logs.isEmpty) {
      logs = engineConnLogOperator().logs
      if(isClosed) return false
      Utils.sleepQuietly(2000)
    }
    index = 0
    true
  }

  // Return the line at the cursor and advance; callers must check hasNext first
  override def next(): String = {
    val log = logs.get(index)
    index += 1
    log
  }

  override def getLogPath: String = logPath

  override def getLogs: util.ArrayList[String] = logs

  override def getEndLine: Long = endLine

  def setLogDirSuffix(logDirSuffix: String) : Unit = this.logDirSuffix = logDirSuffix

  override def getLogDirSuffix: String = logDirSuffix
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala
new file mode 100644
index 000000000..20cb4d081
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala
@@ -0,0 +1,40 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity
+
/**
 * Carries the parameters of a job-log fetch request: paging, keyword
 * filtering, the log source type and whether to read historical logs.
 *
 * @date 2021-11-10
 * @author enjoyyin
 * @since 0.5.0
 */
class LogRequestPayload {

  // Lines returned per page (default 100)
  private var _pageSize = 100
  // 1-based line number to start reading from
  private var _fromLine = 1
  // Lines matching these keywords are filtered out
  private var _ignoreKeywords: String = _
  // When set, only lines matching these keywords are returned
  private var _onlyKeywords: String = _
  // When > 0, only the last N rows are fetched
  private var _lastRows = 0
  // Log source type, e.g. "client" or "yarn"
  private var _logType: String = _
  // Whether the request targets historical (archived) logs
  private var _logHistory: Boolean = false

  def getPageSize: Int = _pageSize
  def setPageSize(pageSize: Int): Unit = _pageSize = pageSize

  def getFromLine: Int = _fromLine
  def setFromLine(fromLine: Int): Unit = _fromLine = fromLine

  def getIgnoreKeywords: String = _ignoreKeywords
  def setIgnoreKeywords(ignoreKeywords: String): Unit = _ignoreKeywords = ignoreKeywords

  def getOnlyKeywords: String = _onlyKeywords
  def setOnlyKeywords(onlyKeywords: String): Unit = _onlyKeywords = onlyKeywords

  def getLastRows: Int = _lastRows
  def setLastRows(lastRows: Int): Unit = _lastRows = lastRows

  def getLogType: String = _logType
  def setLogType(logType: String): Unit = _logType = logType

  def isLogHistory: Boolean = _logHistory
  def setLogHistory(logHistory: Boolean): Unit = _logHistory = logHistory
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala
new file mode 100644
index 000000000..d3dddbc4d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception
+
+import org.apache.linkis.common.exception.ErrorException
+
/**
 * Basic job launch exception
 * @param errorCode error code
 * @param errorMsg error message
 * @param t cause of the failure (may be null)
 */
class FlinkJobLaunchErrorException(errorCode: Int, errorMsg: String, t: Throwable) extends ErrorException(errorCode, errorMsg){
  // Only record the cause when one is actually supplied. initCause(null)
  // still marks the cause as "initialized", which makes any later
  // initCause(...) on this throwable fail with IllegalStateException —
  // and many call sites in this module pass t = null.
  if (t != null) {
    this.initCause(t)
  }
}
+
/**
 * Exception in triggering savepoint
 */
class FlinkSavePointException(errorCode: Int, errorMsg: String, t: Throwable)
  extends FlinkJobLaunchErrorException(errorCode, errorMsg, t)

/**
 * Exception in fetching job state
 */
class FlinkJobStateFetchException(errorCode: Int, errorMsg: String, t: Throwable)
  extends FlinkJobLaunchErrorException(errorCode, errorMsg, t)

/**
 * Exception in fetching job logs
 */
class FlinkJobLogFetchException(errorCode: Int, errorMsg: String, t: Throwable)
  extends FlinkJobLaunchErrorException(errorCode, errorMsg, t)
+
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala
new file mode 100644
index 000000000..b5c64a523
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala
@@ -0,0 +1,220 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobClient
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core.{FlinkLogIterator, SimpleFlinkJobLogIterator}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.{FlinkJobLaunchErrorException, FlinkJobStateFetchException, FlinkSavePointException}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.FlinkJobLaunchManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator.{FlinkClientLogOperator, FlinkTriggerSavepointOperator, FlinkYarnLogOperator}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, Savepoint}
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.computation.client.once.action.ECResourceInfoAction
+import org.apache.linkis.computation.client.once.result.ECResourceInfoResult
+import org.apache.linkis.computation.client.once.{LinkisManagerClient, LinkisManagerClientImpl, OnceJob}
+import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SimpleOnceJobBuilder}
+import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+import java.util
+import java.net.URI
+
/**
 * Client handle for a Flink job submitted through a Linkis OnceJob: exposes
 * status refresh, stop (with optional savepoint), log fetching and savepoint
 * triggering on top of the Linkis operator API.
 */
class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager: JobStateManager)
  extends JobClient[FlinkJobInfo] with Logging{

  /**
   * Log operator
   * Maps the request "logType" value to the name of the Linkis operator that
   * serves that kind of log ("client" = engine conn log, "yarn" = yarn log).
   * NOTE(review): never reassigned — could be a val.
   */
  private var logOperatorMap = Map(
    "client" -> FlinkClientLogOperator.OPERATOR_NAME,
    "yarn" -> FlinkYarnLogOperator.OPERATOR_NAME
  )
  /**
   * The linkis client in onceJob
   * Lazily resolved by getLinkisClient via reflection; may remain null when
   * resolution fails.
   */
  private var linkisClient: DWSHttpClient = _

  /**
   * Return the cached job info without forcing a status refresh from Linkis.
   */
  override def getJobInfo: FlinkJobInfo = {
    getJobInfo(false)
  }

  /**
   * Refresh job info and return
   *
   * NOTE(review): the match only handles SimpleOnceJob — any other OnceJob
   * implementation raises a MatchError here; confirm that is intended.
   *
   * @param refresh refresh
   * @return
   */
  override def getJobInfo(refresh: Boolean): FlinkJobInfo = {
    onceJob match {
      case simpleOnceJob: SimpleOnceJob =>
        simpleOnceJob.getStatus
        jobInfo.setStatus(if (refresh) onceJob.getNodeInfo
          .getOrDefault("nodeStatus", simpleOnceJob.getStatus).asInstanceOf[String] else simpleOnceJob.getStatus)
    }
    jobInfo
  }

  /**
   * Stop the job connected remote
   *
   * @param snapshot if do snapshot to save the job state
   * @return the state info of the triggered savepoint, or null when
   *         snapshot is false (or the savepoint result was empty)
   */
  override def stop(snapshot: Boolean): JobStateInfo = {
    var stateInfo: JobStateInfo = null
    if (snapshot){
      // Begin to call the savepoint operator
      info(s"Trigger Savepoint operator for job [${jobInfo.getId}] before pausing job.")
      Option(triggerSavepoint()) match {
        case Some(savepoint) =>
          stateInfo = new JobStateInfo
          stateInfo.setLocation(savepoint.getLocation.toString)
          stateInfo.setTimestamp(savepoint.getTimestamp)
        case _ =>
      }
    }
    // Kill the engine conn regardless of whether a savepoint was taken
    onceJob.kill()
    stateInfo
  }

  /**
   * Stop directly (no savepoint)
   */
  override def stop(): Unit = stop(false)

  /**
   * Fetch logs
   * Resolves the operator matching the payload's logType, wires it up with the
   * job's ECM instance / application id / log directory suffix, then returns a
   * lazily-polling iterator over the log lines.
   * @param requestPayload request payload
   * @return
   */
  def fetchLogs(requestPayload: LogRequestPayload): FlinkLogIterator = {
    logOperatorMap.get(requestPayload.getLogType) match {
      case Some(operator) =>
        onceJob.getOperator(operator) match {
          case engineConnLogOperator: EngineConnLogOperator =>
            val logIterator = new SimpleFlinkJobLogIterator(requestPayload, engineConnLogOperator)
            engineConnLogOperator match {
              case clientLogOperator: FlinkClientLogOperator =>
                var logDirSuffix = this.jobInfo.getLogDirSuffix
                if (StringUtils.isBlank(logDirSuffix) && requestPayload.isLogHistory){
                  // If want to fetch the history log, must get the log directory suffix first
                  // (queried from the Linkis EC resource info record; best-effort)
                  getLinkisClient match {
                    case client: DWSHttpClient =>
                      Option(Utils.tryCatch{
                        client.execute(ECResourceInfoAction.newBuilder().setUser(jobInfo.getUser)
                          .setTicketid(clientLogOperator.getTicketId).build()).asInstanceOf[ECResourceInfoResult]
                      }{
                        case e: Exception =>
                          warn("Fail to query the engine conn resource info from linkis", e)
                          null
                      }) match {
                        case Some(result) => logDirSuffix = Utils.tryAndWarn{result.getData.getOrDefault("ecResourceInfoRecord", new util.HashMap[String, Any]).asInstanceOf[util.Map[String, Any]]
                          .getOrDefault("logDirSuffix", "").asInstanceOf[String]}
                        case _ =>
                      }
                  }
                }
                clientLogOperator.setLogDirSuffix(logDirSuffix)
                logIterator.setLogDirSuffix(logDirSuffix)
              case _ =>
            }
            engineConnLogOperator match {
              case yarnLogOperator: FlinkYarnLogOperator => yarnLogOperator.setApplicationId(jobInfo.getApplicationId)
              case _ =>
            }
            engineConnLogOperator.setECMServiceInstance(jobInfo.getECMInstance)
            engineConnLogOperator.setEngineConnType(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE)
            logIterator.init()
            // Remember where the log lives so later calls can reuse it
            jobInfo match {
              case jobInfo: FlinkJobInfo => jobInfo.setLogPath(logIterator.getLogPath)
              case _ =>
            }
            logIterator
        }
      case None =>
        throw new FlinkJobStateFetchException(-1, s"Unrecognized log type: ${requestPayload.getLogType}", null)
    }


  }

  /**
   * Get check points
   * Not implemented for this client — always throws.
   * @return
   */
  def getCheckpoints: Array[Checkpoint] = throw new FlinkJobStateFetchException(30401, "Not support method", null)


  /**
   * Trigger save point operation
   * Invokes the Linkis trigger-savepoint operator and wraps failures in
   * FlinkSavePointException.
   * @param savePointDir savepoint directory
   * @param mode mode
   */
  def triggerSavepoint(savePointDir: String, mode: String): Savepoint = {
    Utils.tryCatch{
      onceJob.getOperator(FlinkTriggerSavepointOperator.OPERATOR_NAME) match{
        case savepointOperator: FlinkTriggerSavepointOperator => {
          // TODO Get scheme information from job info
          savepointOperator.setSavepointDir(savePointDir)
          savepointOperator.setMode(mode)
          Option(savepointOperator()) match {
            case Some(savepoint: Savepoint) =>
              savepoint
            // TODO store into job Info
            case _ => throw new FlinkSavePointException(-1, "The response savepoint info is empty", null)
          }
        }
      }
    }{
      case se: FlinkSavePointException =>
        throw se
      case e: Exception =>
        // TODO defined the code for savepoint exception
        throw new FlinkSavePointException(-1, "Fail to trigger savepoint operator", e)
    }
  }

  /**
   * Trigger a savepoint into the configured state directory for this job,
   * using the configured savepoint mode.
   */
  def triggerSavepoint(): Savepoint = {
    val savepointURI: URI = this.stateManager.getJobStateDir(classOf[Savepoint], jobInfo.getName)
    triggerSavepoint(savepointURI.toString, JobLauncherConfiguration.FLINK_TRIGGER_SAVEPOINT_MODE.getValue)
  }

  /**
   * Get linkis client
   * Extracts the private "dwsHttpClient" field of LinkisManagerClientImpl via
   * reflection, since no public accessor is exposed.
   * NOTE(review): double-checked locking on a non-volatile field, and the
   * reflective field name is fragile across Linkis versions — confirm.
   * @return
   */
  def getLinkisClient: DWSHttpClient = {
    Utils.tryAndWarn{
      if (null == this.linkisClient){
        this.synchronized{
          if (null == this.linkisClient){
            this.linkisClient = SimpleOnceJobBuilder.getLinkisManagerClient match {
              case client: LinkisManagerClient =>
                val dwsClientField = classOf[LinkisManagerClientImpl].getDeclaredField("dwsHttpClient")
                dwsClientField.setAccessible(true)
                dwsClientField.get(client).asInstanceOf[DWSHttpClient]
              case _ => null
            }

          }
        }
      }
      this.linkisClient
    }
  }
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala
new file mode 100644
index 000000000..7de4452bd
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo}
+import org.apache.linkis.common.ServiceInstance
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+
+import java.util
+
+
/**
 * Mutable holder of the runtime information of a Flink job launched through
 * Linkis onto YARN: identity, owner, yarn application coordinates, status,
 * log locations, state (checkpoint/savepoint) entries and completion message.
 */
class FlinkJobInfo extends YarnJobInfo {

  // Job id assigned by Linkis
  private var _id: String = _
  // Job name
  private var _name: String = _
  // Engine conn manager instance hosting the job
  private var _ecmInstance: ServiceInstance = _
  // Submission user
  private var _user: String = _
  // Savepoint location
  private var _savepoint: String = _
  // Checkpoint location
  private var _checkpoint: String = _
  // YARN application id
  private var _applicationId: String = _
  // YARN application tracking URL
  private var _applicationUrl: String = _
  // Current job status string
  private var _status: String = _
  // Path of the job log file
  private var _logPath: String = _
  // Directory suffix used to locate historical logs
  private var _logDirSuffix: String = _
  // Resources occupied by the job
  private var _resources: java.util.Map[String, Object] = _
  // Message attached when the job completes
  private var _completedMsg: String = _
  // Checkpoint/savepoint state entries of the job
  private var _jobStates: Array[JobStateInfo] = _

  override def getId: String = _id

  def setId(id: String): Unit = _id = id

  /**
   * Job name
   *
   * @return name
   */
  override def getName: String = _name

  def setName(name: String): Unit = _name = name

  override def getECMInstance: ServiceInstance = _ecmInstance

  def setECMInstance(ecmInstance: ServiceInstance): Unit = _ecmInstance = ecmInstance

  override def getUser: String = _user

  def setUser(user: String): Unit = _user = user

  override def getApplicationId: String = _applicationId

  def setApplicationId(applicationId: String): Unit = _applicationId = applicationId

  override def getApplicationUrl: String = _applicationUrl

  def setApplicationUrl(applicationUrl: String): Unit = _applicationUrl = applicationUrl

  override def getStatus: String = _status

  override def setStatus(status: String): Unit = _status = status

  override def getLogPath: String = _logPath

  def setLogPath(logPath: String): Unit = _logPath = logPath

  /**
   * Job log directory suffix
   *
   * @return
   */
  override def getLogDirSuffix: String = _logDirSuffix

  override def setLogDirSuffix(logDirSuffix: String): Unit = _logDirSuffix = logDirSuffix

  override def getResources: util.Map[String, Object] = _resources

  def setResources(resources: java.util.Map[String, Object]): Unit = _resources = resources

  def getSavepoint: String = _savepoint

  def setSavepoint(savepoint: String): Unit = _savepoint = savepoint

  def getCheckpoint: String = _checkpoint

  def setCheckpoint(checkpoint: String): Unit = _checkpoint = checkpoint

  override def getCompletedMsg: String = _completedMsg

  def setCompletedMsg(completedMsg: String): Unit = _completedMsg = completedMsg

  /**
   * Contains the check point and save points
   *
   * @return
   */
  override def getJobStates: Array[JobStateInfo] = _jobStates

  def setJobStates(jobStates: Array[JobStateInfo]): Unit = _jobStates = jobStates

  override def toString: String =
    s"FlinkJobInfo(id: ${_id}, name: ${_name}, status: ${_status}, applicationId: ${_applicationId}, applicationUrl: ${_applicationUrl}, logPath: ${_logPath})"
}
+
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala
new file mode 100644
index 000000000..4183f5025
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala
@@ -0,0 +1,21 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import org.apache.linkis.common.ServiceInstance
+
/**
 * Job info of a job submitted through Linkis: extends the generic JobInfo
 * with the ECM (engine conn manager) instance and the log directory suffix.
 */
trait LinkisJobInfo extends JobInfo {

  /**
   * Fetch engine conn manager instance info
   * @return
   */
  def getECMInstance: ServiceInstance

  /**
   * Job log directory suffix
   * (relative directory fragment used to locate historical engine logs)
   * @return
   */
  def getLogDirSuffix: String

  // Set the log directory suffix (see getLogDirSuffix)
  def setLogDirSuffix(logDirSuffix: String): Unit
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala
new file mode 100644
index 000000000..d2907f901
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala
@@ -0,0 +1,9 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
+
/**
 * Linkis job info for jobs running on YARN: exposes the yarn application id
 * and the application tracking URL.
 */
trait YarnJobInfo extends LinkisJobInfo {

  // YARN application id of the job
  def getApplicationId: String

  // Tracking URL of the YARN application
  def getApplicationUrl: String

}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala
new file mode 100644
index 000000000..767b9e58e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateFetcher}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.AbstractJobStateManager.WINDOWS_ROOT_DIR_REGEX
+import org.apache.linkis.common.utils.Utils
+
+import java.net.{URI, URL, URLConnection, URLStreamHandler}
+import java.util.concurrent.ConcurrentHashMap
+import java.util
+import scala.util.matching.Regex
/**
 * Abstract job state manager.
 * Caches one JobStateFetcher per state type (created lazily through the
 * registered loaders) and builds the storage URI of a job state directory.
 */
abstract class AbstractJobStateManager extends JobStateManager {

  /**
   * Hold the job state fetcher with its type
   * (key = canonical class name of the state type)
   */
  protected val jobStateFetcherHolder: ConcurrentHashMap[String, JobStateFetcher[_ <: JobState]]
  = new ConcurrentHashMap[String, JobStateFetcher[_ <: JobState]]()

  /**
   * Fetcher loaders
   * (key = canonical class name, value = factory building the fetcher)
   */
  protected val stateFetcherLoaders: util.Map[String, ()=> JobStateFetcher[_ <: JobState]] = new util.HashMap[String, () => JobStateFetcher[_ <: JobState]]()

  /**
   * Get (or lazily create and init) the fetcher registered for the state class.
   *
   * @throws FlinkJobStateFetchException when no loader is registered for the
   *                                     type or the fetcher fails to init
   */
  override def getOrCreateJobStateFetcher[T <: JobState](clazz: Class[_]): JobStateFetcher[T] = {
    val stateType = clazz.getCanonicalName
    val loader = Option(stateFetcherLoaders.get(stateType))
    if (loader.isEmpty){
      throw new FlinkJobStateFetchException(-1, s"Cannot find the fetcher loader for [$stateType]", null)
    }
    jobStateFetcherHolder.computeIfAbsent(stateType, new util.function.Function[String, JobStateFetcher[_ <: JobState]]{
      override def apply(t: String): JobStateFetcher[_ <: JobState] = {
        val fetcher = loader.get.apply()
        Utils.tryCatch(fetcher.init()){
          case e: Exception =>
            // Fixed: the closing "]" was missing from this message
            throw new FlinkJobStateFetchException(-1, s"Unable to init the state fetcher [${fetcher.getClass.getName}]", e)
        }
        fetcher
      }
    }).asInstanceOf[JobStateFetcher[T]]
  }

  /**
   * Fetch the state of the job through the fetcher registered for the class;
   * returns null when no fetcher could be resolved.
   */
  override def getJobState[T <: JobState](clazz: Class[_], jobInfo: JobInfo): T = Option(getOrCreateJobStateFetcher[T](clazz)) match {
    case Some(jobStateFetcher: JobStateFetcher[T]) =>jobStateFetcher.getState(jobInfo)
    case _ => null.asInstanceOf[T]
  }

  /**
   * Register job state fetcher
   *
   * @param clazz clazz
   * @param builder job state fetcher loader/builder
   * @tparam T
   */
  override def registerJobStateFetcher(clazz: Class[_], builder: () => JobStateFetcher[_ <: JobState]): Unit = {
    stateFetcherLoaders.put(clazz.getCanonicalName, builder)
  }

  // Variant without an authority component
  override def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, relativePath: String): URI = {
    getJobStateDir(clazz, scheme, null, relativePath)
  }


  // Variant using the configured default scheme and authority
  override def getJobStateDir[T <: JobState](clazz: Class[_], relativePath: String): URI = {
    getJobStateDir(clazz, JobLauncherConfiguration.FLINK_STATE_DEFAULT_SCHEME.getValue,
      JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY.getValue, relativePath)
  }

  /**
   * Get job state directory uri
   *
   * @param clazz clazz
   * @param scheme scheme
   * @param authority authority
   * @param relativePath relative path
   * @tparam T
   * @return
   */
  override def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, authority: String, relativePath: String): URI = {
    // To Support all schema
    new URI(scheme, authority, normalizePath(getJobStateRootPath(clazz, scheme) + "/" + relativePath), null, null)
  }

  /**
   * Normalize a state path: unify separators, collapse duplicate slashes and
   * strip a trailing slash (except for "/" itself and Windows drive roots).
   */
  private def normalizePath(input: String): String = {
    var path = input.replace("\\", "/")
    path = path.replaceAll("/+", "/")
    // Replace "." to "/"
    // NOTE(review): this rewrites every dot into a slash, so a job name such
    // as "my.job" becomes "my/job" — confirm this is intended behavior.
    path = path.replaceAll("\\.", "/")
    if (path.endsWith("/") && !(path == "/") && !WINDOWS_ROOT_DIR_REGEX.pattern.matcher(path).matches()) path = path.substring(0, path.length - "/".length)
    path
  }

  // Root path under which states of the given class are stored for the scheme
  def getJobStateRootPath[T <: JobState](clazz: Class[_], schema: String): String
}
+
object AbstractJobStateManager{
  // Matches a Windows-style drive root such as "/C:/" so that normalizePath
  // does not strip the trailing slash of a drive root
  val WINDOWS_ROOT_DIR_REGEX: Regex = "/\\p{Alpha}+:/".r
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala
new file mode 100644
index 000000000..a00edd8e8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration.{VAR_FLINK_APP_NAME, VAR_FLINK_SAVEPOINT_PATH}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobLaunchErrorException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.computation.client.once.{OnceJob, SubmittableOnceJob}
+import org.apache.linkis.computation.client.utils.LabelKeyUtils
+import org.apache.linkis.protocol.utils.TaskUtils
+
+
+
+/**
+ * Launch manager for Flink jobs submitted to Linkis as once jobs.
+ */
+trait FlinkJobLaunchManager extends LinkisJobLaunchManager with Logging {
+
+  /** Lazily created job state manager; always access it via [[getJobStateManager]]. */
+  protected var jobStateManager: JobStateManager = _
+
+  /** Build a submittable once job from the launch information. */
+  protected def buildOnceJob(job: LaunchJob): SubmittableOnceJob
+
+  /** Rebuild a once job handle for an already submitted job. */
+  protected def createSubmittedOnceJob(id: String, jobInfo: LinkisJobInfo): OnceJob
+
+  /** Create the job info of a freshly submitted once job. */
+  protected def createJobInfo(onceJob: SubmittableOnceJob, job: LaunchJob, jobState: JobState): LinkisJobInfo
+
+  /** Deserialize a job info from its string form. */
+  protected def createJobInfo(jobInfo: String): LinkisJobInfo
+
+  /**
+   * This method is used to launch a new job.
+   *
+   * @param job a StreamisJob wanted to be launched.
+   * @param jobState job state (savepoint/checkpoint) used to launch, may be null
+   * @return the job client of the launched job
+   * @throws FlinkJobLaunchErrorException when the engine type is not flink or submission fails
+   */
+  override def innerLaunch(job: LaunchJob, jobState: JobState): JobClient[LinkisJobInfo] = {
+    // Transform the JobState into the params in LaunchJob
+    Option(jobState).foreach(state => {
+      val startUpParams = TaskUtils.getStartupMap(job.getParams)
+      startUpParams.putIfAbsent(VAR_FLINK_SAVEPOINT_PATH.getValue,
+        state.getLocation.toString)
+    })
+    // Use the job name (minus its extension suffix) as the Flink application name
+    TaskUtils.getStartupMap(job.getParams).put(VAR_FLINK_APP_NAME.getValue,
+      Option(job.getJobName) match {
+        case None => "EngineConn-Flink"
+        case Some(jobName) =>
+          val index = jobName.lastIndexOf(".")
+          if (index > 0) jobName.substring(0, index) else jobName
+      })
+    // Only the flink engine type is accepted; a missing (null) label falls into the default case,
+    // because a type pattern never matches null
+    job.getLabels.get(LabelKeyUtils.ENGINE_TYPE_LABEL_KEY) match {
+      case engineConnType: String =>
+        if(!engineConnType.toLowerCase.startsWith(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE))
+          throw new FlinkJobLaunchErrorException(30401, s"Only ${FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE} job is supported to be launched to Linkis, but $engineConnType is found.", null)
+      case _ => throw new FlinkJobLaunchErrorException(30401, s"Not exists ${LabelKeyUtils.ENGINE_TYPE_LABEL_KEY}, StreamisJob cannot be submitted to Linkis successfully.", null)
+    }
+    Utils.tryCatch {
+      val onceJob = buildOnceJob(job)
+      onceJob.submit()
+      val jobInfo = Utils.tryCatch(createJobInfo(onceJob, job, jobState)) {
+        case e: FlinkJobLaunchErrorException =>
+          throw e
+        case t: Throwable =>
+          // Kill the EngineConn to avoid leaking an orphan engine when job info cannot be created
+          error(s"${job.getSubmitUser} create jobInfo failed, now stop this EngineConn ${onceJob.getId}.")
+          Utils.tryAndWarn(onceJob.kill())
+          throw new FlinkJobLaunchErrorException(-1, "Fail to obtain launched job info", t)
+      }
+      createJobClient(onceJob, jobInfo)
+    }{
+      case e: FlinkJobLaunchErrorException => throw e
+      case t: Throwable =>
+        error(s"Server Exception in submitting Flink job [${job.getJobName}] to Linkis remote server", t)
+        throw new FlinkJobLaunchErrorException(-1, s"Exception in submitting Flink job to Linkis remote server (提交至Linkis服务失败,请检查服务及网络)", t)
+    }
+  }
+
+  override def launch(job: LaunchJob): JobClient[LinkisJobInfo] = {
+    launch(job, null)
+  }
+
+  override def connect(id: String, jobInfo: String): JobClient[LinkisJobInfo] = {
+    connect(id, createJobInfo(jobInfo))
+  }
+
+  override def connect(id: String, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo] = {
+    createJobClient(createSubmittedOnceJob(id, jobInfo), jobInfo)
+  }
+
+  /**
+   * Job state manager(store the state information, example: Checkpoint/Savepoint)
+   *
+   * @return state manager instance
+   */
+  override def getJobStateManager: JobStateManager = {
+    // Double-checked locking: the previous version assigned without re-checking inside
+    // the lock, so two concurrent callers could each create a FlinkJobStateManager
+    if (jobStateManager == null) {
+      this synchronized {
+        if (jobStateManager == null) {
+          // Flink job state manager
+          jobStateManager = new FlinkJobStateManager
+        }
+      }
+    }
+    jobStateManager
+  }
+
+  /**
+   * Create job client
+   * @param onceJob once job
+   * @param jobInfo job info
+   * @return
+   */
+  protected def createJobClient(onceJob: OnceJob, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo]
+}
+object FlinkJobLaunchManager {
+  // Engine conn type accepted by this launch manager (compared case-insensitively)
+  val FLINK_ENGINE_CONN_TYPE = "flink"
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala
new file mode 100644
index 000000000..75a4af2b1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala
@@ -0,0 +1,56 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, CheckpointJobStateFetcher, Savepoint, SavepointJobStateFetcher}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url.LinkisURLStreamHandlerFactory
+import org.apache.linkis.common.utils.{Logging, Utils}
+
+import java.net.URL
+import scala.collection.JavaConverters.mapAsScalaMapConverter
+
+
+/**
+ * Flink job state manager: resolves the storage root path per state type and
+ * serves the JobState fetchers for checkpoints and savepoints.
+ */
+class FlinkJobStateManager extends AbstractJobStateManager with Logging{
+ /**
+ * State type => root path
+ */
+ val stateRootPath: Map[String, String] = Map(
+ classOf[Savepoint].getCanonicalName -> JobLauncherConfiguration.FLINK_SAVEPOINT_PATH.getValue,
+ classOf[Checkpoint].getCanonicalName -> JobLauncherConfiguration.FLINK_CHECKPOINT_PATH.getValue
+ )
+
+ /**
+ * Root path configured for the state class; empty string for unknown classes.
+ */
+ override def getJobStateRootPath[T <: JobState](clazz: Class[_], schema: String): String = {
+ stateRootPath.getOrElse(clazz.getCanonicalName, "")
+ }
+
+ /**
+ * Init method
+ */
+ override def init(): Unit = {
+ info("Register the loader for JobState fetcher")
+ // Register the lazily-constructed fetchers for each supported state type
+ registerJobStateFetcher(classOf[Checkpoint], () => new CheckpointJobStateFetcher(classOf[Checkpoint], this))
+ registerJobStateFetcher(classOf[Savepoint], () => new SavepointJobStateFetcher(classOf[Savepoint], this))
+ }
+
+ /**
+ * Destroy method
+ */
+ override def destroy(): Unit = {
+ // Close the loaded fetcher
+ jobStateFetcherHolder.asScala.foreach(stateFetcher => {
+ Utils.tryAndWarn(stateFetcher._2.destroy())
+ })
+ }
+}
+
+object FlinkJobStateManager{
+  // Install the URL stream handlers for the supported state schemes (e.g. hdfs).
+  // URL.setURLStreamHandlerFactory may be invoked at most once per JVM and throws
+  // java.lang.Error on a second call, which would previously abort this object's
+  // static initialization -- guard it and only log the failure.
+  Utils.tryAndWarn {
+    URL.setURLStreamHandlerFactory(new LinkisURLStreamHandlerFactory(JobLauncherConfiguration.FLINK_STATE_SUPPORT_SCHEMES.getValue.split(","): _*))
+  }
+
+  def main(args: Array[String]): Unit = {
+    // no-op entry point, kept for ad-hoc manual testing
+  }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala
new file mode 100644
index 000000000..26d76cfdf
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala
@@ -0,0 +1,97 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.LinkisJobLaunchManager.LINKIS_JAR_VERSION_PATTERN
+import org.apache.commons.io.IOUtils
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.computation.client.LinkisJob
+import org.apache.linkis.protocol.utils.TaskUtils
+
+import java.util
+import scala.collection.JavaConverters._
+import scala.util.matching.Regex
+
+trait LinkisJobLaunchManager extends JobLaunchManager[LinkisJobInfo] with Logging{
+  /**
+   * This method is used to launch a new job.
+   *
+   * Before delegating to [[innerLaunch]], it recognizes the Linkis release version
+   * and, for releases <= 1.1.1, rewrites the startup params for backward compatibility.
+   *
+   * @param job a StreamisJob wanted to be launched.
+   * @param jobState job state used to launch
+   * @return the job client
+   */
+  override def launch(job: LaunchJob, jobState: JobState): JobClient[LinkisJobInfo] = {
+    // Support different version of Linkis
+    var linkisVersion = JobLauncherConfiguration.FLINK_LINKIS_RELEASE_VERSION.getValue
+    if (StringUtils.isBlank(linkisVersion)) {
+      // Fall back to the version embedded in the linkis client jar file name
+      val linkisJarPath = classOf[LinkisJob].getProtectionDomain.getCodeSource.getLocation.getPath
+      val lastSplit = linkisJarPath.lastIndexOf(IOUtils.DIR_SEPARATOR)
+      if (lastSplit >= 0) {
+        linkisVersion = linkisJarPath.substring(lastSplit + 1)
+      }
+    }
+    if (StringUtils.isNotBlank(linkisVersion)) {
+      // Extract the "x.y.z" part; tryAndWarn swallows the MatchError when the pattern fails
+      Utils.tryAndWarn {
+        val LINKIS_JAR_VERSION_PATTERN(version) = linkisVersion
+        linkisVersion = version
+      }
+    }
+    if (StringUtils.isNotBlank(linkisVersion)){
+      val versionSplitter: Array[String] = linkisVersion.split("\\.")
+      // Guard against malformed versions such as "1.4": the previous code indexed
+      // (1) and (2) unconditionally and could raise ArrayIndexOutOfBoundsException
+      if (versionSplitter.length >= 3) {
+        Utils.tryAndWarn {
+          val major = Integer.valueOf(versionSplitter(0))
+          val sub = Integer.valueOf(versionSplitter(1))
+          val fix = Integer.valueOf(versionSplitter(2))
+          val versionNum = major * 10000 + sub * 100 + fix
+          info(s"Recognized the linkis release version: [${linkisVersion}, version number: [${versionNum}]")
+          if (versionNum <= 10101){
+            warn("Linkis version number is less than [10101], should compatible the startup params in launcher.")
+            val startupParams = TaskUtils.getStartupMap(job.getParams)
+            // Change the unit of memory params for linkis older version
+            changeUnitOfMemoryToG(startupParams, "flink.taskmanager.memory")
+            changeUnitOfMemoryToG(startupParams, "flink.jobmanager.memory")
+            // Avoid the _FLINK_CONFIG_. prefix for linkis older version
+            val newParams = avoidParamsPrefix(startupParams, "_FLINK_CONFIG_.")
+            startupParams.clear()
+            startupParams.putAll(newParams)
+          }
+        }
+      } else {
+        warn(s"Unrecognized linkis release version: [${linkisVersion}], skip the startup params compatibility handling.")
+      }
+    }
+    innerLaunch(job, jobState)
+  }
+
+  /**
+   * Change the unit of a memory startup param from MB to GB (older Linkis expects GB).
+   * Values below 1GB are rounded up to 1; non-numeric values are left untouched
+   * (the previous code threw NumberFormatException on them).
+   *
+   * @param params startup params
+   * @param name param name
+   */
+  private def changeUnitOfMemoryToG(params: util.Map[String, Any], name: String): Unit = {
+    params.get(name) match {
+      case memory: String =>
+        Utils.tryAndWarn {
+          // Integer division floors; clamp to at least 1G
+          var actualMem = Integer.valueOf(memory.trim) / 1024
+          actualMem = if (actualMem <= 0) 1 else actualMem
+          info(s"Change the unit of startup param: [${name}], value [${memory}] => [${actualMem}]")
+          params.put(name, actualMem)
+        }
+      case _ => // Ignores
+    }
+  }
+
+  /**
+   * Avoid params prefix
+   * @param params params
+   * @param prefix prefix
+   * @return a new map with the prefix stripped from the matching keys
+   */
+  private def avoidParamsPrefix(params: util.Map[String, Any], prefix: String): util.Map[String, Any] = {
+    params.asScala.map{
+      case (key, value) =>
+        if (key.startsWith(prefix)){
+          info(s"Avoid the prefix of startup param: [${key}] => [${key.substring(prefix.length)}]")
+          (key.substring(prefix.length), value)
+        } else {
+          (key, value)
+        }
+    }.toMap.asJava
+  }
+
+  /** Actual launch implementation provided by subclasses. */
+  def innerLaunch(job: LaunchJob, jobState: JobState): JobClient[LinkisJobInfo]
+}
+
+object LinkisJobLaunchManager{
+  // Extracts the first "x.y.z" release number embedded anywhere in a jar file name
+  val LINKIS_JAR_VERSION_PATTERN: Regex = "^[\\s\\S]*([\\d]+\\.[\\d]+\\.[\\d]+)[\\s\\S]*$".r
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala
new file mode 100644
index 000000000..8b4308a2b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.SimpleFlinkJobLaunchManager.INSTANCE_NAME
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo, LinkisJobInfo}
+import org.apache.commons.lang3.StringEscapeUtils
+import org.apache.linkis.common.utils.{RetryHandler, Utils}
+import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SubmittableSimpleOnceJob}
+import org.apache.linkis.computation.client.once.{OnceJob, SubmittableOnceJob}
+import org.apache.linkis.computation.client.operator.impl.EngineConnApplicationInfoOperator
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+import org.apache.linkis.ujes.client.exception.UJESJobException
+
+import java.util
+import scala.collection.JavaConverters.mapAsScalaMapConverter
+
+/**
+ * Flink launch manager built on Linkis SimpleOnceJob.
+ */
+class SimpleFlinkJobLaunchManager extends FlinkJobLaunchManager {
+
+  override def getName: String = INSTANCE_NAME
+
+  /** Build a simple once job from the launch information. */
+  protected def buildOnceJob(job: LaunchJob): SubmittableOnceJob = {
+    val builder = SimpleOnceJob.builder().addExecuteUser(job.getSubmitUser).setLabels(job.getLabels)
+      .setJobContent(job.getJobContent).setParams(job.getParams).setSource(job.getSource)
+    if(job.getLaunchConfigs != null) {
+      job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_CREATE_SERVICE).foreach{ case createService: String => builder.setCreateService(createService)}
+      job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_DESCRIPTION).foreach{ case desc: String => builder.setDescription(desc)}
+      job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_MAX_SUBMIT_TIME).foreach{ case maxSubmitTime: Long => builder.setMaxSubmitTime(maxSubmitTime)}
+    }
+    builder.build()
+  }
+
+  override protected def createSubmittedOnceJob(id: String, jobInfo: LinkisJobInfo): OnceJob = SimpleOnceJob.build(id, jobInfo.getUser)
+
+  /**
+   * Create the job info of a freshly submitted job; marks the job "failed" when
+   * the yarn application info cannot be fetched.
+   */
+  override protected def createJobInfo(onceJob: SubmittableOnceJob, job: LaunchJob, jobState: JobState): LinkisJobInfo = {
+    val nodeInfo = onceJob.getNodeInfo
+    val jobInfo = new FlinkJobInfo
+    // Escape the job name
+    jobInfo.setName(StringEscapeUtils.escapeJava(job.getJobName))
+    jobInfo.setId(onceJob.getId)
+    jobInfo.setUser(job.getSubmitUser)
+    onceJob match {
+      case simpleOnceJob: SubmittableSimpleOnceJob =>
+        jobInfo.setECMInstance(simpleOnceJob.getECMServiceInstance)
+      case _ =>
+    }
+    Utils.tryCatch(fetchApplicationInfo(onceJob, jobInfo)) { t =>
+      val message = s"Unable to fetch the application info of launched job [${job.getJobName}], maybe the engine has been shutdown"
+      error(message, t)
+      // Mark failed
+      jobInfo.setStatus("failed")
+      jobInfo.setCompletedMsg(message)
+    }
+    jobInfo.setResources(nodeInfo.get("nodeResource").asInstanceOf[util.Map[String, Object]])
+    jobInfo
+  }
+
+  override protected def createJobInfo(jobInfo: String): LinkisJobInfo = DWSHttpClient.jacksonJson.readValue(jobInfo, classOf[FlinkJobInfo])
+
+  /**
+   * Fetch the yarn application id/url with retries, since the info may not be
+   * available right after submission.
+   */
+  protected def fetchApplicationInfo(onceJob: OnceJob, jobInfo: FlinkJobInfo): Unit = {
+    onceJob.getOperator(EngineConnApplicationInfoOperator.OPERATOR_NAME) match {
+      case applicationInfoOperator: EngineConnApplicationInfoOperator =>
+        val retryHandler = new RetryHandler {}
+        retryHandler.setRetryNum(JobLauncherConfiguration.FLINK_FETCH_APPLICATION_INFO_MAX_TIMES.getValue)
+        retryHandler.setRetryMaxPeriod(5000)
+        retryHandler.setRetryPeriod(500)
+        retryHandler.addRetryException(classOf[UJESJobException])
+        val applicationInfo = retryHandler.retry(applicationInfoOperator(), "Fetch-Yarn-Application-Info")
+        jobInfo.setApplicationId(applicationInfo.applicationId)
+        jobInfo.setApplicationUrl(applicationInfo.applicationUrl)
+      case other =>
+        // The previous non-exhaustive match raised scala.MatchError here
+        warn(s"Unsupported operator [$other] for fetching the yarn application info, ignore it.")
+    }
+  }
+
+  /**
+   * Create job client
+   *
+   * @param onceJob once job
+   * @param jobInfo job info
+   * @return
+   */
+  override protected def createJobClient(onceJob: OnceJob, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo] = {
+    jobInfo match {
+      case flinkJobInfo: FlinkJobInfo =>
+        // Use the lazy accessor instead of the raw field, which may still be null
+        new FlinkJobClient(onceJob, flinkJobInfo, getJobStateManager).asInstanceOf[JobClient[LinkisJobInfo]]
+      case _ => null
+    }
+  }
+
+  /**
+   * Init method
+   */
+  override def init(): Unit = {
+    // Init the job state manager
+    getJobStateManager.init()
+  }
+
+  /**
+   * Destroy method
+   */
+  override def destroy(): Unit = {
+    // Destroy the job state manager
+    getJobStateManager.destroy()
+  }
+}
+object SimpleFlinkJobLaunchManager{
+
+  // Registry name of this launch manager implementation
+  val INSTANCE_NAME = "simpleFlink";
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala
new file mode 100644
index 000000000..a41018a74
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala
@@ -0,0 +1,34 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator
+
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.computation.client.once.action.EngineConnOperateAction
+import org.apache.linkis.computation.client.operator.impl.{EngineConnLogOperator, EngineConnLogs}
+
+/**
+ * Log operator that appends the "logDirSuffix" parameter to the base
+ * EngineConnLogOperator request.
+ */
+class FlinkClientLogOperator extends EngineConnLogOperator{
+
+  // Suffix of the engine log directory; only sent when non-blank
+  private var logDirSuffix: String = _
+
+  def setLogDirSuffix(logDirSuffix: String): Unit = {
+    this.logDirSuffix = logDirSuffix
+  }
+
+  protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = {
+    // NOTE(review): the request deliberately carries the base operator name
+    // (EngineConnLogOperator.OPERATOR_NAME) while getName returns the flink-specific
+    // registry name -- confirm the server side expects the base name here
+    builder.operatorName(EngineConnLogOperator.OPERATOR_NAME)
+    if (StringUtils.isNotBlank(this.logDirSuffix)) {
+      builder.addParameter("logDirSuffix", logDirSuffix)
+    }
+    super.addParameters(builder)
+  }
+
+  // Removed the previous no-op override "getTicketId = super.getTicketId":
+  // it added nothing over the inherited implementation
+
+  override def getName: String = FlinkClientLogOperator.OPERATOR_NAME
+}
+
+object FlinkClientLogOperator {
+  // Registry name of this operator (flink-specific variant of engineConnLog)
+  val OPERATOR_NAME = "engineConnLog_flink"
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala
new file mode 100644
index 000000000..993847836
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.Savepoint
+import org.apache.linkis.computation.client.once.action.EngineConnOperateAction
+import org.apache.linkis.computation.client.once.result.EngineConnOperateResult
+import org.apache.linkis.computation.client.operator.OnceJobOperator
+
+/**
+ * Flink trigger savepoint operator: asks the engine conn to perform a
+ * savepoint and returns the written savepoint location.
+ */
+class FlinkTriggerSavepointOperator extends OnceJobOperator[Savepoint]{
+
+ /**
+ * Save point directory
+ */
+ private var savepointDir: String = _
+
+ /**
+ * Mode
+ */
+ private var mode: String = _
+
+ def setSavepointDir(savepointDir: String): Unit ={
+ this.savepointDir = savepointDir
+ }
+
+ def setMode(mode: String): Unit = {
+ this.mode = mode
+ }
+
+ override protected def addParameters(builder: EngineConnOperateAction.Builder): Unit = {
+ // Both values are sent as-is; callers must set them before invoking the operator
+ builder.addParameter("savepointPath", savepointDir)
+ builder.addParameter("mode", mode)
+ }
+
+ /**
+ * Build a Savepoint from the "writtenSavepoint" field of the operation result.
+ */
+ override protected def resultToObject(result: EngineConnOperateResult): Savepoint = {
+ val savepointPath:String = result.getAs("writtenSavepoint")
+ info(s"Get the savepoint store path: [$savepointPath] form ${FlinkTriggerSavepointOperator.OPERATOR_NAME} operation")
+ new Savepoint(savepointPath)
+ }
+
+ override def getName: String = FlinkTriggerSavepointOperator.OPERATOR_NAME
+}
+object FlinkTriggerSavepointOperator{
+ // Operator name understood by the flink engine conn
+ val OPERATOR_NAME = "doSavepoint"
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala
new file mode 100644
index 000000000..975b23405
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator
+
+import org.apache.linkis.computation.client.once.action.EngineConnOperateAction
+import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator
+
+/**
+ * Extend the flink client log operator with the yarn application id parameter
+ */
+class FlinkYarnLogOperator extends FlinkClientLogOperator {
+
+ private var applicationId: String = _
+
+ def setApplicationId(applicationId: String): Unit = {
+ this.applicationId = applicationId
+ }
+
+ protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = {
+ // super sets the base operator name first; it is then overridden with this
+ // operator's own name, so the ordering of these two calls matters
+ super.addParameters(builder)
+ builder.operatorName(getName)
+ builder.addParameter("yarnApplicationId", this.applicationId)
+ }
+
+ override def getName: String = FlinkYarnLogOperator.OPERATOR_NAME
+}
+
+object FlinkYarnLogOperator{
+ // Operator name for fetching logs from yarn
+ val OPERATOR_NAME = "engineConnYarnLog"
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractJobStateResult.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractJobStateResult.scala
new file mode 100644
index 000000000..0f5cbe327
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractJobStateResult.scala
@@ -0,0 +1,7 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state
+
+import org.apache.linkis.httpclient.dws.response.DWSResult
+
+/**
+ * Base class of the http results returned by the job state client.
+ */
+abstract class AbstractJobStateResult extends DWSResult {
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala
new file mode 100644
index 000000000..a5e42e599
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+
+/**
+ * Holds the information of a single Flink checkpoint.
+ */
+class Checkpoint(location: String) extends GenericFlinkJobState(location) {
+
+  // Sequence number of this checkpoint among its siblings (-1 when unknown).
+  // The redundant "with JobState" mixin was dropped: the parent already extends it.
+  private var order: Long = -1L
+
+  def getOrder: Long = order
+
+  def setOrder(order: Long): Unit = {
+    this.order = order
+  }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/GenericFlinkJobState.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/GenericFlinkJobState.scala
new file mode 100644
index 000000000..32aa1bc86
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/GenericFlinkJobState.scala
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+
+import java.net.URI
+
+/**
+ * Generic flink job state: an immutable storage location plus mutable
+ * id / timestamp / metadata attributes.
+ */
+class GenericFlinkJobState(location: String) extends JobState{
+
+  // Millisecond timestamp at which the state was saved (-1 when unknown)
+  private var stateTimestamp: Long = -1L
+
+  // State id; defaults to the "{ID}" placeholder until assigned
+  private var stateId: String = "{ID}"
+
+  // Extra metadata attached to the state
+  private var metaInfo: Any = _
+
+  override def getLocation: URI = URI.create(location)
+
+  /**
+   * Job state id
+   */
+  override def getId: String = stateId
+
+  def setId(id: String): Unit = this.stateId = id
+
+  /**
+   * Timestamp to save the state
+   */
+  override def getTimestamp: Long = stateTimestamp
+
+  def setTimestamp(timestamp: Long): Unit = this.stateTimestamp = timestamp
+
+  override def getMetadataInfo: Any = metaInfo
+
+  def setMetadataInfo(metadataInfo: Any): Unit = this.metaInfo = metadataInfo
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Savepoint.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Savepoint.scala
new file mode 100644
index 000000000..fd91292c6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Savepoint.scala
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+
+/**
+ * Holds the information of a Flink savepoint.
+ */
+class Savepoint(location: String) extends GenericFlinkJobState(location) with JobState {
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateClientConf.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateClientConf.scala
new file mode 100644
index 000000000..45e5cc54c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateClientConf.scala
@@ -0,0 +1,10 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client
+
+
/**
 * Configuration holder for the Linkis job-state client.
 * NOTE(review): currently an empty placeholder — confirm whether client
 * configuration keys should live here or the class can be removed.
 */
class LinkisJobStateClientConf {

}
/**
 * Companion object reserved for configuration constants (currently empty).
 */
object LinkisJobStateClientConf{

}
+
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateGetAction.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateGetAction.scala
new file mode 100644
index 000000000..180cf95ff
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateGetAction.scala
@@ -0,0 +1,25 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client
+
+import org.apache.linkis.httpclient.dws.request.DWSHttpAction
+import org.apache.linkis.httpclient.request.{GetAction, UserAction}
+
/**
 * HTTP GET action that queries the Linkis filesystem API
 * ("filesystem/getDirFileTrees") for the directory tree holding job state.
 */
class LinkisJobStateGetAction extends GetAction with DWSHttpAction with UserAction {

  // Name of the user issuing the request
  private var user: String = _

  /**
   * Convenience constructor.
   *
   * @param user request user
   * @param path state directory path to query
   */
  def this(user: String, path: String) = {
    this()
    setUser(user)
    setParameter("path", path)
  }

  override def suffixURLs: Array[String] = Array("filesystem", "getDirFileTrees")

  override def setUser(user: String): Unit = this.user = user

  override def getUser: String = user
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/pom.xml
new file mode 100755
index 000000000..d9c4324e7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/pom.xml
@@ -0,0 +1,81 @@
+
+
+
+
+
+ streamis-jobmanager
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ streamis-job-launcher-service
+
+
+ 8
+ 8
+
+
+
+
+ org.apache.linkis
+ linkis-mybatis
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-launcher-base
+ ${jobmanager.version}
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-manager-base
+ ${jobmanager.version}
+
+
+ org.apache.linkis
+ linkis-module
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+
+
+ src/main/java
+
+ **/*.xml
+
+
+
+ ${project.artifactId}-${project.version}
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java
new file mode 100644
index 000000000..58b781b1c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager$;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.SimpleFlinkJobLaunchManager$;
+import org.apache.linkis.common.utils.ClassUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Objects;
+
+@Configuration
+public class JobLauncherAutoConfiguration {
+
+ private static final Logger LOG = LoggerFactory.getLogger(JobLauncherAutoConfiguration.class);
+
+ public static final String DEFAULT_JOB_LAUNCH_MANGER = SimpleFlinkJobLaunchManager$.MODULE$.INSTANCE_NAME();
+
+ @Bean(initMethod = "init", destroyMethod = "destroy")
+ @ConditionalOnMissingBean(JobLaunchManager.class)
+ @SuppressWarnings("unchecked")
+ public JobLaunchManager extends JobInfo> defaultJobLaunchManager(){
+ // First to scan the available job launch manager
+ ClassUtils.reflections().getSubTypesOf(JobLaunchManager.class).stream()
+ .filter(clazz -> !ClassUtils.isInterfaceOrAbstract(clazz)).forEach(clazz -> {
+ Constructor> constructor = null;
+ try {
+ constructor = clazz.getConstructor();
+ } catch (NoSuchMethodException e) {
+ LOG.warn("Job launch manger: [{}] has no empty constructor ", clazz.getCanonicalName(), e);
+ }
+ if (Objects.nonNull(constructor)){
+ try {
+ JobLaunchManager extends JobInfo> launchManager = (JobLaunchManager extends JobInfo>) constructor.newInstance();
+ JobLaunchManager$.MODULE$.registerJobManager(launchManager.getName(), launchManager);
+ } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+ LOG.warn("Unable to instance the job launch manager: [{}]", clazz.getCanonicalName(), e);
+ }
+ }
+ });
+ // Use the flink job launch manager as default
+ JobLaunchManager extends JobInfo> defaultManager = JobLaunchManager$.MODULE$.getJobManager(DEFAULT_JOB_LAUNCH_MANGER);
+ if (Objects.isNull(defaultManager)){
+ throw new IllegalArgumentException("Unable to find the default job launch manger: [" + DEFAULT_JOB_LAUNCH_MANGER +
+ "], please check the jar classpath and configuration");
+ }
+ return defaultManager;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/StreamJobConfMapper.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/StreamJobConfMapper.java
new file mode 100644
index 000000000..8aba65896
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/StreamJobConfMapper.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.dao;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfValue;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+/**
+ * Operate the job configuration
+ */
+public interface StreamJobConfMapper {
+
+ /**
+ * Select all config definitions
+ * @return list
+ */
+ List loadAllDefinitions();
+
+ /**
+ * Get raw value
+ * @param jobId job id
+ * @param key key
+ * @return
+ */
+ String getRawConfValue(@Param("jobId")Long jobId, @Param("key")String key);
+ /**
+ * Get config values by job id
+ * @param jobId job id
+ * @return
+ */
+ List getConfValuesByJobId(@Param("jobId")Long jobId);
+
+ /**
+ * Delete values by job id
+ * @param jobId job id
+ */
+ int deleteConfValuesByJobId(@Param("jobId")Long jobId);
+
+ /**
+ * Delete temporary config value
+ * @param jobId job id
+ * @return affect rows
+ */
+ int deleteTemporaryConfValue(@Param("jobId")Long jobId);
+ /**
+ * Batch insert
+ * @param values values
+ */
+ void batchInsertValues(@Param("values")List values);
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml
new file mode 100644
index 000000000..94556b3f9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml
@@ -0,0 +1,71 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SELECT * FROM `linkis_stream_job_config_def`;
+
+
+
+ SELECT `value` FROM `linkis_stream_job_config` WHERE job_id = #{jobId} AND `key` = #{key};
+
+
+ SELECT * FROM `linkis_stream_job_config` WHERE job_id = #{jobId};
+
+
+
+ DELETE FROM `linkis_stream_job_config` WHERE job_id = #{jobId};
+
+
+
+ DELETE c FROM `linkis_stream_job_config` c INNER JOIN `linkis_stream_job_config_def` d ON c.job_id = #{jobId} AND d.id = c.ref_def_id AND d.is_temp = 1;
+
+
+ INSERT INTO `linkis_stream_job_config`(`job_id`, `job_name`, `key`, `value`, `ref_def_id`) VALUES
+
+ (#{item.jobId}, #{item.jobName}, #{item.key}, #{item.value}, #{item.referDefId})
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala
new file mode 100644
index 000000000..187b02288
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.conf
+
+import org.apache.linkis.common.conf.CommonVars
+
+
/**
 * Config key constants.
 * Each entry maps a Streamis property name (first argument) to the default
 * key/prefix actually stored in the job configuration (second argument).
 */
object JobConfKeyConstants {

  /**
   * Config group for streamis internal configuration
   */
  val GROUP_INTERNAL: CommonVars[String] = CommonVars("wds.streamis.job.internal.config.group", "wds.streamis.internal.params")
  /**
   * Group: extra/custom Flink parameters passed through to the engine
   */
  val GROUP_FLINK_EXTRA: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.flink-extra", "wds.linkis.flink.custom")

  /**
   * Group: production (produce) settings of the job
   */
  val GROUP_PRODUCE: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.produce", "wds.linkis.flink.produce")

  /**
   * Group: resource settings (memory, parallelism, ...)
   */
  val GROUP_RESOURCE: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.resource", "wds.linkis.flink.resource")

  /**
   * Group: permission/authority settings
   */
  val GROUP_PERMISSION: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.permission", "wds.linkis.flink.authority")

  /**
   * Group: alert settings
   */
  val GROUP_ALERT: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.alert", "wds.linkis.flink.alert")
  /**
   * Checkpoint prefix (note the trailing dot: used as a key namespace)
   */
  val CHECKPOINT: CommonVars[String] = CommonVars("wds.streamis.job.config.key.checkpoint", "wds.linkis.flink.checkpoint.")

  /**
   * Checkpoint switch (on/off)
   */
  val CHECKPOINT_SWITCH: CommonVars[String] = CommonVars("wds.streamis.job.config.key.checkpoint.switch", "wds.linkis.flink.checkpoint.switch")
  /**
   * Savepoint prefix (note the trailing dot: used as a key namespace)
   */
  val SAVEPOINT: CommonVars[String] = CommonVars("wds.streamis.job.config.key.savepoint", "wds.linkis.flink.savepoint.")

  /**
   * Switch to restart job automatically when fail
   */
  val FAIL_RESTART_SWITCH: CommonVars[String] = CommonVars("wds.streamis.job.config.key.fail-restart.switch", "wds.linkis.flink.app.fail-restart.switch")

  /**
   * Switch to restore job automatically when starting
   */
  val START_AUTO_RESTORE_SWITCH: CommonVars[String] = CommonVars("wds.streamis.job.config.key.start-auto-restore.switch", "wds.linkis.flink.app.start-auto-restore.switch")

  /**
   * Users to whom the job is visible (authority setting)
   */
  val AUTHORITY_AUTHOR_VISIBLE: CommonVars[String] = CommonVars("wds.streamis.job.config.key.authority.visible", "wds.linkis.flink.authority.visible")

  /**
   * User to alert on job failure
   */
  val ALERT_USER: CommonVars[String] = CommonVars("wds.streamis.job.config.key.alert.user", "wds.linkis.flink.alert.failure.user")

  /**
   * Alert level
   */
  val ALERT_LEVEL: CommonVars[String] = CommonVars("wds.streamis.job.config.key.alert.level", "wds.linkis.flink.alert.level")
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/exception/ConfigurationException.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/exception/ConfigurationException.java
new file mode 100644
index 000000000..d84382cc1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/exception/ConfigurationException.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.exception;
+
+
/**
 * Checked exception raised when reading or persisting job configuration fails.
 */
public class ConfigurationException extends Exception {

    /** Creates an exception without a detail message. */
    public ConfigurationException() {
    }

    /**
     * @param message detail message describing the configuration problem
     */
    public ConfigurationException(String message) {
        super(message);
    }

    /**
     * Preserves the underlying cause instead of discarding it.
     *
     * @param message detail message describing the configuration problem
     * @param cause root cause
     */
    public ConfigurationException(String message, Throwable cause) {
        super(message, cause);
    }
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/DefaultStreamJobConfService.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/DefaultStreamJobConfService.scala
new file mode 100644
index 000000000..7eaf7c8a5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/DefaultStreamJobConfService.scala
@@ -0,0 +1,237 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.service
+import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.JobConfValueVo.ValueList
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.{JobConfValueSet, JobConfValueVo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.{JobConfDefinition, JobConfValue}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.exception.ConfigurationException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.service.tools.JobConfValueUtils
+import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamJobMapper
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.common.utils.Logging
+import org.springframework.stereotype.Service
+import org.springframework.transaction.annotation.Transactional
+
+import java.util
+import javax.annotation.Resource
+import scala.collection.JavaConverters._
+
@Service
class DefaultStreamJobConfService extends StreamJobConfService with Logging{

  // Mapper for the config definition/value tables
  @Resource
  private var streamJobConfMapper: StreamJobConfMapper = _

  // Mapper for StreamJob entities (used to lock the job row during updates)
  @Resource
  private var streamJobMapper: StreamJobMapper = _
  /**
   * Get all config definitions
   *
   * @return list
   */
  override def loadAllDefinitions(): util.List[JobConfDefinition] = {
    streamJobConfMapper.loadAllDefinitions()
  }

  /**
   * Save job configuration
   *
   * @param jobId job id
   * @param valueMap value map
   */
  @Transactional(rollbackFor = Array(classOf[Exception]))
  override def saveJobConfig(jobId: Long, valueMap: util.Map[String, Any]): Unit = {
    val definitions = Option(this.streamJobConfMapper.loadAllDefinitions())
      .getOrElse(new util.ArrayList[JobConfDefinition]())
    // Can deserialize the value map at first
    val configValues = JobConfValueUtils.deserialize(valueMap, definitions)
    suppleDefaultConfValue(configValues, definitions)
    saveJobConfig(jobId, configValues)
  }

  /**
   * Query the job configuration
   *
   * @param jobId job id
   * @return serialized (group -> values) configuration map
   */
  override def getJobConfig(jobId: Long): util.Map[String, Any] = {
    getJobConfig(jobId, this.streamJobConfMapper.loadAllDefinitions())
  }

  /**
   * Query the job value
   *
   * @param jobId job id
   * @param configKey config key
   * @return raw value, or null when absent
   */
  override def getJobConfValue(jobId: Long, configKey: String): String = {
    this.streamJobConfMapper.getRawConfValue(jobId, configKey)
  }

  /**
   * Get job configuration value set, grouped per configuration section
   *
   * @param jobId job id
   * @return view object holding resource/parameter/produce/permission/alarm groups
   */
  override def getJobConfValueSet(jobId: Long): JobConfValueSet = {
    val valueSet = new JobConfValueSet
    val definitions: util.List[JobConfDefinition] = this.streamJobConfMapper.loadAllDefinitions()
    val jobConfig: util.Map[String, Any] = getJobConfig(jobId, definitions)
    val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => (definition.getKey, definition)).toMap.asJava
    valueSet.setResourceConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_RESOURCE.getValue, jobConfig, definitionMap))
    valueSet.setParameterConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_FLINK_EXTRA.getValue, jobConfig, definitionMap))
    valueSet.setProduceConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_PRODUCE.getValue, jobConfig, definitionMap))
    valueSet.setPermissionConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_PERMISSION.getValue, jobConfig, definitionMap))
    valueSet.setAlarmConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_ALERT.getValue, jobConfig, definitionMap))
    valueSet.setJobId(jobId)
    valueSet
  }

  /**
   * Save job configuration value set
   *
   * @param valueSet value set
   */
  override def saveJobConfValueSet(valueSet: JobConfValueSet): Unit = {
    val configValues: util.List[JobConfValue] = new util.ArrayList[JobConfValue]()
    val definitions = this.streamJobConfMapper.loadAllDefinitions()
    val definitionMap: util.Map[String, JobConfDefinition] = definitions
      .asScala.map(definition => (definition.getKey, definition)).toMap.asJava
    configValues.addAll(convertToConfigValue(
      valueSet.getResourceConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_RESOURCE.getValue)) match {
        case Some(definition) => definition.getId
        case _ => 0
      }))
    configValues.addAll(convertToConfigValue(
      valueSet.getParameterConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_FLINK_EXTRA.getValue)) match {
        case Some(definition) => definition.getId
        case _ => 0
      }))
    configValues.addAll(convertToConfigValue(
      valueSet.getProduceConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_PRODUCE.getValue)) match {
        case Some(definition) => definition.getId
        case _ => 0
      }))
    configValues.addAll(convertToConfigValue(
      valueSet.getPermissionConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_PERMISSION.getValue)) match {
        case Some(definition) => definition.getId
        case _ => 0
      }))
    configValues.addAll(convertToConfigValue(
      valueSet.getAlarmConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_ALERT.getValue)) match {
        case Some(definition) => definition.getId
        case _ => 0
      }))
    suppleDefaultConfValue(configValues, definitions)
    saveJobConfig(valueSet.getJobId, configValues)
  }
  /**
   * Get job configuration map
   * @param jobId job id
   * @param definitions definitions
   * @return
   */
  private def getJobConfig(jobId: Long, definitions: util.List[JobConfDefinition]): util.Map[String, Any] = {
    Option(this.streamJobConfMapper.getConfValuesByJobId(jobId)) match {
      case None => new util.HashMap[String, Any]()
      case Some(list: util.List[JobConfValue]) =>
        JobConfValueUtils.serialize(list,
          Option(definitions)
            .getOrElse(new util.ArrayList[JobConfDefinition]()))
    }
  }

  /**
   * Replace the whole configuration of a job: lock the job row, delete the
   * existing values, then batch-insert the new ones.
   */
  private def saveJobConfig(jobId: Long, configValues: util.List[JobConfValue]): Unit = {
    trace(s"Query and lock the StreamJob in [$jobId] before saving/update configuration")
    Option(streamJobMapper.queryAndLockJobById(jobId)) match {
      case None => throw new ConfigurationException(s"Unable to saving/update configuration, the StreamJob [$jobId] is not exists.")
      case Some(job: StreamJob) =>
        // Delete all configuration
        this.streamJobConfMapper.deleteConfValuesByJobId(job.getId)
        configValues.asScala.foreach(configValue => {
          configValue.setJobId(job.getId)
          configValue.setJobName(job.getName)
        })
        info(s"Save the job configuration size: ${configValues.size()}, jobName: ${job.getName}")
        if (!configValues.isEmpty) {
          // Send to save the configuration new
          this.streamJobConfMapper.batchInsertValues(configValues)
        }
    }
  }

  /**
   * Supple the default value into the configuration
   * @param configValues config value list
   * @param definitions definitions
   */
  private def suppleDefaultConfValue(configValues: util.List[JobConfValue], definitions: util.List[JobConfDefinition]): Unit = {
    val configMark = configValues.asScala.filter(configValue => configValue.getReferDefId != null)
      .map(configValue => (configValue.getReferDefId, 1)).toMap
    definitions.asScala.filter(definition => definition.getLevel > 0 && StringUtils.isNotBlank(definition.getDefaultValue))
      .foreach(definition => configMark.get(definition.getId) match {
        case Some(_) => // Value already provided explicitly, keep it
        case None =>
          val configValue = new JobConfValue(definition.getKey, definition.getDefaultValue, definition.getId)
          configValues.add(configValue)
      }
      )
  }
  /**
   * Resolve to config value view object
   * @param group group
   * @param jobConfig job config
   * @param definitionMap (key => definition)
   */
  private def resolveConfigValueVo(group: String, jobConfig: util.Map[String, Any],
                                   definitionMap: util.Map[String, JobConfDefinition]): util.List[JobConfValueVo] = {
    Option(jobConfig.get(group)) match {
      case Some(configMap: util.Map[String, Any]) =>
        configMap.asScala.map{
          case (key, value) =>
            val configValue = new JobConfValueVo(key, String.valueOf(value))
            Option(definitionMap.get(key)) match {
              case Some(definition) =>
                configValue.setConfigkeyId(definition.getId)
                configValue.setName(definition.getName)
                val refValues = definition.getRefValues
                if (StringUtils.isNotBlank(refValues)){
                  val valueList = new util.ArrayList[ValueList]()
                  refValues.split(",").foreach(refValue =>{
                    valueList.add(new ValueList(refValue, refValue.equals(value)))
                  })
                  configValue.setValueLists(valueList)
                }
              case _ =>
            }
            configValue
        }.toList.asJava
      // Fix: the group entry can also be absent or a scalar value; the previous
      // `case None` left `Some(nonMap)` unmatched and raised a MatchError
      case _ => new util.ArrayList[JobConfValueVo]()
    }
  }

  /**
   * Convert to config value entities
   * @param configValueVos view object
   * @param definitionMap definition map
   * @param parentRef parent ref id
   * @return
   */
  private def convertToConfigValue(configValueVos: util.List[JobConfValueVo],
                                   definitionMap: util.Map[String, JobConfDefinition], parentRef: Long): util.List[JobConfValue] = {
    Option(configValueVos) match {
      case Some(voList) =>
        voList.asScala.map(vo => {
          val definition = definitionMap.get(vo.getKey)
          val confValue = new JobConfValue(vo.getKey, vo.getValue, if (null == definition) parentRef else definition.getId)
          confValue
        }).asJava
      case _ => new util.ArrayList[JobConfValue]()
    }
  }

}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala
new file mode 100644
index 000000000..0e87a9e4c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.launcher.service
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.JobConfValueSet
+
+import java.util
/**
 * Job configuration service: loads config definitions and reads/writes
 * per-job configuration values.
 */
trait StreamJobConfService {

  /**
   * Get all config definitions
   * @return list of definitions (may be empty)
   */
  def loadAllDefinitions(): util.List[JobConfDefinition]

  /**
   * Save job configuration
   * @param jobId job id
   * @param valueMap value map (group key -> value or nested map)
   */
  def saveJobConfig(jobId: Long, valueMap: util.Map[String, Any]): Unit

  /**
   * Query the job configuration
   * @param jobId job id
   * @return serialized configuration map (group key -> value or nested map)
   */
  def getJobConfig(jobId: Long): util.Map[String, Any]

  /**
   * Query the job value
   * @param jobId job id
   * @param configKey config key
   * @return raw string value, or null when the key is absent
   */
  def getJobConfValue(jobId: Long, configKey: String): String

  /**
   * Get job configuration value set
   * @param jobId job id
   * @return view object grouping values per configuration section
   */
  def getJobConfValueSet(jobId: Long): JobConfValueSet

  /**
   * Save job configuration value set
   * @param valueSet value set
   */
  def saveJobConfValueSet(valueSet: JobConfValueSet): Unit
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala
new file mode 100644
index 000000000..9cd088488
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala
@@ -0,0 +1,180 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.service.tools
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.{JobConfDefinition, JobConfValue}
+import org.apache.commons.lang.StringUtils
+
+import scala.collection.JavaConverters._
+import java.util
/**
 * TODO dive into JobConfValueSerializer and JobConfValueDeserializer
 * NOTE(review): empty placeholder class; the actual logic lives in the
 * companion object below.
 */
class JobConfValueUtils {

}
+
object JobConfValueUtils{
  /**
   * Serialize the job conf values into a nested map keyed by the root
   * (level-0) definition keys. Values referring to a "NONE"-typed definition
   * become nested maps; others become plain string values. The hierarchy is
   * rebuilt by walking each definition's parentRef chain upwards.
   * @return root-key -> value (String or nested util.Map)
   */
  def serialize(configValues: util.List[JobConfValue], definitions: util.List[JobConfDefinition]): util.Map[String, Any] = {
    // First to build a definition map
    val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => {
      (definition.getId.toString, definition)
    }).toMap.asJava
    // Init a value map to store relation of config values
    val relationMap: util.Map[String, Any] = new util.HashMap[String, Any]()
    configValues.asScala.foreach(keyValue => {
      val refDefId = keyValue.getReferDefId
      if (null != refDefId) {
        // NOTE: the Map match below relies on erasure — any util.Map matches
        Option(relationMap.get(refDefId.toString)) match {
          case Some(value: util.Map[String, Any]) => {
            // Put the value into relation
            value.put(keyValue.getKey, keyValue.getValue)
          }
          case Some(value: String) => {
            // Overwrite it's value
            relationMap.put(refDefId.toString, keyValue.getValue)
          }
          case _ =>
            // Set the value/relation recursively
            var definition = definitionMap.get(refDefId.toString)
            // A blank/"NONE" type marks a container definition -> nested map
            var value: Any = if (null != definition && (StringUtils.isBlank(definition.getType) ||
              definition.getType.equalsIgnoreCase("NONE"))) {
              val relation = new util.HashMap[String, Any]()
              relation.put(keyValue.getKey, keyValue.getValue)
              relation
            } else {
              keyValue.getValue
            }
            // Climb the parentRef chain, merging into existing relation maps
            while (null != definition){
              value = Option(relationMap.get(definition.getId.toString)) match {
                case Some(existV: util.Map[String, Any]) => {
                  value match {
                    case map: util.Map[String, Any] =>
                      existV.putAll(map)
                      existV
                    case _ =>
                      relationMap.put(definition.getId.toString, value)
                      value
                  }
                }
                case _ =>
                  relationMap.put(definition.getId.toString, value)
                  value
              }
              Option(definition.getParentRef) match {
                case Some(parentRef) =>
                  // Wrap the accumulated value under this definition's key
                  // and continue with the parent definition
                  val newValue: util.Map[String, Any] = new util.HashMap[String, Any]()
                  newValue.put(definition.getKey, value)
                  definition = definitionMap.get(parentRef.toString)
                  value = newValue
                case _ => definition = null
              }
            }
        }
      }
    })
    // Filter the root configuration
    relationMap.asScala
      .filter(entry=> definitionMap.get(entry._1).getLevel == 0).map{
      case (defId, value) => (definitionMap.get(defId).getKey, value)
    }.asJava

  }

  /**
   * Deserialize a nested configuration map back into a flat list of
   * JobConfValue entities. Only map entries whose key matches a level-0
   * definition are descended into; matched definitions are marked as used.
   * @param valueMap value map
   * @param definitions definitions
   * @return flat list of config values
   */
  def deserialize(valueMap: util.Map[String, Any], definitions: util.List[JobConfDefinition]):util.List[JobConfValue] = {
    // First to build a definition map
    val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => {
      (definition.getKey, definition)
    }).toMap.asJava
    // Configuration value list
    val configValues: util.List[JobConfValue] = new util.ArrayList[JobConfValue]()
    valueMap.asScala.foreach{
      case (key, value) => {
        Option(definitionMap.get(key)) match {
          case Some(definition) => if (definition.getLevel == 0){
            configValues.addAll(deserializeInnerObj(key, value, null, definitionMap))
            definition.setMark(true)
          }
          case _ =>
        }
      }
    }
    configValues
  }

  /**
   * Recursively flatten one entry. Map values recurse per child key; scalar
   * values become a JobConfValue bound to the matching definition (or an
   * unbound value when no definition exists for the key).
   */
  private def deserializeInnerObj(key: String, value: Any, parentRef: String,
                                  definitionMap: util.Map[String, JobConfDefinition]): util.List[JobConfValue] = {
    val result: util.List[JobConfValue] = new util.ArrayList[JobConfValue]()
    if (null != value) {
      value match {
        case innerMap: util.Map[String, Any] =>
          Option(definitionMap.get(key)) match {
            case Some(definition) =>
              innerMap.asScala.foreach{
                case (childK, childV) => {
                  val childResult = deserializeInnerObj(childK, childV,
                    definition.getId.toString, definitionMap)
                  // Bind orphan children to the current definition
                  childResult.asScala.foreach(confValue => if (confValue.getReferDefId == null){
                    confValue.setReferDefId(definition.getId)
                  })
                  result.addAll(childResult)
                }
              }
              // Mark it used
              definition.setMark(true)
            case _ => //ignore
          }

        case other: Any =>
          Option(definitionMap.get(key)) match {
            case Some(definition) =>
              // Only accept the value when it sits under the expected parent
              if (StringUtils.isBlank(parentRef) || parentRef.equals(String.valueOf(definition.getParentRef))){
                result.add(new JobConfValue(key, String.valueOf(other), definition.getId))
                // Mark it used
                definition.setMark(true)
              }
            case _ => result.add(new JobConfValue(key, String.valueOf(other), null))
          }
      }
    }
    result
  }
// def main(args: Array[String]): Unit = {
//   val definitions: util.List[JobConfDefinition] = new util.ArrayList[JobConfDefinition]()
//   val configValues: util.List[JobConfValue] = new util.ArrayList[JobConfValue]()
//   definitions.add(new JobConfDefinition(0, "wds.linkis.flink.resource", "None", null, 0))
//   definitions.add(new JobConfDefinition(1, "wds.linkis.flink.custom", "None", null, 0))
//   definitions.add(new JobConfDefinition(2, "wds.linkis.flink.taskmanager.num", "NUMBER", 0, 1))
//   definitions.add(new JobConfDefinition(3, "wds.linkis.flink.jobmanager.memeory", "NUMBER", 0, 1))
//   configValues.add(new JobConfValue("wds.linkis.flink.taskmanager.num", "1", 2))
//   configValues.add(new JobConfValue("env.java.opts", "-DHADOOP_USER_NAME=hadoop", 1))
//   configValues.add(new JobConfValue("security.kerberos.login.principal", "hadoop@WEBANK.com", 1))
//   configValues.add(new JobConfValue("wds.linkis.flink.jobmanager.memeory", "1024", 3))
//   val result = serialize(configValues, definitions)
//   println(DWSHttpClient.jacksonJson.writeValueAsString(result))
//   println(DWSHttpClient.jacksonJson.writeValueAsString(deserialize(result, definitions)))
// }
}
diff --git a/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dc13253b7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml
new file mode 100644
index 000000000..cf612b7dd
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml
@@ -0,0 +1,96 @@
+
+
+
+ streamis-job-log
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ flink-streamis-log-collector
+
+
+ 8
+ 8
+
+ 1.12.2
+ 2.17.1
+ 1.7.15
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-log-collector
+ ${streamis.version}
+
+
+
+ org.apache.flink
+ flink-java
+ ${flink.version}
+ provided
+
+
+ org.apache.flink
+ flink-yarn_2.11
+ ${flink.version}
+ provided
+
+
+
+ junit
+ junit
+ ${junit.version}
+ test
+
+
+
+ org.slf4j
+ slf4j-api
+ ${slf4j.version}
+ provided
+
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ ${log4j.version}
+ provided
+
+
+
+ org.apache.logging.log4j
+ log4j-api
+ ${log4j.version}
+ provided
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-assembly-plugin
+ 2.3
+
+
+ assemble
+
+ single
+
+
+ install
+
+
+
+
+ src/main/assembly/package.xml
+
+ false
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml
new file mode 100644
index 000000000..8da27bf2c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml
@@ -0,0 +1,19 @@
+
+
+ package
+
+
+ jar
+
+ false
+
+
+ /
+ true
+ runtime
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java
new file mode 100644
index 000000000..a5459021c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java
@@ -0,0 +1,130 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.filters.KeywordThresholdFilter;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.GlobalConfiguration;
+import org.apache.flink.runtime.util.EnvironmentInformation;
+import org.apache.flink.yarn.configuration.YarnConfigOptions;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.filter.LevelMatchFilter;
+import org.apache.logging.log4j.core.filter.RegexFilter;
+import org.apache.logging.log4j.core.filter.ThresholdFilter;
+
+import java.util.Enumeration;
+import java.util.List;
+import java.util.Properties;
+
+import static com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigDefine.*;
+
+/**
+ * Autoconfigures the streamis log-appender config in the Flink environment.
+ *
+ * NOTE(review): this patch appears to have had all angle-bracketed text stripped by
+ * extraction — generic type parameters such as "List&lt;String&gt; filterStrategies" and
+ * "Enumeration&lt;?&gt;" are missing below, so the file will not compile as shown.
+ * Confirm against the upstream Streamis source before applying.
+ */
+public class FlinkStreamisConfigAutowired implements StreamisConfigAutowired {
+
+ /**
+ * Flink configuration (loaded lazily in logAppenderConfig, not in the constructor)
+ */
+ private Configuration configuration;
+
+ public FlinkStreamisConfigAutowired(){
+ // First to load configuration
+ // We should sleep and wait for append of the flink-yaml.conf
+ }
+ /**
+ * Builds the appender config from the Flink configuration:
+ * application name, gateway RPC address, log4j2 filter strategies, and all RPC
+ * tuning parameters defined in FlinkStreamisConfigDefine.
+ * @param builder builder supplied by the caller; mutated and then built
+ * @return the finished StreamisLogAppenderConfig
+ * @throws Exception propagated from configuration loading / builder
+ */
+ @Override
+ public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception{
+ this.configuration = loadConfiguration();
+ String applicationName =
+ this.configuration.getString(YarnConfigOptions.APPLICATION_NAME);
+ if (StringUtils.isNotBlank(applicationName)){
+ builder.setAppName(applicationName);
+ }
+ String gateway = this.configuration.getString(LOG_GATEWAY_ADDRESS);
+ if (StringUtils.isNotBlank(gateway)){
+ // Normalize: strip a trailing slash before appending the collect path
+ if (gateway.endsWith("/")){
+ gateway = gateway.substring(0, gateway.length() - 1);
+ }
+ gateway += this.configuration.getString(LOG_COLLECT_PATH, "/");
+ builder.setRpcAddress(gateway);
+ }
+ // Filter wiring applies only to the log4j2 flavour of the builder
+ if (builder instanceof StreamisLog4j2AppenderConfig.Builder) {
+ List filterStrategies = this.configuration.get(LOG_FILTER_STRATEGIES);
+ for (String filterStrategy : filterStrategies) {
+ if ("LevelMatch".equals(filterStrategy)) {
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(LevelMatchFilter.newBuilder().setOnMatch(Filter.Result.ACCEPT).setOnMismatch(Filter.Result.DENY)
+ .setLevel(Level.getLevel(this.configuration.getString(LOG_FILTER_LEVEL_MATCH))).build());
+ } else if ("ThresholdMatch".equals(filterStrategy)) {
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(ThresholdFilter.createFilter(Level
+ .getLevel(this.configuration.getString(LOG_FILTER_THRESHOLD_MATCH)), Filter.Result.ACCEPT, Filter.Result.DENY));
+ } else if ("RegexMatch".equals(filterStrategy)) {
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(RegexFilter.createFilter(this.configuration.getString(LOG_FILTER_REGEX),
+ null, true, Filter.Result.ACCEPT, Filter.Result.DENY));
+ } else if ("Keyword".equals(filterStrategy)){
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(
+ new KeywordThresholdFilter(
+ StringUtils.split(this.configuration.getString(LOG_FILTER_KEYWORDS), ","),
+ StringUtils.split(this.configuration.getString(LOG_FILTER_KEYWORDS_EXCLUDE), ",")));
+ }
+ }
+ }
+ String hadoopUser = EnvironmentInformation.getHadoopUser();
+ // NOTE(review): both operands of this || are the identical literal "" — the
+ // condition is redundant as written. Upstream compares against sentinel values
+ // such as "<no hadoop dependency found>" and "<unknown>"; the angle-bracketed
+ // content was stripped from this patch. Restore the sentinels before applying.
+ if (hadoopUser.equals("") || hadoopUser.equals("")){
+ hadoopUser = System.getProperty("user.name");
+ }
+ return builder.setDebugMode(this.configuration.getBoolean(DEBUG_MODE))
+ .setRpcConnTimeout(this.configuration.getInteger(LOG_RPC_CONN_TIMEOUT))
+ .setRpcSocketTimeout(this.configuration.getInteger(LOG_RPC_SOCKET_TIMEOUT))
+ .setRpcSendRetryCnt(this.configuration.getInteger(LOG_RPC_SEND_RETRY_COUNT))
+ .setRpcServerRecoveryTimeInSec(this.configuration.getInteger(LOG_RPC_SERVER_RECOVERY_TIME))
+ .setRpcMaxDelayTimeInSec(this.configuration.getInteger(LOG_RPC_MAX_DELAY_TIME))
+ .setRpcAuthTokenCodeKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE_KEY))
+ .setRpcAuthTokenUserKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER_KEY))
+ .setRpcAuthTokenCode(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE))
+ .setRpcAuthTokenUser(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER,
+ hadoopUser))
+ .setRpcCacheSize(this.configuration.getInteger(LOG_RPC_CACHE_SIZE))
+ .setRpcCacheMaxConsumeThread(this.configuration.getInteger(LOG_PRC_CACHE_MAX_CONSUME_THREAD))
+ .setDiscard(this.configuration.getBoolean(LOG_RPC_CACHE_DISCARD))
+ .setDiscardWindow(this.configuration.getInteger(LOG_RPC_CACHE_DISCARD_WINDOW))
+ .setRpcBufferSize(this.configuration.getInteger(LOG_RPC_BUFFER_SIZE))
+ .setRpcBufferExpireTimeInSec(this.configuration.getInteger(LOG_RPC_BUFFER_EXPIRE_TIME)).build();
+ }
+
+
+ /**
+ * According to :
+ * String launchCommand =
+ * BootstrapTools.getTaskManagerShellCommand(
+ * flinkConfig,
+ * tmParams,
+ * ".",
+ * ApplicationConstants.LOG_DIR_EXPANSION_VAR,
+ * hasLogback,
+ * hasLog4j,
+ * hasKrb5,
+ * taskManagerMainClass,
+ * taskManagerDynamicProperties);
+ * the configuration directory of Flink yarn container is always ".",
+ * so the flink-conf.yaml is loaded from the working directory, with every JVM
+ * system property layered on top as dynamic configuration.
+ * @return configuration
+ */
+ private synchronized Configuration loadConfiguration(){
+// String configDir = System.getenv("FLINK_CONF_DIR");
+// if (null == configDir){
+// configDir = ".";
+// }
+ String configDir = ".";
+ Properties properties = System.getProperties();
+ // NOTE(review): "Enumeration>" below was presumably "Enumeration<?>" — wildcard stripped.
+ Enumeration> enumeration = properties.propertyNames();
+ Configuration dynamicConfiguration = new Configuration();
+ while(enumeration.hasMoreElements()){
+ String prop = String.valueOf(enumeration.nextElement());
+ dynamicConfiguration.setString(prop, properties.getProperty(prop));
+ }
+ return GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration);
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java
new file mode 100644
index 000000000..5f23ae681
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java
@@ -0,0 +1,155 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink;
+
+import org.apache.flink.configuration.ConfigOption;
+import org.apache.flink.configuration.ConfigOptions;
+// NOTE(review): "import scala.Int;" is unused in this Java class and is almost
+// certainly an accidental IDE auto-import — remove it upstream.
+import scala.Int;
+
+import java.util.List;
+
+/**
+ * Config definition: every ConfigOption key read by FlinkStreamisConfigAutowired.
+ *
+ * NOTE(review): the generic parameters of every ConfigOption below (e.g.
+ * ConfigOption&lt;String&gt;, ConfigOption&lt;Integer&gt;, ConfigOption&lt;List&lt;String&gt;&gt;) appear to
+ * have been stripped from this patch by extraction; the declared .stringType() /
+ * .intType() / .booleanType() calls indicate the intended parameter — confirm
+ * against the upstream source before applying.
+ */
+public class FlinkStreamisConfigDefine {
+
+ /**
+ * Gateway address of log module for streamis
+ */
+ public static final ConfigOption LOG_GATEWAY_ADDRESS = ConfigOptions.key("stream.log.gateway.address")
+ .stringType().noDefaultValue().withDescription("The gateway address ex: http://127.0.0.1:8080");
+
+ /**
+ * Entrypoint path of collecting log
+ */
+ public static final ConfigOption LOG_COLLECT_PATH = ConfigOptions.key("stream.log.collect.path")
+ .stringType().defaultValue("/api/rest_j/v1/streamis/streamJobManager/log/collect/events").withDescription("The entrypoint path of collecting log");
+
+ /**
+ * Connection timeout(in milliseconds) in log RPC module
+ */
+ public static final ConfigOption LOG_RPC_CONN_TIMEOUT = ConfigOptions.key("stream.log.rpc.connect-timeout")
+ .intType().defaultValue(3000).withDescription("Connection timeout(ms) in log RPC module");
+
+ /**
+ * Socket timeout(in milliseconds) in log RPC module
+ */
+ public static final ConfigOption LOG_RPC_SOCKET_TIMEOUT = ConfigOptions.key("stream.log.rpc.socket-timeout")
+ .intType().defaultValue(15000).withDescription("Socket timeout(ms) in log RPC module");
+
+ /**
+ * Max retry count of sending message in log RPC module
+ */
+ public static final ConfigOption LOG_RPC_SEND_RETRY_COUNT = ConfigOptions.key("stream.log.rpc.send-retry-count")
+ .intType().defaultValue(3).withDescription("Max retry count of sending message in log RPC module");
+
+ /**
+ * Server recovery time(in seconds) in log RPC module
+ */
+ public static final ConfigOption LOG_RPC_SERVER_RECOVERY_TIME = ConfigOptions.key("stream.log.rpc.server-recovery-time-in-sec")
+ .intType().defaultValue(5).withDescription("Server recovery time(sec) in log RPC module");
+
+ /**
+ * Max delay time(in seconds) in log RPC module. if reach the limit, the message will be dropped
+ */
+ public static final ConfigOption LOG_RPC_MAX_DELAY_TIME = ConfigOptions.key("stream.log.rpc.max-delay-time")
+ .intType().defaultValue(60).withDescription("Max delay time(sec) in log RPC module");
+
+ /**
+ * Token code key in log RPC auth module
+ */
+ public static final ConfigOption LOG_RPC_AUTH_TOKEN_CODE_KEY = ConfigOptions.key("stream.log.rpc.auth.token-code-key")
+ .stringType().defaultValue("Token-Code").withDescription("Token code key in log RPC auth module");
+
+ /**
+ * Token user key in log RPC auth module
+ */
+ public static final ConfigOption LOG_RPC_AUTH_TOKEN_USER_KEY = ConfigOptions.key("stream.log.rpc.auth.token-user-key")
+ .stringType().defaultValue("Token-User").withDescription("Token user key in log RPC auth module");
+
+ /**
+ * Token code in log RPC auth module
+ */
+ public static final ConfigOption LOG_RPC_AUTH_TOKEN_CODE = ConfigOptions.key("stream.log.rpc.auth.token-code")
+ .stringType().defaultValue("STREAM-LOG").withDescription("Token code in log RPC auth module");
+
+ /**
+ * Token user in log RPC auth module.
+ * Default is the JVM user; FlinkStreamisConfigAutowired overrides this with the
+ * hadoop user when one is available.
+ */
+ public static final ConfigOption LOG_RPC_AUTH_TOKEN_USER = ConfigOptions.key("stream.log.rpc.auth.token-user")
+ .stringType().defaultValue(System.getProperty("user.name")).withDescription("Token user in log RPC auth module");
+
+ /**
+ * Cache size in log RPC module
+ */
+ public static final ConfigOption LOG_RPC_CACHE_SIZE = ConfigOptions.key("stream.log.rpc.cache.size")
+ .intType().defaultValue(150).withDescription("Cache size in log RPC module");
+
+ /**
+ * Max cache consume threads in log RPC module
+ * (constant name "LOG_PRC_..." keeps the upstream typo — renaming would be a code change)
+ */
+ public static final ConfigOption LOG_PRC_CACHE_MAX_CONSUME_THREAD = ConfigOptions.key("stream.log.rpc.cache.max-consume-thread")
+ .intType().defaultValue(2).withDescription("Max cache consume threads in log RPC module");
+
+ /**
+ * If discard the useless log
+ */
+ public static final ConfigOption LOG_RPC_CACHE_DISCARD = ConfigOptions.key("stream.log.rpc.cache.discard")
+ .booleanType().defaultValue(true).withDescription("If discard the useless log");
+
+ /**
+ * The window size of discarding
+ */
+ public static final ConfigOption LOG_RPC_CACHE_DISCARD_WINDOW = ConfigOptions.key("stream.log.rpc.cache.discard-window")
+ .intType().defaultValue(2).withDescription("The window size of discarding");
+ /**
+ * Buffer size in log RPC module
+ */
+ public static final ConfigOption LOG_RPC_BUFFER_SIZE = ConfigOptions.key("stream.log.rpc.buffer.size")
+ .intType().defaultValue(50).withDescription("Buffer size in log RPC module");
+
+ /**
+ * Buffer expire time(sec) in log RPC module
+ */
+ public static final ConfigOption LOG_RPC_BUFFER_EXPIRE_TIME = ConfigOptions.key("stream.log.rpc.buffer.expire-time-in-sec")
+ .intType().defaultValue(2).withDescription("Buffer expire time (sec) in log RPC module");
+
+ /**
+ * Log filter strategy list (values recognized by FlinkStreamisConfigAutowired:
+ * "LevelMatch", "ThresholdMatch", "RegexMatch", "Keyword")
+ */
+ public static final ConfigOption> LOG_FILTER_STRATEGIES = ConfigOptions.key("stream.log.filter.strategies")
+ .stringType().asList().defaultValues("Keyword").withDescription("Log filter strategy list");
+
+ /**
+ * Level value of LevelMatch filter strategy
+ */
+ public static final ConfigOption LOG_FILTER_LEVEL_MATCH = ConfigOptions.key("stream.log.filter.level-match.level")
+ .stringType().defaultValue("ERROR").withDescription("Level value of LevelMatch filter strategy");
+
+ /**
+ * Level value of ThresholdMatch filter strategy
+ */
+ public static final ConfigOption LOG_FILTER_THRESHOLD_MATCH = ConfigOptions.key("stream.log.filter.threshold.level")
+ .stringType().defaultValue("ERROR").withDescription("Level value of ThresholdMatch filter strategy");
+ /**
+ * Regex value of RegexMatch filter strategy
+ */
+ public static final ConfigOption LOG_FILTER_REGEX = ConfigOptions.key("stream.log.filter.regex.value")
+ .stringType().defaultValue(".*").withDescription("Regex value of RegexMatch filter strategy");
+
+ /**
+ * Accept keywords of Keyword filter strategy (comma-separated)
+ */
+ public static final ConfigOption LOG_FILTER_KEYWORDS = ConfigOptions.key("stream.log.filter.keywords")
+ .stringType().defaultValue("ERROR").withDescription("Accept keywords of Keyword filter strategy");
+
+ /**
+ * Exclude keywords of Keyword filter strategy (comma-separated)
+ */
+ public static final ConfigOption LOG_FILTER_KEYWORDS_EXCLUDE = ConfigOptions.key("stream.log.filter.keywords.exclude")
+ .stringType().defaultValue("").withDescription("Exclude keywords of Keyword filter strategy");
+
+ /**
+ * Debug mode
+ */
+ public static final ConfigOption DEBUG_MODE = ConfigOptions.key("stream.log.debug")
+ .booleanType().defaultValue(false).withDescription("Debug mode");
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dc13253b7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java
new file mode 100644
index 000000000..79ad46014
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java
@@ -0,0 +1,28 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink;
+
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.GlobalConfiguration;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Enumeration;
+import java.util.Objects;
+import java.util.Properties;
+
+/**
+ * Smoke test: loads flink-conf.yaml from the test-resources directory, layering
+ * all JVM system properties on top as dynamic configuration (mirrors
+ * FlinkStreamisConfigAutowired#loadConfiguration). No assertions — passes if
+ * loading does not throw.
+ */
+public class FlinkConfigurationLoadTest {
+ private static final Logger LOG = LoggerFactory.getLogger(FlinkConfigurationLoadTest.class);
+ @Test
+ public void loadConfiguration() {
+ String configDir = Objects.requireNonNull(FlinkConfigurationLoadTest.class.getResource("/")).getFile();
+ Properties properties = System.getProperties();
+ // NOTE(review): "Enumeration>" was presumably "Enumeration<?>" — wildcard stripped in this patch.
+ Enumeration> enumeration = properties.propertyNames();
+ Configuration dynamicConfiguration = new Configuration();
+ while(enumeration.hasMoreElements()){
+ String prop = String.valueOf(enumeration.nextElement());
+ dynamicConfiguration.setString(prop, properties.getProperty(prop));
+ }
+ GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration);
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dc13253b7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml
new file mode 100644
index 000000000..e69de29bb
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml
new file mode 100644
index 000000000..ee3f4125a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ `
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml
new file mode 100644
index 000000000..643119621
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml
@@ -0,0 +1,40 @@
+
+
+
+ streamis-job-log
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ streamis-job-log-collector-core
+
+
+ 8
+ 8
+ 4.5.13
+ 4.5.4
+
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-log-common
+ ${streamis.version}
+
+
+
+ org.apache.httpcomponents
+ httpclient
+ ${httpclient.version}
+
+
+ org.apache.httpcomponents
+ httpmime
+ ${httpmine.version}
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java
new file mode 100644
index 000000000..4c9ac6ea8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+/**
+ * Exception listener: callback invoked by collector components when they
+ * encounter an exception they cannot (or choose not to) handle locally.
+ */
+public interface ExceptionListener {
+
+ /**
+ * Notifies the listener of an exception.
+ * @param subject the component that threw/observed the exception
+ * @param t the throwable observed
+ * @param message context message describing where/why it occurred
+ */
+ void onException(Object subject, Throwable t, String message);
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java
new file mode 100644
index 000000000..f11556cc8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java
@@ -0,0 +1,43 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.cache;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Log cache: bounded staging area between log producers and sender threads.
+ *
+ * NOTE(review): the type variable E is used below but not declared on the
+ * interface — the declaration was presumably "LogCache&lt;E extends LogElement&gt;"
+ * and the angle-bracketed parameter was stripped from this patch. The LogElement
+ * import supports that reading; confirm against upstream.
+ * @param element log element type
+ */
+public interface LogCache {
+
+ /**
+ * Cache log (may block; throws InterruptedException if interrupted while waiting)
+ * @param logElement log element
+ */
+ void cacheLog(E logElement) throws InterruptedException;
+
+ /**
+ * Drain log elements into collection
+ * @param elements elements
+ * @param maxElements max elements size
+ * @return count
+ */
+ int drainLogsTo(List elements, int maxElements);
+
+ /**
+ * Take log element, waiting up to the given timeout
+ * @return log element
+ */
+ E takeLog(long timeout, TimeUnit unit) throws InterruptedException;
+
+ /**
+ * If the cache is full
+ * @return true when the cache can still accept elements
+ */
+ boolean isCacheable();
+ /**
+ * Release the resource
+ */
+ void destroy();
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java
new file mode 100644
index 000000000..ebf9b7f68
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java
@@ -0,0 +1,86 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+
+/**
+ * Authentication config for the log RPC sender: header keys and values used to
+ * authenticate against the Streamis gateway.
+ */
+public class RpcAuthConfig {
+ /**
+ * Key of token-code
+ */
+ private String tokenCodeKey = "Token-Code";
+
+ /**
+ * Key of token-user
+ */
+ private String tokenUserKey = "Token-User";
+
+ /**
+ * Token user (defaults to the JVM user)
+ */
+ private String tokenUser = System.getProperty("user.name");
+
+ /**
+ * Token code
+ */
+ private String tokenCode = "STREAM-LOG";
+
+ public RpcAuthConfig(){
+
+ }
+
+ /**
+ * All-args constructor; null arguments keep the corresponding default.
+ */
+ public RpcAuthConfig(String tokenCodeKey, String tokenCode, String tokenUserKey, String tokenUser){
+ if (null != tokenCodeKey) {
+ this.tokenCodeKey = tokenCodeKey;
+ }
+ if (null != tokenCode){
+ this.tokenCode = tokenCode;
+ }
+ if (null != tokenUserKey){
+ this.tokenUserKey = tokenUserKey;
+ }
+ if (null != tokenUser){
+ this.tokenUser = tokenUser;
+ }
+ }
+
+ public String getTokenCodeKey() {
+ return tokenCodeKey;
+ }
+
+ public void setTokenCodeKey(String tokenCodeKey) {
+ this.tokenCodeKey = tokenCodeKey;
+ }
+
+ public String getTokenUserKey() {
+ return tokenUserKey;
+ }
+
+ public void setTokenUserKey(String tokenUserKey) {
+ this.tokenUserKey = tokenUserKey;
+ }
+
+ public String getTokenUser() {
+ return tokenUser;
+ }
+
+ public void setTokenUser(String tokenUser) {
+ this.tokenUser = tokenUser;
+ }
+
+ public String getTokenCode() {
+ return tokenCode;
+ }
+
+ public void setTokenCode(String tokenCode) {
+ this.tokenCode = tokenCode;
+ }
+
+ // NOTE(review): output begins "RpcAuthConfig{, tokenUserKey=..." — the leading
+ // ", " suggests a first field (likely tokenCodeKey) was dropped. Omitting
+ // tokenCode itself may be deliberate to avoid logging the secret — confirm
+ // upstream intent before "fixing".
+ @Override
+ public String toString() {
+ return "RpcAuthConfig{" +
+ ", tokenUserKey='" + tokenUserKey + '\'' +
+ ", tokenUser='" + tokenUser + '\'' +
+ '}';
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java
new file mode 100644
index 000000000..0fb03185f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java
@@ -0,0 +1,180 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+
+import java.util.Objects;
+
+/**
+ * Rpc sender configuration: connection/retry tuning plus nested auth, cache and
+ * buffer configs for the log RPC sender.
+ */
+public class RpcLogSenderConfig {
+
+ /**
+ * Send address
+ */
+ private String address;
+
+ /**
+ * Timeout of connecting (ms)
+ */
+ private int connectionTimeout = 3000;
+
+ /**
+ * Timeout of reading from socket (ms)
+ */
+ private int socketTimeout = 15000;
+
+ /**
+ * Retry count of sending
+ */
+ private int sendRetryCnt = 3;
+
+ /**
+ * The time for server recovery (sec)
+ */
+ private int serverRecoveryTimeInSec = 5;
+
+ /**
+ * Retry max delay time of sender (sec)
+ */
+ private int maxDelayTimeInSec = 60;
+
+ /**
+ * If open debug mode
+ */
+ private boolean debugMode = false;
+ /**
+ * Auth config
+ */
+ private RpcAuthConfig authConfig = new RpcAuthConfig();
+
+ /**
+ * Cache config
+ */
+ private SendLogCacheConfig cacheConfig = new SendLogCacheConfig();
+
+ /**
+ * Buffer config
+ */
+ private SendBufferConfig bufferConfig = new SendBufferConfig();
+
+ public RpcLogSenderConfig(){
+
+ }
+
+ /**
+ * All-args constructor; null nested configs keep their defaults.
+ * (debugMode is not a parameter here — set it via the setter.)
+ */
+ public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout,
+ int serverRecoveryTimeInSec, int maxDelayTimeInSec,
+ RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig){
+ this.address = address;
+ this.sendRetryCnt = sendRetryCnt;
+ this.connectionTimeout = connectionTimeout;
+ this.socketTimeout = socketTimeout;
+ this.serverRecoveryTimeInSec = serverRecoveryTimeInSec;
+ this.maxDelayTimeInSec = maxDelayTimeInSec;
+ if (Objects.nonNull(authConfig)){
+ this.authConfig = authConfig;
+ }
+ if (Objects.nonNull(cacheConfig)){
+ this.cacheConfig = cacheConfig;
+ }
+ if (Objects.nonNull(bufferConfig)){
+ this.bufferConfig = bufferConfig;
+ }
+ }
+
+ public RpcAuthConfig getAuthConfig() {
+ return authConfig;
+ }
+
+ public void setAuthConfig(RpcAuthConfig authConfig) {
+ this.authConfig = authConfig;
+ }
+
+ public SendLogCacheConfig getCacheConfig() {
+ return cacheConfig;
+ }
+
+ public void setCacheConfig(SendLogCacheConfig cacheConfig) {
+ this.cacheConfig = cacheConfig;
+ }
+
+ public SendBufferConfig getBufferConfig() {
+ return bufferConfig;
+ }
+
+ public void setBufferConfig(SendBufferConfig bufferConfig) {
+ this.bufferConfig = bufferConfig;
+ }
+
+ public String getAddress() {
+ return address;
+ }
+
+ public void setAddress(String address) {
+ this.address = address;
+ }
+
+ public int getSendRetryCnt() {
+ return sendRetryCnt;
+ }
+
+ public void setSendRetryCnt(int sendRetryCnt) {
+ this.sendRetryCnt = sendRetryCnt;
+ }
+
+ public int getConnectionTimeout() {
+ return connectionTimeout;
+ }
+
+ public void setConnectionTimeout(int connectionTimeout) {
+ this.connectionTimeout = connectionTimeout;
+ }
+
+ public int getSocketTimeout() {
+ return socketTimeout;
+ }
+
+ public void setSocketTimeout(int socketTimeout) {
+ this.socketTimeout = socketTimeout;
+ }
+
+ public int getMaxDelayTimeInSec() {
+ return maxDelayTimeInSec;
+ }
+
+ public void setMaxDelayTimeInSec(int maxDelayTimeInSec) {
+ this.maxDelayTimeInSec = maxDelayTimeInSec;
+ }
+
+ public int getServerRecoveryTimeInSec() {
+ return serverRecoveryTimeInSec;
+ }
+
+ public void setServerRecoveryTimeInSec(int serverRecoveryTimeInSec) {
+ this.serverRecoveryTimeInSec = serverRecoveryTimeInSec;
+ }
+
+ public boolean isDebugMode() {
+ return debugMode;
+ }
+
+ public void setDebugMode(boolean debugMode) {
+ this.debugMode = debugMode;
+ }
+
+ @Override
+ public String toString() {
+ return "RpcLogSenderConfig{" +
+ "address='" + address + '\'' +
+ ", connectionTimeout=" + connectionTimeout +
+ ", socketTimeout=" + socketTimeout +
+ ", sendRetryCnt=" + sendRetryCnt +
+ ", serverRecoveryTimeInSec=" + serverRecoveryTimeInSec +
+ ", maxDelayTimeInSec=" + maxDelayTimeInSec +
+ ", authConfig=" + authConfig +
+ ", cacheConfig=" + cacheConfig +
+ ", bufferConfig=" + bufferConfig +
+ ", debug=" + debugMode +
+ '}';
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java
new file mode 100644
index 000000000..6be0ae826
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java
@@ -0,0 +1,47 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+
+/**
+ * Send-buffer config: batch size and flush expiry for the log RPC sender buffer.
+ */
+public class SendBufferConfig {
+ /**
+ * Size of send buffer
+ */
+ private int size = 50;
+
+ /**
+ * Expire time of send buffer (seconds)
+ */
+ private long expireTimeInSec = 2;
+
+ public SendBufferConfig(){
+
+ }
+
+ public SendBufferConfig(int size, long expireTimeInSec){
+ this.size = size;
+ this.expireTimeInSec = expireTimeInSec;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ public void setSize(int size) {
+ this.size = size;
+ }
+
+ public long getExpireTimeInSec() {
+ return expireTimeInSec;
+ }
+
+ public void setExpireTimeInSec(long expireTimeInSec) {
+ this.expireTimeInSec = expireTimeInSec;
+ }
+
+ @Override
+ public String toString() {
+ return "SendBufferConfig{" +
+ "size=" + size +
+ ", expireTimeInSec=" + expireTimeInSec +
+ '}';
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java
new file mode 100644
index 000000000..e40a630c7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java
@@ -0,0 +1,81 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+/**
+ * Cache config for the send-log cache (capacity, consumer threads and discard policy)
+ */
+public class SendLogCacheConfig {
+    /**
+     * Size (capacity) of send cache
+     */
+    private int size = 150;
+
+    /**
+     * Max number of consuming thread (cache consumers draining the cache)
+     */
+    private int maxConsumeThread = 2;
+
+    /**
+     * The switch to discard log when the flow-control limit is reached
+     */
+    private boolean discard = true;
+
+    /**
+     * Discard window in seconds; also used as the flow-control interval
+     * (multiplied by 1000 when passed to the cache implementation)
+     */
+    private int discardWindow = 2;
+
+    public SendLogCacheConfig(){
+
+    }
+
+    public SendLogCacheConfig(int size, int maxConsumeThread){
+        this.size = size;
+        this.maxConsumeThread = maxConsumeThread;
+    }
+
+    public int getSize() {
+        return size;
+    }
+
+    public void setSize(int size) {
+        this.size = size;
+    }
+
+    public int getMaxConsumeThread() {
+        return maxConsumeThread;
+    }
+
+    public void setMaxConsumeThread(int maxConsumeThread) {
+        this.maxConsumeThread = maxConsumeThread;
+    }
+
+    public boolean isDiscard() {
+        return discard;
+    }
+
+    public void setDiscard(boolean discard) {
+        this.discard = discard;
+    }
+
+    public int getDiscardWindow() {
+        return discardWindow;
+    }
+
+    public void setDiscardWindow(int discardWindow) {
+        this.discardWindow = discardWindow;
+    }
+
+    @Override
+    public String toString() {
+        return "SendLogCacheConfig{" +
+                "size=" + size +
+                ", maxConsumeThread=" + maxConsumeThread +
+                ", discard=" + discard +
+                ", discardWindow=" + discardWindow +
+                '}';
+    }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java
new file mode 100644
index 000000000..76fbd0c91
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java
@@ -0,0 +1,261 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+
+/**
+ * Appender config
+ */
+public class StreamisLogAppenderConfig {
+
+ protected final String applicationName;
+
+
+ protected final RpcLogSenderConfig senderConfig;
+
+ /**
+ * Message filters
+ */
+ protected final List messageFilters;
+ protected StreamisLogAppenderConfig(String applicationName, RpcLogSenderConfig rpcLogSenderConfig,
+ List messageFilters){
+ this.applicationName = applicationName;
+ this.senderConfig = null != rpcLogSenderConfig? rpcLogSenderConfig : new RpcLogSenderConfig();
+ this.messageFilters = messageFilters;
+ }
+
+ public static class Builder{
+ /**
+ * Application name
+ */
+ protected String applicationName;
+
+ /**
+ * Sender config
+ */
+ protected final RpcLogSenderConfig rpcLogSenderConfig;
+
+ /**
+ * Message filters
+ */
+ protected final List messageFilters = new ArrayList<>();
+
+ public Builder(String applicationName,
+ RpcLogSenderConfig rpcLogSenderConfig){
+ this.applicationName = applicationName;
+
+ this.rpcLogSenderConfig = Optional.ofNullable(rpcLogSenderConfig).orElse(new RpcLogSenderConfig());
+ }
+
+ /**
+ * Set application name
+ * @param applicationName application name
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setAppName(String applicationName){
+ this.applicationName = applicationName;
+ return this;
+ }
+
+
+
+ /**
+ * Rpc address
+ * @param address address
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAddress(String address){
+ this.rpcLogSenderConfig.setAddress(address);
+ return this;
+ }
+
+ /**
+ * Rpc connect timeout
+ * @param connectionTimeout connection timeout
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcConnTimeout(int connectionTimeout){
+ this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout);
+ return this;
+ }
+
+ /**
+ * Rpc socket timeout
+ * @param socketTimeout socket timeout
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcSocketTimeout(int socketTimeout){
+ this.rpcLogSenderConfig.setSocketTimeout(socketTimeout);
+ return this;
+ }
+
+ /**
+ * Rpc send retry count
+ * @param sendRetryCnt send retry count
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcSendRetryCnt(int sendRetryCnt){
+ this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt);
+ return this;
+ }
+
+ /**
+ * Rpc server recovery time in seconds
+ * @param serverRecoveryTimeInSec server recovery time
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec){
+ this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec);
+ return this;
+ }
+
+ /**
+ * Rpc max delay time in seconds
+ * @param maxDelayTimeInSec max delay time in seconds
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcMaxDelayTimeInSec(int maxDelayTimeInSec){
+ this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec);
+ return this;
+ }
+
+ /**
+ * Rpc auth token code key
+ * @param tokenCodeKey key of token code
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenCodeKey(String tokenCodeKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey);
+ return this;
+ }
+
+ /**
+ * Rpc auth token user key
+ * @param tokenUserKey key of token user
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenUserKey(String tokenUserKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey);
+ return this;
+ }
+
+ /**
+ * Rpc auth token user
+ * @param tokenUser token user
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenUser(String tokenUser){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser);
+ return this;
+ }
+
+ /**
+ * Rpc auth token code
+ * @param tokenCode token code
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenCode(String tokenCode){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode);
+ return this;
+ }
+
+ /**
+ * Rpc cache size
+ * @param cacheSize cache size
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcCacheSize(int cacheSize){
+ this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize);
+ return this;
+ }
+
+ /**
+ * Rpc cache max consume thread
+ * @param maxConsumeThread max consume thread
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcCacheMaxConsumeThread(int maxConsumeThread){
+ this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread);
+ return this;
+ }
+
+ /**
+ * Rpc buffer size
+ * @param bufferSize buffer size
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcBufferSize(int bufferSize){
+ this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize);
+ return this;
+ }
+
+ /**
+ * Rpc buffer expire time in seconds
+ * @param expireTimeInSec expire time
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcBufferExpireTimeInSec(int expireTimeInSec){
+ this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec);
+ return this;
+ }
+
+ /**
+ * Add log message filter
+ * @param messageFilter message filter
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder withMessageFilter(LogMessageFilter messageFilter){
+ this.messageFilters.add(messageFilter);
+ return this;
+ }
+
+ /**
+ * Set to discard the useless log
+ * @param discard discard
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setDiscard(boolean discard){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscard(discard);
+ return this;
+ }
+
+ /**
+ * Set the window size of discarding
+ * @param windowSize
+ * @return
+ */
+ public StreamisLogAppenderConfig.Builder setDiscardWindow(int windowSize){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscardWindow(windowSize);
+ return this;
+ }
+ /**
+ * Switch to debug
+ * @param debugMode debug mode
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setDebugMode(boolean debugMode){
+ this.rpcLogSenderConfig.setDebugMode(debugMode);
+ return this;
+ }
+
+ public StreamisLogAppenderConfig build(){
+ return new StreamisLogAppenderConfig(applicationName, rpcLogSenderConfig, messageFilters);
+ }
+ }
+ public String getApplicationName() {
+ return applicationName;
+ }
+
+
+ public RpcLogSenderConfig getSenderConfig() {
+ return senderConfig;
+ }
+
+ public List getMessageFilters() {
+ return messageFilters;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/KeywordMessageFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/KeywordMessageFilter.java
new file mode 100644
index 000000000..5d12ea071
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/KeywordMessageFilter.java
@@ -0,0 +1,126 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.regex.Pattern;
+
+/**
+ * Message filter of keyword: accepts messages matching any accept keyword
+ * and rejects messages matching any exclude keyword.
+ */
+public class KeywordMessageFilter implements LogMessageFilter{
+
+    /**
+     * Accept keywords (treated as regex fragments joined with '|')
+     */
+    private final String[] acceptKeywords;
+
+    /**
+     * Regex pattern of accept keywords
+     */
+    private Pattern acceptPattern;
+
+    /**
+     * Exclude keywords
+     */
+    private final String[] excludeKeywords;
+
+    /**
+     * Regex pattern of exclude keywords
+     */
+    private Pattern excludePattern;
+
+    /**
+     * Flags for pattern (bitwise OR of java.util.regex.Pattern flag constants)
+     */
+    private int patternFlag = 0;
+
+    public KeywordMessageFilter(String[] acceptKeywords, String[] excludeKeywords){
+        this(acceptKeywords, excludeKeywords, null);
+    }
+
+    public KeywordMessageFilter(String[] acceptKeywords, String[] excludeKeywords, String[] patternFlags){
+        this.acceptKeywords = acceptKeywords;
+        this.excludeKeywords = excludeKeywords;
+        try {
+            this.patternFlag = toPatternFlags(patternFlags);
+        } catch (IllegalAccessException e) {
+            // Ignore: fall back to flag 0 when the flag fields cannot be read
+        }
+        // Build regex pattern
+        if (acceptKeywords != null && acceptKeywords.length > 0){
+            this.acceptPattern = toMatchPattern(acceptKeywords, this.patternFlag);
+        }
+        if (excludeKeywords != null && excludeKeywords.length > 0){
+            this.excludePattern = toMatchPattern(excludeKeywords, this.patternFlag);
+        }
+    }
+
+    @Override
+    public boolean doFilter(String logger, String message) {
+        // Accept by default; exclude wins over accept when both patterns match
+        boolean accept = true;
+        if (null != acceptPattern){
+            accept = acceptPattern.matcher(message).find();
+        }
+        if (accept && null != excludePattern){
+            accept = !excludePattern.matcher(message).find();
+        }
+        return accept;
+    }
+
+    /**
+     * Convert to pattern. Keywords are inserted verbatim, so they may be regex
+     * fragments; quote them with Pattern.quote if literal matching is intended.
+     * @param keywords keyword array
+     * @param flag pattern flag
+     * @return Regex pattern
+     */
+    protected Pattern toMatchPattern(String[] keywords, int flag){
+        StringBuilder patternStr = new StringBuilder("(");
+        for(int i = 0; i < keywords.length; i++){
+            patternStr.append(keywords[i]);
+            if (i != keywords.length - 1){
+                patternStr.append("|");
+            }
+        }
+        patternStr.append(")");
+        return Pattern.compile(patternStr.toString(), flag);
+    }
+
+    /**
+     * Convert the pattern flag array to int by resolving each name against the
+     * flag constants declared in {@link Pattern} (e.g. "CASE_INSENSITIVE").
+     * Unknown names are ignored.
+     * @param patternFlags flag string array
+     * @return int value (bitwise OR of the resolved flags)
+     * @throws IllegalArgumentException if a resolved field cannot be converted to int
+     * @throws IllegalAccessException if a resolved field is inaccessible
+     */
+    private static int toPatternFlags(final String[] patternFlags) throws IllegalArgumentException,
+            IllegalAccessException {
+        if (patternFlags == null || patternFlags.length == 0) {
+            return 0;
+        }
+        final Field[] fields = Pattern.class.getDeclaredFields();
+        final Comparator<Field> comparator = Comparator.comparing(Field::getName);
+        Arrays.sort(fields, comparator);
+        final String[] fieldNames = new String[fields.length];
+        for (int i = 0; i < fields.length; i++) {
+            fieldNames[i] = fields[i].getName();
+        }
+        int flags = 0;
+        for (final String test : patternFlags) {
+            final int index = Arrays.binarySearch(fieldNames, test);
+            if (index >= 0) {
+                final Field field = fields[index];
+                // Only read public static int constants; Pattern also declares
+                // instance fields whose names would otherwise make getInt throw
+                if (Modifier.isStatic(field.getModifiers()) && field.getType() == int.class) {
+                    flags |= field.getInt(Pattern.class);
+                }
+            }
+        }
+        return flags;
+    }
+
+    public final String[] getAcceptKeywords(){
+        return this.acceptKeywords;
+    }
+
+    public final String[] getExcludeKeywords(){
+        return this.excludeKeywords;
+    }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilter.java
new file mode 100644
index 000000000..bc778bea5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilter.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;
+
+/**
+ * Log message filter, filters the message content (layout formatted)
+ */
+public interface LogMessageFilter {
+    /**
+     * Filter a formatted message
+     * @param logger logger name
+     * @param message message content
+     * @return true if the message matches the filter (accepted -- presumably meaning
+     *         it will be collected/sent; confirm at the call site)
+     */
+    boolean doFilter(String logger, String message);
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilterAdapter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilterAdapter.java
new file mode 100644
index 000000000..f98427afa
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilterAdapter.java
@@ -0,0 +1,13 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;
+
+/**
+ * Adapter interface for components that expose a {@link LogMessageFilter}
+ */
+public interface LogMessageFilterAdapter {
+
+    /**
+     * Message filter held by the adapter
+     * @return filter
+     */
+    LogMessageFilter getLogMessageFilter();
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java
new file mode 100644
index 000000000..14e3cceea
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java
@@ -0,0 +1,573 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.ExceptionListener;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.ImmutableSendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+import java.util.*;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Abstract rpc log sender
+ * @param <T> type of the log element accepted by this sender
+ * @param <E> type of the aggregated entity actually transmitted
+ */
+public abstract class AbstractRpcLogSender<T extends LogElement, E> implements RpcLogSender<T>{
+
+    /**
+     * Size (capacity) of log cache
+     */
+    int cacheSize;
+
+    /**
+     * The buffer size of sender
+     */
+    int sendBufSize;
+
+    /**
+     * Max thread num of send (cache consumers)
+     */
+    int maxCacheConsume;
+
+    /**
+     * Connect config
+     */
+    protected RpcLogSenderConfig rpcSenderConfig;
+
+    /**
+     * Rpc log context; volatile for double-checked lazy initialization
+     */
+    private volatile RpcLogContext rpcLogContext;
+
+    protected boolean isTerminated = false;
+
+    /**
+     * Use the listener instead of log4j structure
+     */
+    protected ExceptionListener exceptionListener;
+
+    public AbstractRpcLogSender(RpcLogSenderConfig rpcSenderConfig){
+        this.rpcSenderConfig = rpcSenderConfig;
+        SendLogCacheConfig cacheConfig = rpcSenderConfig.getCacheConfig();
+        this.cacheSize = cacheConfig.getSize();
+        this.maxCacheConsume = cacheConfig.getMaxConsumeThread();
+        this.sendBufSize = rpcSenderConfig.getBufferConfig().getSize();
+        // A send buffer larger than the cache could never be filled
+        if (sendBufSize > cacheSize) {
+            throw new IllegalArgumentException("Size of send buffer is larger than cache size");
+        }
+    }
+
+    @Override
+    public LogCache<T> getOrCreateLogCache() {
+        return getOrCreateRpcLogContext().getLogCache();
+    }
+
+    @Override
+    public void sendLog(T log) {
+        // Just send it into log cache
+        try {
+            getOrCreateLogCache().cacheLog(log);
+        } catch (InterruptedException e) {
+            // Invoke exception listener
+            Optional.ofNullable(exceptionListener).ifPresent(listener ->
+                    listener.onException(this, e, null));
+        }
+    }
+
+    @Override
+    public void syncSendLog(T log) {
+        // No-op: synchronous sending is not implemented by this sender
+    }
+
+    @Override
+    public void setExceptionListener(ExceptionListener listener) {
+        this.exceptionListener = listener;
+    }
+
+    @Override
+    public void close() {
+        getOrCreateRpcLogContext().destroyCacheConsumers();
+        this.isTerminated = true;
+    }
+
+    /**
+     * Aggregate send buffer for sending
+     * @param sendBuffer send buffer
+     * @return E aggregated entity
+     */
+    protected abstract E aggregateBuffer(SendBuffer<T> sendBuffer);
+
+    /**
+     * Sending operation
+     * @param aggregatedEntity agg entity
+     * @param rpcSenderConfig rpc sender config
+     */
+    protected abstract void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws Exception;
+
+    /**
+     * Send log exception strategy
+     * @return exception strategy
+     */
+    protected abstract SendLogExceptionStrategy<T> getSendLogExceptionStrategy();
+
+    protected RpcLogContext getOrCreateRpcLogContext(){
+        // Double-checked locking over the volatile rpcLogContext field
+        if (null == this.rpcLogContext){
+            synchronized (this){
+                if (null == this.rpcLogContext){
+                    // Use a non-fair lock (last constructor argument) for throughput
+                    SendLogCache<T> logCache = new QueuedSendLogCache(this.cacheSize,
+                            this.rpcSenderConfig.getCacheConfig().isDiscard(),
+                            this.rpcSenderConfig.getCacheConfig().getDiscardWindow() * 1000, false);
+                    this.rpcLogContext = new RpcLogContext(logCache);
+                    // Start cache consumers
+                    for(int i = 0; i < maxCacheConsume; i++) {
+                        this.rpcLogContext.startCacheConsumer();
+                    }
+                }
+            }
+        }
+        return this.rpcLogContext;
+    }
+
+    /**
+     * Holds the shared send-log cache and the pool of cache-consumer tasks.
+     * NOTE(review): generic parameters appear stripped from this patch text
+     * (e.g. "LinkedList>", "Future>", raw SendLogCache/SendBuffer) -- restore
+     * them against the original sources when applying.
+     */
+    private class RpcLogContext{
+
+        private static final String RPC_LOG_CACHE_CONSUMER = "RpcLog-Cache-Consumer-Thread-";
+
+        /**
+         * Send log cache shared by all consumers
+         */
+        private final SendLogCache logCache;
+
+        /**
+         * Consume pool
+         */
+        private final ThreadPoolExecutor consumePool;
+
+        /**
+         * Count of the consumers
+         */
+        private int consumers = 0;
+
+        /**
+         * Consumers started so far (tail one is destroyed first)
+         */
+        private final LinkedList> sendLogCacheConsumers = new LinkedList<>();
+
+        /**
+         * Context lock guarding consumers/sendLogCacheConsumers
+         */
+        private final ReentrantLock ctxLock;
+
+        public RpcLogContext(SendLogCache logCache){
+            this.logCache = logCache;
+            this.ctxLock = new ReentrantLock();
+            // Core size 0 + SynchronousQueue: each submitted consumer runs on its own
+            // thread, bounded by maxCacheConsume; idle threads expire after 60s
+            this.consumePool = new ThreadPoolExecutor(0, maxCacheConsume,
+                    60L, TimeUnit.SECONDS,
+                    new SynchronousQueue<>(), new ThreadFactory() {
+                private final ThreadGroup group = Thread.currentThread().getThreadGroup();
+                private final AtomicInteger threadNum = new AtomicInteger(1);
+                @Override
+                public Thread newThread(Runnable r) {
+                    Thread t = new Thread(group, r, RPC_LOG_CACHE_CONSUMER
+                            + threadNum.getAndIncrement(), 0);
+                    // Force non-daemon, normal-priority consumer threads
+                    if (t.isDaemon()) {
+                        t.setDaemon(false);
+                    }
+                    if (t.getPriority() != Thread.NORM_PRIORITY) {
+                        t.setPriority(Thread.NORM_PRIORITY);
+                    }
+                    return t;
+                }
+            });
+        }
+
+        public boolean startCacheConsumer(){
+            // Unlocked fast-path check; re-checked under ctxLock below
+            if (consumers >= maxCacheConsume) {
+//                throw new IllegalStateException("Over the limit number of cache consumers: [" + maxCacheConsume + "]");
+                return false;
+            }
+            this.ctxLock.lock();
+            try {
+                if (consumers < maxCacheConsume) {
+                    String id = UUID.randomUUID().toString();
+                    SendBuffer sendBuffer = new ImmutableSendBuffer<>(sendBufSize);
+                    // Each consumer drains the shared cache into its own send buffer,
+                    // then aggregates and sends via the strategy on flush
+                    SendLogCacheConsumer consumer = new SendLogCacheConsumer(id, logCache, sendBuffer, rpcSenderConfig) {
+                        @Override
+                        protected void onFlushAndSend(SendBuffer sendBuffer) {
+                            // First to aggregate the buffer
+                            E aggEntity = aggregateBuffer(sendBuffer);
+                            Optional.ofNullable(getSendLogExceptionStrategy()).ifPresent(
+                                    strategy -> strategy.doSend(() -> {
+                                        doSend(aggEntity, rpcSenderConfig);
+                                        return null;
+                                    }, sendBuffer));
+                        }
+                    };
+                    Future> future = this.consumePool.submit(consumer);
+                    consumer.setFuture(future);
+                    sendLogCacheConsumers.add(consumer);
+                    this.consumers++;
+                    return true;
+                }
+            } finally {
+                this.ctxLock.unlock();
+            }
+            return false;
+        }
+
+        public SendLogCache getLogCache(){
+            return this.logCache;
+        }
+
+        /**
+         * Destroy cache consumer(select the tail one); always keeps at least one consumer
+         */
+        public boolean destroyCacheConsumer(){
+            if (this.consumers <= 1){
+                return false;
+            }
+            this.ctxLock.lock();
+            try {
+                if (this.consumers > 1 && this.sendLogCacheConsumers.size() > 1) {
+                    SendLogCacheConsumer consumer = sendLogCacheConsumers.removeLast();
+                    consumer.shutdown();
+                    this.consumers --;
+                    return true;
+                }
+            } finally {
+                this.ctxLock.unlock();
+            }
+            return false;
+        }
+
+        /**
+         * Destroy all the consumers
+         */
+        public void destroyCacheConsumers(){
+            this.ctxLock.lock();
+            try {
+                sendLogCacheConsumers.forEach(SendLogCacheConsumer::shutdown);
+                sendLogCacheConsumers.clear();
+                this.consumers = 0;
+            } finally {
+                this.ctxLock.unlock();
+            }
+        }
+    }
+    /**
+     * Act as ArrayBlockingQueue (jdk 1.8): a bounded circular buffer guarded by a
+     * single ReentrantLock with notEmpty/notFull conditions, extended with
+     * flow-control based log discarding.
+     */
+    private class QueuedSendLogCache implements SendLogCache{
+
+        // Queued items (circular array)
+        final Object[] items;
+
+        // Take index
+        int takeIndex;
+
+        // Put index
+        int putIndex;
+
+        // Count of queued elements
+        int count;
+
+        // Accumulated wait time (ms) of producers blocked on a full queue
+        final AtomicLong cacheWaitTime = new AtomicLong(0);
+
+        // Accumulated wait time (ms) of consumers blocked on an empty queue
+        final AtomicLong takeWaitTime = new AtomicLong(0);
+
+        // Number of elements processed in the current control window
+        final AtomicLong process = new AtomicLong(0);
+
+        // Flow-control budget: producers may discard once this drops to <= 0
+        final AtomicLong control = new AtomicLong(Long.MAX_VALUE - 1);
+
+        // If enable to discard log
+        boolean discard;
+
+        // Number of elements discarded in the current control window
+        int discardCount = 0;
+
+        // Time clock: end of the current flow-control window (epoch ms)
+        long clock = System.currentTimeMillis();
+
+        // interval to control (ms); overwritten by the discard window in the constructor
+        long controlInterval = 1 * 1000;
+
+        // Reentrant lock guarding all queue state
+        final ReentrantLock lock;
+
+        // Condition for waiting takes
+        private final Condition notEmpty;
+
+        // Condition for waiting puts(cacheLog)
+        private final Condition notFull;
+
+        public QueuedSendLogCache(int capacity, boolean discard, int discardWind, boolean fair) {
+            this.items = new Object[capacity];
+            lock = new ReentrantLock(fair);
+            this.notEmpty = lock.newCondition();
+            this.notFull = lock.newCondition();
+            this.discard = discard;
+            // Make the discard window size as the control interval
+            this.controlInterval = discardWind;
+            this.clock = System.currentTimeMillis() + controlInterval;
+        }
+
+        @Override
+        public void cacheLog(T logElement) throws InterruptedException {
+            // Skip the null element
+            if (Objects.nonNull(logElement)){
+                final ReentrantLock lock = this.lock;
+                // Fast-path uncontended acquire; otherwise wait interruptibly
+                boolean tryLock = lock.tryLock();
+                if (!tryLock){
+                    lock.lockInterruptibly();
+                }
+                try{
+                    flowControl();
+                    // When the flow-control budget is exhausted, drop low-mark elements.
+                    // NOTE(review): elements with mark() >= 2 are never discarded --
+                    // confirm the LogElement.mark() contract (presumably "important" logs)
+                    if (discard && control.decrementAndGet() <= 0){
+                        if (logElement.mark() < 2){
+                            discardCount++;
+                            return;
+                        }
+                    }
+                    // Blocks the producing (appender) thread while the queue is full
+                    while (count == items.length){
+//                        System.out.println("The queue is full, maybe lost the data");
+                        long ws = System.currentTimeMillis();
+                        notFull.await();
+                        cacheWaitTime.addAndGet(System.currentTimeMillis() - ws);
+                    }
+                    enqueue(logElement);
+                }finally{
+                    lock.unlock();
+                }
+            }
+        }
+
+        @Override
+        public int drainLogsTo(List elements, int maxElements) {
+            if (Objects.nonNull(elements) && maxElements > 0){
+                final Object[] items = this.items;
+                final ReentrantLock lock = this.lock;
+                lock.lock();
+                try{
+                    int n = Math.min(maxElements, count);
+                    int take = takeIndex;
+                    int i = 0;
+                    try {
+                        while (i < n){
+                            @SuppressWarnings("unchecked")
+                            T x = (T) items[take];
+                            elements.add(x);
+                            items[take] = null;
+                            if (++ take == items.length)
+                                take = 0;
+                            i++;
+                        }
+                        return n;
+                    }finally {
+                        // Slots already nulled above, so clearItems = false
+                        restoreInvariants(i, take, false);
+                    }
+                } finally {
+                    lock.unlock();
+                }
+            }
+            return 0;
+        }
+
+        // Equal to the poll method in ArrayBlockingQueue
+        @Override
+        public T takeLog(long timeout, TimeUnit unit) throws InterruptedException {
+            long nanos = unit.toNanos(timeout);
+            final ReentrantLock lock = this.lock;
+            T element;
+            lock.lockInterruptibly();
+            try{
+                flowControl();
+                while (count == 0){
+                    long ws = System.currentTimeMillis();
+                    // Timed out: give up without an element
+                    if (nanos <= 0){
+                        return null;
+                    }
+                    nanos = notEmpty.awaitNanos(nanos);
+                    takeWaitTime.addAndGet(System.currentTimeMillis() - ws);
+                }
+                element = dequeue();
+                process.incrementAndGet();
+            } finally {
+                lock.unlock();
+            }
+            return element;
+        }
+
+        @Override
+        public boolean isCacheable() {
+            final ReentrantLock lock = this.lock;
+            lock.lock();
+            try {
+                return count < items.length;
+            }finally {
+                lock.unlock();
+            }
+        }
+
+        // The same as the clear() method,
+        @Override
+        public void destroy() {
+            final Object[] items = this.items;
+            final ReentrantLock lock = this.lock;
+            lock.lock();
+            try {
+                int k = count;
+                if (k > 0) {
+                    final int putIndex = this.putIndex;
+                    int i = takeIndex;
+                    // Null out every occupied slot between takeIndex and putIndex
+                    do {
+                        items[i] = null;
+                        if (++i == items.length)
+                            i = 0;
+                    } while (i != putIndex);
+                    takeIndex = putIndex;
+                    count = 0;
+                    // Wake up producers blocked on a formerly-full queue
+                    for (; k > 0 && lock.hasWaiters(notFull); k--)
+                        notFull.signal();
+                }
+            } finally {
+                lock.unlock();
+            }
+        }
+
+        /**
+         * Drain the elements into send buffer (bulk array copy per wrap-around segment)
+         * @param sendBuffer send buffer
+         * @param maxElements max element size
+         * @return int
+         */
+        @Override
+        public int drainLogsTo(SendBuffer sendBuffer, int maxElements) {
+            if (Objects.nonNull(sendBuffer) && maxElements > 0){
+                final Object[] items = this.items;
+                final ReentrantLock lock = this.lock;
+                lock.lock();
+                try{
+                    flowControl();
+                    int n = Math.min(maxElements, count);
+                    int take = takeIndex;
+                    int i = 0;
+                    int send;
+                    try {
+                        while (n > 0) {
+                            // Copy at most up to the end of the array, then wrap
+                            int len = items.length - take;
+                            int send0 = Math.min(n, len);
+                            // Copy the array element to buffer directly
+                            send = sendBuf(sendBuffer, this.items, take, send0);
+                            n -= send;
+                            if ((take = take + send) >= items.length) {
+                                take = 0;
+                            }
+                            i += send;
+                            // Stop early if the buffer accepted less than requested
+                            if (send < send0 || send <= 0) {
+                                break;
+                            }
+                        }
+                        process.addAndGet(i);
+                        return i;
+                    } finally {
+                        if (i > 0){
+                            // Slots were not nulled during the copy, so clearItems = true
+                            restoreInvariants(i, take, true);
+                        }
+                    }
+                }finally {
+                    lock.unlock();
+                }
+            }
+            return 0;
+        }
+
+        @SuppressWarnings("unchecked")
+        private int sendBuf(SendBuffer sendBuffer, Object[] items, int takeIndex, int len){
+            int send = sendBuffer.writeBuf(items, takeIndex, len);
+            if (send < len){
+                // Buffer full exception
+                exceptionListener.onException(this, null, "The sender buffer is full," +
+                        " expected: [" + len + "], actual: [" + send + "]");
+            }
+            // Allow data loss
+            return send;
+        }
+
+        // Restore count/takeIndex after a bulk drain and wake blocked producers
+        private void restoreInvariants(int i, int take, boolean clearItems){
+            this.count -= i;
+            if (clearItems){
+                int index = this.takeIndex;
+                int j = i;
+                for (; j > 0; j --){
+                    this.items[index] = null;
+                    if (++index == items.length){
+                        index = 0;
+                    }
+                }
+                //At last index equals take
+            }
+            this.takeIndex = take;
+            for (; i > 0 && lock.hasWaiters(notFull); i--){
+                notFull.signal();
+            }
+        }
+
+        // Inserts element at current put position, advances, and signals. Call only when holding lock.
+        private void enqueue(T element){
+            this.items[putIndex] = element;
+            if (++putIndex >= items.length){
+                putIndex = 0;
+            }
+            count ++;
+            notEmpty.signal();
+        }
+
+        // Extracts element at current take position, advances, and signals. Call only when holding lock.
+        private T dequeue(){
+            @SuppressWarnings("unchecked")
+            T element = (T)this.items[takeIndex];
+            this.items[takeIndex] = null;
+            if ( ++ takeIndex == items.length){
+                this.takeIndex = 0;
+            }
+            count --;
+            // Not need to support iterator
+            notFull.signal();
+            return element;
+        }
+
+        /**
+         * Flow control: once per control window, recompute the discard budget.
+         * If consumers never waited (takeWaitTime <= 0) they are saturated, so the
+         * next window's budget is scaled from last window's throughput; otherwise
+         * the budget is effectively unlimited. Call only when holding lock.
+         */
+        private void flowControl(){
+            long ws = System.currentTimeMillis();
+            if (clock <= ws) {
+                long interval = ws - clock + controlInterval;
+                clock = ws + controlInterval;
+                if (rpcSenderConfig.isDebugMode()) {
+                    System.out.println("cacheWait: " + cacheWaitTime.get() + ", takeWait:" + takeWaitTime.get() + ", discarded: " + discardCount);
+                }
+                if (takeWaitTime.get() <= 0 && process.get() > 0){
+                    this.control.set((long) ((double)process.get() * ((double)controlInterval / (double)interval)));
+                    if (rpcSenderConfig.isDebugMode()) {
+                        System.out.println("new window control: " + this.control.get());
+                    }
+                } else {
+                    this.control.set(Long.MAX_VALUE);
+                }
+                cacheWaitTime.set(0);
+                takeWaitTime.set(0);
+                process.set(0);
+                discardCount = 0;
+            }
+        }
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java
new file mode 100644
index 000000000..8254f0a34
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java
@@ -0,0 +1,39 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.ExceptionListener;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Rpc Log sender
+ * @param <T> type of the log element to send
+ */
+public interface RpcLogSender<T extends LogElement> {
+
+    /**
+     * Produce (or lazily create) the log cache backing this sender
+     * @return log cache
+     */
+    LogCache<T> getOrCreateLogCache();
+
+    /**
+     * Send log (async): queue the element into the cache
+     * @param log log element
+     */
+    void sendLog(T log);
+
+    /**
+     * Send log (sync)
+     * @param log log element
+     */
+    void syncSendLog(T log);
+
+    /**
+     * Exception listener invoked on cache/send failures
+     * @param listener listener
+     */
+    void setExceptionListener(ExceptionListener listener);
+
+    /**
+     * Close sender and release its resources
+     */
+    void close();
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java
new file mode 100644
index 000000000..200c573d9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Send log cache
+ * @param
+ */
+public interface SendLogCache extends LogCache {
+
+ /**
+ * Drain the logs into send buffer
+ * @param sendBuffer send buffer
+ * @param maxElements max element size
+ * @return count
+ */
+ int drainLogsTo(SendBuffer sendBuffer, int maxElements);;
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java
new file mode 100644
index 000000000..fac98b90a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java
@@ -0,0 +1,128 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;

import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

/**
 * Send log consumer.
 * Consumes the log elements from the cache and puts them into the send buffer;
 * when the buffer is full or the flush deadline expires, flushes it via
 * {@link #onFlushAndSend(SendBuffer)}.
 *
 * @param <T> type of log element consumed
 */
public abstract class SendLogCacheConsumer<T extends LogElement> implements Runnable {

    /**
     * Termination flag; written by {@link #shutdown()} from another thread,
     * so it must be volatile to be visible to the consumer loop.
     */
    private volatile boolean isTerminated = false;

    /**
     * Buffer expire (flush) interval in milliseconds; -1 disables time-based flushing.
     */
    private final long bufferExpireTimeInMills;

    /**
     * Send log cache to drain from.
     */
    private final SendLogCache<T> cache;

    /**
     * Send buffer to fill.
     */
    private final SendBuffer<T> sendBuffer;

    /**
     * Consumer identity.
     */
    private final String id;

    /**
     * Future of the task executing this consumer; used to cancel on shutdown.
     */
    private Future<?> future;

    public SendLogCacheConsumer(String id, SendLogCache<T> cache,
                                SendBuffer<T> sendBuffer,
                                RpcLogSenderConfig rpcSenderConfig) {
        this.id = id;
        this.cache = cache;
        this.sendBuffer = sendBuffer;
        long expireTimeInSec = rpcSenderConfig.getBufferConfig().getExpireTimeInSec();
        this.bufferExpireTimeInMills = expireTimeInSec > 0 ? TimeUnit.SECONDS
                .toMillis(expireTimeInSec) : -1;
    }

    @Override
    public void run() {
        int remain;
        long expireTimeInMills = requireNewFlushTime();
        int capacity = sendBuffer.capacity();
        while (!this.isTerminated) {
            try {
                remain = this.sendBuffer.remaining();
                if ((expireTimeInMills > 0 && expireTimeInMills <= System.currentTimeMillis()) || remain <= 0) {
                    // Deadline reached or buffer full: transient to the read mode and flush
                    if (remain < capacity) {
                        sendBuffer.flip();
                        onFlushAndSend(sendBuffer);
                    }
                    expireTimeInMills = requireNewFlushTime();
                    if (sendBuffer.isReadMode()) {
                        // Clear the buffer and transient to the write mode, otherwise continue writing
                        sendBuffer.clear();
                    }
                    remain = this.sendBuffer.remaining();
                }
                if (remain > 0) {
                    int inBuf = this.cache.drainLogsTo(sendBuffer, remain);
                    if (inBuf < remain) {
                        // Means that the cache is empty; block waiting for the next log element
                        long waitTime = expireTimeInMills - System.currentTimeMillis();
                        if (waitTime > 0) {
                            T logElement = this.cache.takeLog(waitTime, TimeUnit.MILLISECONDS);
                            if (null != logElement) {
                                sendBuffer.writeBuf(logElement);
                            }
                        }
                    }
                }
            } catch (Throwable e) {
                if (this.isTerminated && e instanceof InterruptedException) {
                    // Interrupted by shutdown(): exit quietly
                    return;
                } else {
                    e.printStackTrace();
                    System.err.println("SendLogCacheConsumer[" + Thread.currentThread().getName() + "] occurred exception [" + e.getLocalizedMessage() + "]");
                    // For an unknown exception, drop the buffered logs and restart the flush window
                    sendBuffer.clear();
                    expireTimeInMills = requireNewFlushTime();
                }
                try {
                    // Back off briefly to avoid a hot error loop
                    Thread.sleep(500);
                } catch (InterruptedException ex) {
                    // Ignore
                }
            }
        }
    }

    /**
     * Stop the consumer loop and cancel (interrupt) the running task.
     */
    public void shutdown() {
        this.isTerminated = true;
        if (null != this.future) {
            this.future.cancel(true);
        }
    }

    public Future<?> getFuture() {
        return future;
    }

    public void setFuture(Future<?> future) {
        this.future = future;
    }

    /**
     * Compute the next flush deadline; -1 when time-based flushing is disabled.
     */
    private long requireNewFlushTime() {
        return bufferExpireTimeInMills > 0 ? System.currentTimeMillis() + bufferExpireTimeInMills : -1;
    }

    /**
     * Invoked when the buffer is full or the flush deadline is reached.
     *
     * @param sendBuffer send buffer (in read mode)
     */
    protected abstract void onFlushAndSend(SendBuffer<T> sendBuffer);
}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java
new file mode 100644
index 000000000..d33b7d2e4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java
@@ -0,0 +1,61 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;

import java.util.Objects;
import java.util.concurrent.Callable;

/**
 * Strategy controlling the action taken when a send operation throws.
 *
 * @param <T> type of log element handled by the sender
 */
public abstract class SendLogExceptionStrategy<T extends LogElement> {

    /**
     * The sender this strategy belongs to.
     */
    protected final RpcLogSender<T> sender;

    public SendLogExceptionStrategy(RpcLogSender<T> sender) {
        this.sender = sender;
    }

    /**
     * Maximum number of attempts for one send operation.
     *
     * @return retry count
     */
    public abstract int retryCount();

    /**
     * Decide whether the failed operation may be retried.
     *
     * @param e          exception raised by the send operation
     * @param sendBuffer buffer involved in the failed send
     * @return retry description (null is treated as "do not retry")
     */
    public abstract RetryDescription onException(Exception e, SendBuffer<T> sendBuffer);

    /**
     * Run the send operation, retrying according to {@link #onException}.
     *
     * @return the operation result, or null when all attempts failed
     */
    <V> V doSend(Callable<V> sendOperation, SendBuffer<T> sendBuffer) {
        int retryCount = retryCount();
        int count = 0;
        RetryDescription retryDescription;
        while (++count <= retryCount) {
            try {
                return sendOperation.call();
            } catch (Exception e) {
                retryDescription = onException(e, sendBuffer);
                if (Objects.isNull(retryDescription) || !retryDescription.canRetry) {
                    break;
                }
            }
        }
        return null;
    }

    /**
     * Result of {@link #onException}: whether the operation may be retried.
     */
    protected static class RetryDescription {

        private final boolean canRetry;

        public RetryDescription(boolean canRetry) {
            this.canRetry = canRetry;
        }

        public boolean isCanRetry() {
            return canRetry;
        }
    }
}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java
new file mode 100644
index 000000000..d019c29f5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java
@@ -0,0 +1,45 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.AbstractHttpLogSender;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents;
+
+/**
+ * Log sender for streamis
+ */
+public class StreamisRpcLogSender extends AbstractHttpLogSender {
+
+ /**
+ * Each sender register an application
+ */
+ private final String applicationName;
+
+ public StreamisRpcLogSender(String applicationName, RpcLogSenderConfig rpcSenderConfig) {
+ super(rpcSenderConfig);
+ this.applicationName = applicationName;
+ }
+
+ /**
+ * Aggregate to streamis log events
+ * @param sendBuffer send buffer
+ * @return
+ */
+ @Override
+ protected StreamisLogEvents aggregateBuffer(SendBuffer sendBuffer) {
+ int remain = sendBuffer.remaining();
+ if (remain > 0) {
+ StreamisLogEvent[] logEvents = new StreamisLogEvent[remain];
+ sendBuffer.readBuf(logEvents, 0, logEvents.length);
+ return new StreamisLogEvents(applicationName, logEvents);
+ }
+ return null;
+ }
+
+ @Override
+ protected String convertToJsonString(StreamisLogEvents aggregatedEntity) {
+ return aggregatedEntity.toJson();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java
new file mode 100644
index 000000000..1b42ad957
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java
@@ -0,0 +1,135 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;
+
+/**
+ * Abstract sender buffer;
+ * non-blocking and reduces out-of-bounds exceptions
+ */
+public abstract class AbstractSendBuffer implements SendBuffer{
+
+ protected enum Flag{
+ WRITE_MODE, READ_MODE
+ }
+
+ /**
+ * Access flag
+ */
+ private Flag accessFlag = Flag.WRITE_MODE;
+
+ private int position = 0;
+ private int limit;
+ /**
+ * The capacity is mutable
+ */
+ protected int capacity;
+
+
+ public AbstractSendBuffer(int capacity){
+ this.capacity = capacity;
+ limit(this.capacity);
+ }
+
+ public AbstractSendBuffer(){
+ this(Integer.MAX_VALUE);
+ }
+
+ @Override
+ public boolean isReadMode() {
+ return accessFlag == Flag.READ_MODE;
+ }
+
+ @Override
+ public boolean isWriteMode() {
+ return accessFlag == Flag.WRITE_MODE;
+ }
+
+ @Override
+ public int capacity() {
+ return this.capacity;
+ }
+
+ @Override
+ public int remaining() {
+ int rem = this.limit - this.position;
+ return Math.max(rem, 0);
+ }
+
+ @Override
+ public void flip() {
+ checkFlag(Flag.WRITE_MODE);
+ this.limit = this.position;
+ this.position = 0;
+ this.accessFlag = Flag.READ_MODE;
+ }
+
+ @Override
+ public void rewind() {
+ position = 0;
+ }
+
+ @Override
+ public void clear() {
+ limit(this.capacity);
+ this.position = 0;
+ this.accessFlag = Flag.WRITE_MODE;
+ clearBuf();
+ }
+
+ /**
+ * Change the limit value
+ * @param newLimit new limit
+ */
+ final void limit(int newLimit){
+ if (newLimit > this.capacity || (newLimit < 0)){
+ throw new IllegalArgumentException("Set the illegal limit value: " + newLimit + " in send buffer, [capacity: " + this.capacity + "]");
+ }
+ this.limit = newLimit;
+ if (this.position > newLimit){
+ this.position = newLimit;
+ }
+ }
+
+ /**
+ * Inc the position with offset
+ * @param offset offset value
+ * @param accessFlag access flag
+ * @return the current position value
+ */
+ final int nextPosition(int offset, Flag accessFlag){
+ checkFlag(accessFlag);
+ int p = position;
+ // Reach the limit, return -1 value
+ if (p >= limit){
+ return -1;
+ }
+ if (p + offset > limit){
+ this.position = limit;
+ } else {
+ this.position = p + offset;
+ }
+ return p;
+ }
+
+ final void checkFlag(Flag accessFlag){
+ if (this.accessFlag != accessFlag){
+ throw new IllegalStateException("Illegal access flag [" + accessFlag + "] for send buffer");
+ }
+ }
+ final void setFlag(Flag accessFlag){
+ this.accessFlag = accessFlag;
+ }
+ /**
+ *
+ * @return the current position
+ */
+ final int position(){
+ return this.position;
+ }
+
+ final void position(int position){
+ this.position = position;
+ }
+ /**
+ * Do the actual clear
+ */
+ protected abstract void clearBuf();
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java
new file mode 100644
index 000000000..0e64c4ffa
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java
@@ -0,0 +1,102 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;

import java.util.Arrays;
import java.util.function.Function;

/**
 * Immutable (fixed-capacity) send buffer backed by an object array.
 *
 * @param <E> type of buffered element
 */
public class ImmutableSendBuffer<E> extends AbstractSendBuffer<E> {

    /**
     * Buffer object array.
     */
    private final Object[] buf;

    public ImmutableSendBuffer(int capacity) {
        super(capacity);
        buf = new Object[capacity];
    }

    @Override
    protected void clearBuf() {
        // Release the memory occupied by the elements
        Arrays.fill(buf, null);
    }

    /**
     * Fixed-capacity buffer: resizing is unsupported.
     */
    @Override
    public void capacity(String newCapacity) {
        throw new IllegalArgumentException("Unsupported to scale-in/scale-up the send buffer");
    }

    @Override
    @SuppressWarnings("all")
    public int writeBuf(Object[] elements, int srcIndex, int length) {
        if (srcIndex < elements.length) {
            int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.WRITE_MODE);
            if (startPos >= 0) {
                int writes = position() - startPos;
                System.arraycopy(elements, srcIndex, this.buf, startPos, writes);
                return writes;
            }
        }
        return -1;
    }

    @Override
    @SuppressWarnings("all")
    public int readBuf(Object[] elements, int srcIndex, int length) {
        if (srcIndex < elements.length) {
            int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.READ_MODE);
            if (startPos >= 0) {
                int reads = position() - startPos;
                System.arraycopy(this.buf, startPos, elements, srcIndex, reads);
                return reads;
            }
        }
        return -1;
    }

    @Override
    public int writeBuf(E element) {
        int startPos = nextPosition(1, Flag.WRITE_MODE);
        if (startPos >= 0) {
            buf[startPos] = element;
            return 1;
        }
        return -1;
    }

    @Override
    @SuppressWarnings("unchecked")
    public E readBuf() {
        int startPos = nextPosition(1, Flag.READ_MODE);
        if (startPos >= 0) {
            return (E) buf[startPos];
        }
        return null;
    }

    @Override
    @SuppressWarnings("unchecked")
    public SendBuffer<E> compact(Function<E, Boolean> dropAble) {
        checkFlag(Flag.READ_MODE);
        int offset = 0;
        int compact = position() - 1;
        // Shift the kept (non-droppable) elements left over the dropped ones
        for (int i = position(); i < capacity; i++) {
            Object element = buf[i];
            if (dropAble.apply((E) element)) {
                buf[i] = null;
                offset++;
            } else {
                compact = i - offset;
                buf[compact] = element;
            }
        }
        // Continue writing right after the last kept element
        position(compact + 1);
        limit(this.capacity);
        setFlag(Flag.WRITE_MODE);
        return this;
    }

}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java
new file mode 100644
index 000000000..0a98580fb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java
@@ -0,0 +1,92 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;

import java.util.function.Function;

/**
 * Buffer for the Rpc sender, with NIO-Buffer-like write/read modes.
 *
 * @param <E> type of buffered element
 */
public interface SendBuffer<E> {

    /**
     * Capacity.
     *
     * @return int
     */
    int capacity();

    /**
     * Is in read mode.
     *
     * @return boolean
     */
    boolean isReadMode();

    /**
     * Is in write mode.
     *
     * @return boolean
     */
    boolean isWriteMode();

    /**
     * Scale-up or scale-in the buffer.
     *
     * @param newCapacity new capacity
     */
    void capacity(String newCapacity);

    /**
     * Remaining size
     * (remaining space for writing, or remaining elements for reading).
     *
     * @return int
     */
    int remaining();

    /**
     * Transition from write mode to read mode.
     */
    void flip();

    /**
     * Restart from the beginning of the window.
     */
    void rewind();

    /**
     * Clear to reuse the buffer (back to write mode).
     */
    void clear();

    /**
     * Write one buffer element.
     *
     * @param element element
     * @return number written (1), or -1 when full
     */
    int writeBuf(E element);

    /**
     * Write a buffer element array.
     *
     * @param elements elements
     * @param srcIndex the src index in elements
     * @param length   the length to read
     * @return write num
     */
    int writeBuf(Object[] elements, int srcIndex, int length);

    /**
     * Read one buffer element.
     *
     * @return element, or null when exhausted
     */
    E readBuf();

    /**
     * Read into a buffer element array.
     *
     * @param elements elements
     * @param srcIndex the src index in elements
     * @param length   the length to write
     * @return read num
     */
    int readBuf(Object[] elements, int srcIndex, int length);

    /**
     * Compact the buffer, dropping the useless elements.
     *
     * @param dropAble drop function; returns true for elements to discard
     * @return send buffer
     */
    SendBuffer<E> compact(Function<E, Boolean> dropAble);


}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java
new file mode 100644
index 000000000..c693d0152
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java
@@ -0,0 +1,163 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.AbstractRpcLogSender;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.SendLogExceptionStrategy;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request.StringPostAction;
import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.HttpResponseException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.conn.ConnectTimeoutException;

import javax.net.ssl.SSLException;
import java.io.*;
import java.net.UnknownHostException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Http-based Rpc log sender: posts the aggregated entity as JSON and applies
 * a retry/back-off strategy on failures.
 *
 * @param <T> type of log element
 * @param <E> type of the aggregated entity posted to the server
 */
public abstract class AbstractHttpLogSender<T extends LogElement, E> extends AbstractRpcLogSender<T, E> {

    /**
     * Retry strategy.
     */
    private final SendLogExceptionStrategy<T> sendRetryStrategy;

    /**
     * Exception counter; reset after a successful send.
     */
    private final AtomicInteger exceptionCounter = new AtomicInteger();

    /**
     * Hold the global http client.
     */
    private final HttpClient globalHttpClient;

    /**
     * Time point before which sending is suspended (server recovery window).
     */
    private final AtomicLong serverRecoveryTimePoint = new AtomicLong(-1L);

    public AbstractHttpLogSender(RpcLogSenderConfig rpcSenderConfig) {
        super(rpcSenderConfig);
        this.globalHttpClient = HttpClientTool.createHttpClient(rpcSenderConfig);
        this.sendRetryStrategy = new SendLogExceptionStrategy<T>(this) {

            /** Transient network exceptions that justify a retry. */
            private final Class<?>[] retryOnExceptions = new Class<?>[]{
                    InterruptedIOException.class, UnknownHostException.class,
                    ConnectTimeoutException.class, SSLException.class};

            @Override
            public int retryCount() {
                return rpcSenderConfig.getSendRetryCnt();
            }

            @Override
            public SendLogExceptionStrategy.RetryDescription onException(Exception e, SendBuffer<T> sendBuffer) {
                boolean shouldRetry = false;
                // Limit of exception number is the same as the retry times
                if (exceptionCounter.incrementAndGet() > retryCount()) {
                    // Too many consecutive failures: suspend sending for the recovery window
                    serverRecoveryTimePoint.set(System.currentTimeMillis() +
                            TimeUnit.SECONDS.toMillis(rpcSenderConfig.getServerRecoveryTimeInSec()));
                } else {
                    for (Class<?> retryOnException : retryOnExceptions) {
                        if (retryOnException.equals(e.getClass())) {
                            shouldRetry = true;
                            break;
                        }
                    }
                    // NOTE(review): retrying only when status < 500 looks inverted
                    // (5xx is usually the retryable class) — confirm intended semantics
                    if (!shouldRetry && e instanceof HttpResponseException) {
                        if (((HttpResponseException) e).getStatusCode() < 500) {
                            shouldRetry = true;
                        }
                    }
                }
                if (shouldRetry && !sender.getOrCreateLogCache().isCacheable()) {
                    // Means that the cache is full
                    // Set the position of buffer to 0
                    sendBuffer.rewind();
                    // Compact the buffer (dropping low-importance elements) and go back to write mode
                    sendBuffer.compact(element -> element.mark() > 1);
                    shouldRetry = false;
                }
                Optional.ofNullable(exceptionListener).ifPresent(listener -> listener.onException(sender, e, null));
                return new RetryDescription(shouldRetry);
            }
        };
    }

    @Override
    protected SendLogExceptionStrategy<T> getSendLogExceptionStrategy() {
        return this.sendRetryStrategy;
    }

    @Override
    protected void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws IOException {
        // Skip sending entirely while inside the server recovery window
        if (System.currentTimeMillis() >= serverRecoveryTimePoint.get()) {
            if (aggregatedEntity instanceof LogElement) {
                long timestamp = ((LogElement) aggregatedEntity).getLogTimeStamp();
                if (System.currentTimeMillis() - timestamp > rpcSenderConfig.getMaxDelayTimeInSec() * 1000L) {
                    // Entity is too stale: abort it
                    return;
                }
            }
            String address = rpcSenderConfig.getAddress();
            if (null != address && !address.trim().equals("")) {
                StringPostAction postAction = new StringPostAction(rpcSenderConfig.getAddress(), convertToJsonString(aggregatedEntity));
                RpcAuthConfig authConfig = rpcSenderConfig.getAuthConfig();
                postAction.getRequestHeaders().put(authConfig.getTokenUserKey(), authConfig.getTokenUser());
                HttpResponse response = null;
                try {
                    response = postAction.execute(this.globalHttpClient);
                    int statusCode = response.getStatusLine().getStatusCode();
                    // NOTE(review): this also rejects 201/204 success codes — confirm the
                    // server always answers 200 on success
                    if (statusCode > 200) {
                        throw new HttpResponseException(statusCode,
                                convertToString(response.getEntity().getContent(), StandardCharsets.UTF_8));
                    }
                } finally {
                    // Close the response and release the connection
                    if (null != response) {
                        if (response instanceof CloseableHttpResponse) {
                            ((CloseableHttpResponse) response).close();
                        } else {
                            // Destroy the stream
                            response.getEntity().getContent().close();
                        }
                    }
                }
                // Successful send: reset the consecutive-exception counter
                this.exceptionCounter.set(0);
            }
        }
    }

    /**
     * Convert an input stream to a string, line by line (line separators are dropped).
     *
     * @param inputStream input stream
     * @param charset     charset
     * @return string value
     * @throws IOException on read failure
     */
    private String convertToString(InputStream inputStream, Charset charset) throws IOException {
        StringBuilder builder = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, charset))) {
            String line;
            while ((line = reader.readLine()) != null) {
                builder.append(line);
            }
        }
        return builder.toString();
    }

    /**
     * Convert the aggregated entity to its JSON representation.
     *
     * @param aggregatedEntity aggregated entity
     * @return json string
     */
    protected abstract String convertToJsonString(E aggregatedEntity);
}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java
new file mode 100644
index 000000000..12f3f7dab
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java
@@ -0,0 +1,72 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
import org.apache.http.Header;
import org.apache.http.client.HttpClient;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.message.BasicHeader;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Utility for building http clients from the rpc sender configuration.
 */
public class HttpClientTool {

    /**
     * Default connect timeout (ms).
     */
    public static final int DEFAULT_CONNECT_TIMEOUT = 3000;

    /**
     * Default socket timeout (ms).
     */
    public static final int DEFAULT_SOCKET_TIMEOUT = 15000;

    /**
     * Default max connections.
     */
    public static final int DEFAULT_MAX_CONN = 10;

    private HttpClientTool() {
        // Utility class: no instances
    }

    /**
     * Create an http client; it is closed automatically by a JVM shutdown hook.
     *
     * @param rpcSenderConfig rpc sender config
     * @return http client
     */
    public static HttpClient createHttpClient(RpcLogSenderConfig rpcSenderConfig) {
        int connectTimeout = rpcSenderConfig.getConnectionTimeout() > 0 ? rpcSenderConfig.getConnectionTimeout() : DEFAULT_CONNECT_TIMEOUT;
        int socketTimeout = rpcSenderConfig.getSocketTimeout() > 0 ? rpcSenderConfig.getSocketTimeout() : DEFAULT_SOCKET_TIMEOUT;
        RequestConfig requestConfig = RequestConfig.custom()
                .setConnectTimeout(connectTimeout)
                .setConnectionRequestTimeout(socketTimeout)
                .setSocketTimeout(socketTimeout)
                .build();
        // Size the connection pool to the number of consumer threads
        int maxConsumeThread = rpcSenderConfig.getCacheConfig().getMaxConsumeThread();
        int maxConn = maxConsumeThread > 0 ? maxConsumeThread : DEFAULT_MAX_CONN;
        HttpClientBuilder clientBuilder = HttpClients.custom();
        String tokenValue = rpcSenderConfig.getAuthConfig().getTokenCode();
        List<Header> defaultHeaders = new ArrayList<>();
        if (null != tokenValue && !tokenValue.trim().equals("")) {
            // Attach the auth token to every request
            defaultHeaders.add(new BasicHeader(rpcSenderConfig.getAuthConfig().getTokenCodeKey(), tokenValue));
        }
        clientBuilder.setDefaultRequestConfig(requestConfig).setDefaultHeaders(defaultHeaders)
                .useSystemProperties().setMaxConnTotal(maxConn).setMaxConnPerRoute(maxConn);
        CloseableHttpClient httpClient = clientBuilder.build();
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            try {
                httpClient.close();
            } catch (IOException e) {
                // Ignore
            }
        }));
        return httpClient;
    }
}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java
new file mode 100644
index 000000000..eaa355e92
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java
@@ -0,0 +1,17 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.entities;

import java.io.File;
import java.util.List;

/**
 * Entity that carries file resources.
 */
public interface Resource {

    /**
     * Resources related to this entity.
     *
     * @return file list
     */
    List<File> getResources();

}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java
new file mode 100644
index 000000000..143f72b8e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java
@@ -0,0 +1,63 @@
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpRequestBase;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;

/**
 * Abstract http action: holds uri, headers and payload, and executes the
 * concrete request method supplied by the subclass.
 *
 * @param <T> concrete http request method type
 */
public abstract class AbstractHttpAction<T extends HttpRequestBase> implements HttpAction {

    protected String uri;

    protected String user;

    public AbstractHttpAction(String uri) {
        this.uri = uri;
    }

    @Override
    public String uri() {
        return uri;
    }

    /**
     * Build the concrete request method (e.g. GET/POST).
     *
     * @return method
     */
    protected abstract T getRequestMethod();

    private final Map<String, String> requestHeaders = new HashMap<>();

    private final Map<String, Object> requestPayload = new HashMap<>();

    @Override
    public Map<String, String> getRequestHeaders() {
        return this.requestHeaders;
    }

    @Override
    public Map<String, Object> getRequestPayload() {
        return this.requestPayload;
    }

    @Override
    public HttpResponse execute(HttpClient httpClient) throws IOException {
        HttpRequestBase requestBase = getRequestMethod();
        try {
            requestBase.setURI(new URI(uri));
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException("URI maybe has wrong format", e);
        }
        // Apply the per-action headers before executing
        requestHeaders.forEach(requestBase::setHeader);
        return httpClient.execute(requestBase);
    }
}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/GetAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/GetAction.java
new file mode 100644
index 000000000..f5a8a5fef
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/GetAction.java
@@ -0,0 +1,4 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;
+
+public class GetAction {
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java
new file mode 100644
index 000000000..87435f8a3
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java
@@ -0,0 +1,38 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Http action
+ * Minimal abstraction of an HTTP request that can execute itself against a client.
+ */
+public interface HttpAction {
+
+    /**
+     * URI path
+     * @return path
+     */
+    String uri();
+
+    /**
+     * Request headers
+     * @return map of header name to header value (applied to the request on execute)
+     */
+    Map getRequestHeaders();
+
+    /**
+     * Request pay load(body)
+     * @return map of payload entries
+     */
+    Map getRequestPayload();
+
+    /**
+     * Execute http action
+     * @param httpClient client used to perform the request
+     * @return http response
+     * @throws IOException on connection or protocol failure
+     */
+    HttpResponse execute(HttpClient httpClient) throws IOException;
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java
new file mode 100644
index 000000000..6ce0d8cdf
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;
+
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+
+/**
+ * Use string to request
+ */
+public class StringPostAction extends AbstractHttpAction {
+
+ /**
+ * Raw string value
+ */
+ private final String rawString;
+ public StringPostAction(String uri, String rawString) {
+ super(uri);
+ this.rawString = rawString;
+ }
+
+ @Override
+ protected HttpPost getRequestMethod() {
+ HttpPost httpPost = new HttpPost();
+ StringEntity stringEntity = new StringEntity(rawString, "UTF-8");
+ stringEntity.setContentType(ContentType.APPLICATION_JSON.toString());
+ httpPost.setEntity(stringEntity);
+ return httpPost;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/utils/StringUtils.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/utils/StringUtils.java
new file mode 100644
index 000000000..4b1660e65
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/utils/StringUtils.java
@@ -0,0 +1,22 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.utils;
+
/**
 * Tool to operate str
 */
public class StringUtils {

    /**
     * Convert string to array by splitting on the given delimiter.
     * @param input string to split (may be null)
     * @param delimiter delimiter, treated as a literal token (not a regex)
     * @return array of segments, or null when the input is null/blank,
     *         the delimiter is null, or the input equals the trimmed delimiter
     */
    public static String[] convertStrToArray(String input, String delimiter){
        if (null != input && null != delimiter && !input.trim().isEmpty() &&
                !input.equals(delimiter.trim())){
            // Bug fix: split on the supplied delimiter (previously hard-coded ","),
            // quoted so regex metacharacters such as "." or "|" are taken literally
            return input.split(java.util.regex.Pattern.quote(delimiter));
        }
        return null;
    }

}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java
new file mode 100644
index 000000000..980a6aec5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.plugin;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+
+/**
+ * Streamis config autowired
+ * SPI hook (discovered via ServiceLoader) that lets external modules customize
+ * the log appender configuration before it is built.
+ */
+public interface StreamisConfigAutowired {
+
+    /**
+     * Log appender config
+     * @param builder builder pre-populated from the logging configuration
+     * @return the resolved appender config
+     * @throws Exception when the configuration cannot be resolved
+     */
+    StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception;
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml
new file mode 100644
index 000000000..c672ffe5d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml
@@ -0,0 +1,64 @@
+
+
+
+ streamis-job-log
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ streamis-job-log-collector
+
+
+ 8
+ 8
+ 2.17.1
+ 1.7.15
+
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-log-collector-core
+ ${streamis.version}
+
+
+
+ org.slf4j
+ slf4j-api
+ ${slf4j.version}
+ provided
+
+
+
+ org.apache.logging.log4j
+ log4j-slf4j-impl
+ ${log4j.version}
+ provided
+
+
+
+ org.apache.logging.log4j
+ log4j-api
+ ${log4j.version}
+ provided
+
+
+
+ org.apache.logging.log4j
+ log4j-core
+ ${log4j.version}
+ provided
+
+
+
+ junit
+ junit
+ ${junit.version}
+ test
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
new file mode 100644
index 000000000..a82f44cbb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
@@ -0,0 +1,128 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Property;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginElement;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.ServiceLoader;
+import java.util.function.BiFunction;
+import java.util.function.Function;
+
+/**
+ * Streamis rpc log appender
+ * Log4j2 appender that formats each event, pushes it into a local log cache,
+ * and lets a StreamisRpcLogSender ship the cached entries to the server over RPC.
+ */
+@Plugin(name = "StreamRpcLog", category = "Core", elementType = "appender", printObject = true)
+public class StreamisRpcLogAppender extends AbstractAppender {
+    private static final String DEFAULT_APPENDER_NAME = "StreamRpcLog";
+    /**
+     * Appender config
+     */
+    private final StreamisLogAppenderConfig appenderConfig;
+
+    /**
+     * Rpc log sender
+     */
+    private final StreamisRpcLogSender rpcLogSender;
+
+    /**
+     * Cache between append() and the RPC sender
+     */
+    private final LogCache logCache;
+
+    /**
+     * Filter function (logger name, message) -> accept; defaults to accept-all
+     */
+    private BiFunction messageFilterFunction = (logger, message) -> true;
+
+    protected StreamisRpcLogAppender(String name, Filter filter,
+                                     Layout extends Serializable> layout,
+                                     boolean ignoreExceptions, Property[] properties,
+                                     StreamisLogAppenderConfig appenderConfig) {
+        super(name, filter, layout, ignoreExceptions, properties);
+        this.appenderConfig = appenderConfig;
+        this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(),
+                this.appenderConfig.getSenderConfig());
+        // Sender errors are logged via the appender's status logger instead of propagating
+        this.rpcLogSender.setExceptionListener((subject, t, message) ->
+                LOGGER.error((null != subject? subject.getClass().getSimpleName() : "") + ": " + message, t));
+        this.logCache = this.rpcLogSender.getOrCreateLogCache();
+        List messageFilters = appenderConfig.getMessageFilters();
+        if (null != messageFilters && messageFilters.size() > 0){
+            // Conjunction of all configured message filters: a message must pass every one
+            messageFilterFunction = (logger, message) ->{
+                for(LogMessageFilter messageFilter : messageFilters){
+                    if (!messageFilter.doFilter(logger, message)){
+                        return false;
+                    }
+                }
+                return true;
+            };
+        }
+        // Close the sender (flushing pending logs) when the JVM exits
+        Runtime.getRuntime().addShutdownHook(new Thread(this.rpcLogSender::close));
+    }
+
+    @Override
+    public void append(LogEvent event) {
+        // NOTE(review): new String(byte[]) uses the platform default charset — confirm it matches the layout's charset
+        String content = new String(getLayout().toByteArray(event));
+        if (messageFilterFunction.apply(event.getLoggerName(), content)) {
+            // Transform to stream log event;
+            StreamisLogEvent logEvent = new StreamisLogEvent(content, event.getTimeMillis());
+            try {
+                this.logCache.cacheLog(logEvent);
+            } catch (InterruptedException e) {
+                LOGGER.error("StreamisRpcLogAppender: {} interrupted when cache the log into the RPC sender, message: {}", this.getName(), e.getMessage());
+            }
+        }
+    }
+
+    /**
+     * Plugin factory invoked by the log4j2 configuration.
+     * Resolves the appender config through any StreamisConfigAutowired SPI implementations
+     * (the last one wins), falling back to the builder defaults when none apply.
+     */
+    @PluginFactory
+    public static StreamisRpcLogAppender createAppender(@PluginAttribute("name") String name,
+                                                        @PluginAttribute("appName") String applicationName,
+                                                        @PluginAttribute("ignoreExceptions") boolean ignoreExceptions,
+                                                        @PluginElement("Filter") final Filter filter,
+                                                        @PluginElement("Layout") Layout extends Serializable> layout,
+                                                        @PluginElement("RpcLogSender")RpcLogSenderConfig rpcLogSenderConfig) throws Exception{
+        if (null == name || name.trim().equals("")){
+            name = DEFAULT_APPENDER_NAME;
+        }
+        if (Objects.isNull(layout)){
+            layout = PatternLayout.createDefaultLayout();
+        }
+        // Search the config autowired class
+        List configAutowiredEntities = new ArrayList<>();
+        StreamisLog4j2AppenderConfig logAppenderConfig = null;
+        ServiceLoader.load(StreamisConfigAutowired.class,
+                StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add);
+        StreamisLog4j2AppenderConfig.Builder builder = new StreamisLog4j2AppenderConfig.Builder(applicationName, filter, rpcLogSenderConfig);
+        for (StreamisConfigAutowired autowired : configAutowiredEntities){
+            logAppenderConfig = (StreamisLog4j2AppenderConfig) autowired.logAppenderConfig(builder);
+        }
+        if (Objects.isNull(logAppenderConfig)){
+            logAppenderConfig = builder.build();
+        }
+        // The autowired config may override the appName attribute; re-read and validate it
+        applicationName = logAppenderConfig.getApplicationName();
+        if (null == applicationName || applicationName.trim().equals("")){
+            throw new IllegalArgumentException("Application name cannot be empty");
+        }
+        System.out.println("StreamisRpcLogAppender: init with config => " + logAppenderConfig);
+        return new StreamisRpcLogAppender(name, logAppenderConfig.getFilter(), layout, ignoreExceptions, Property.EMPTY_ARRAY, logAppenderConfig);
+    }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java
new file mode 100644
index 000000000..adf7dfe06
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java
@@ -0,0 +1,97 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.filter.CompositeFilter;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Appender config for log4j2
+ */
+public class StreamisLog4j2AppenderConfig extends StreamisLogAppenderConfig {
+    /**
+     * Filter in log4j2 (a single filter, or a composite built from several)
+     */
+    private final Filter filter;
+
+    public StreamisLog4j2AppenderConfig(String applicationName, Filter filter,
+                                        RpcLogSenderConfig rpcLogSenderConfig, List messageFilters){
+        super(applicationName, rpcLogSenderConfig, messageFilters);
+        this.filter = filter;
+    }
+
+    /**
+     * Builder collecting log4j2 filters; a filter implementing
+     * LogMessageFilterAdapter also contributes its message filter.
+     */
+    public static class Builder extends StreamisLogAppenderConfig.Builder {
+
+        /**
+         * Filter rules
+         */
+        private final List filters = new ArrayList<>();
+
+        public Builder(String applicationName, Filter filter, RpcLogSenderConfig rpcLogSenderConfig) {
+            super(applicationName, rpcLogSenderConfig);
+            if (Objects.nonNull(filter)) {
+                this.filters.add(filter);
+            }
+        }
+
+        /**
+         * Set filter (replaces all previously added filters and message filters)
+         * @param filter filter
+         * @return builder
+         */
+        public StreamisLog4j2AppenderConfig.Builder setFilter(Filter filter){
+            this.filters.clear();
+            this.messageFilters.clear();
+            this.filters.add(filter);
+            if (filter instanceof LogMessageFilterAdapter){
+                this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+            }
+            return this;
+        }
+
+        /**
+         * Append filter (keeps previously added filters)
+         * @param filter filter
+         * @return builder
+         */
+        public StreamisLog4j2AppenderConfig.Builder withFilter(Filter filter){
+            filters.add(filter);
+            if (filter instanceof LogMessageFilterAdapter){
+                this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+            }
+            return this;
+        }
+
+        /**
+         * Build method
+         * @return config holding one filter, a composite of all added filters, or null when none were added
+         */
+        public StreamisLog4j2AppenderConfig build(){
+            Filter logFilter = null;
+            if (filters.size() > 1){
+                // Merge multiple filters into a single composite filter
+                logFilter = CompositeFilter.createFilters(filters.toArray(new Filter[0]));
+            } else if (!filters.isEmpty()){
+                logFilter = filters.get(0);
+            }
+            return new StreamisLog4j2AppenderConfig(applicationName, logFilter, rpcLogSenderConfig, messageFilters);
+        }
+    }
+
+    public Filter getFilter() {
+        return filter;
+    }
+
+    @Override
+    public String toString() {
+        return "StreamisLog4j2AppenderConfig{" +
+                "applicationName='" + applicationName + '\'' +
+                ", senderConfig=" + senderConfig +
+                ", filter=" + filter +
+                '}';
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java
new file mode 100644
index 000000000..87a10ba85
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+
+/**
+ * AuthConfig Element in log4j2
+ * Extends the core RpcAuthConfig so it can be declared as an AuthConfig plugin element.
+ */
+@Plugin(
+        name = "AuthConfig",
+        category = "Core",
+        printObject = true
+)
+public class RpcAuthConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig {
+
+    public RpcAuthConfig(){
+        super();
+    }
+
+    public RpcAuthConfig(String tokenCodeKey, String tokenCode, String tokenUserKey, String tokenUser) {
+        super(tokenCodeKey, tokenCode, tokenUserKey, tokenUser);
+    }
+
+    /**
+     * Plugin factory invoked during log4j2 configuration parsing.
+     */
+    @PluginFactory
+    public static RpcAuthConfig createRpcAuthConfig(@PluginAttribute("tokenCodeKey") String tokenCodeKey,
+                                                    @PluginAttribute("tokenCode") String tokenCode,
+                                                    @PluginAttribute("tokenUserKey") String tokenUserKey, @PluginAttribute("tokenUser") String tokenUser){
+        return new RpcAuthConfig(tokenCodeKey, tokenCode, tokenUserKey, tokenUser);
+    }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java
new file mode 100644
index 000000000..f9dff1d10
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java
@@ -0,0 +1,40 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginElement;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.util.Integers;
+
+/**
+ * Rpc sender configuration
+ */
+@Plugin(
+        name = "RpcLogSender",
+        category = "Core",
+        printObject = true
+)
+public class RpcLogSenderConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig {
+
+    public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout, int serverRecoveryTimeInSec, int maxDelayTimeInSec,
+                              RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig) {
+        super(address, sendRetryCnt, connectionTimeout, socketTimeout, serverRecoveryTimeInSec, maxDelayTimeInSec, authConfig, cacheConfig, bufferConfig);
+    }
+
+    /**
+     * Plugin factory; numeric attributes arrive as strings and fall back to defaults:
+     * sendRetryCnt=3, connectionTimeout=3000, socketTimeout=15000,
+     * serverRecoveryTimeInSec=5, maxDelayTimeInSec=60.
+     */
+    @PluginFactory
+    public static RpcLogSenderConfig createConfig(
+            @PluginAttribute("address") String address, @PluginAttribute("sendRetryCnt") String sendRetryCnt,
+            @PluginAttribute("connectionTimeout") String connectionTimeout, @PluginAttribute("socketTimeout") String socketTimeout,
+            @PluginAttribute("serverRecoveryTimeInSec") String serverRecoveryTimeInSec, @PluginAttribute("maxDelayTimeInSec") String maxDelayTimeInSec,
+            @PluginAttribute("debugMode")String debugMode,
+            @PluginElement("AuthConfig")RpcAuthConfig authConfig, @PluginElement("SendLogCache") SendLogCacheConfig cacheConfig,
+            @PluginElement("SendBuffer")SendBufferConfig bufferConfig){
+        RpcLogSenderConfig config = new RpcLogSenderConfig(address, Integers.parseInt(sendRetryCnt, 3),
+                Integers.parseInt(connectionTimeout, 3000), Integers.parseInt(socketTimeout, 15000),
+                Integers.parseInt(serverRecoveryTimeInSec, 5), Integers.parseInt(maxDelayTimeInSec, 60),
+                authConfig, cacheConfig, bufferConfig);
+        // Boolean.parseBoolean(null) yields false, so debugMode defaults to off
+        config.setDebugMode(Boolean.parseBoolean(debugMode));
+        return config;
+    }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java
new file mode 100644
index 000000000..936accd72
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java
@@ -0,0 +1,28 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.util.Integers;
+
+/**
+ * Send buffer config element (defaults: size=50, expireTimeInSec=2)
+ */
+@Plugin(
+        name = "SendBuffer",
+        category = "Core",
+        printObject = true
+)
+public class SendBufferConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendBufferConfig {
+
+    public SendBufferConfig() {
+    }
+
+    public SendBufferConfig(int size, long expireTimeInSec) {
+        super(size, expireTimeInSec);
+    }
+
+    // Plugin factory: string attributes with defaults applied via Integers.parseInt
+    @PluginFactory
+    public static SendBufferConfig createBufferConfig(
+            @PluginAttribute("size") String size, @PluginAttribute("expireTimeInSec") String expireTimeInSec){
+        return new SendBufferConfig(Integers.parseInt(size, 50),
+                Integers.parseInt(expireTimeInSec, 2));
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java
new file mode 100644
index 000000000..f4a63c49c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.util.Integers;
+
+/**
+ * Cache config (defaults: size=150, maxConsumeThread=10)
+ */
+@Plugin(
+        name = "SendLogCache",
+        category = "Core",
+        printObject = true
+)
+public class SendLogCacheConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig {
+
+    public SendLogCacheConfig(int size, int maxConsumeThread) {
+        super(size, maxConsumeThread);
+    }
+
+    // Plugin factory: string attributes with defaults applied via Integers.parseInt
+    @PluginFactory
+    public static SendLogCacheConfig createCacheConfig(
+            @PluginAttribute("size") String size, @PluginAttribute("maxConsumeThread") String maxConsumeThread){
+        return new SendLogCacheConfig(Integers.parseInt(size, 150), Integers.parseInt(maxConsumeThread, 10));
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java
new file mode 100644
index 000000000..59a2a3da9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java
@@ -0,0 +1,84 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.filters;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.KeywordMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Marker;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.Logger;
+import org.apache.logging.log4j.core.filter.AbstractFilter;
+import org.apache.logging.log4j.message.Message;
+
+import java.util.Optional;
+
+/**
+ * Threshold filter with keyword
+ * Gates events on a level threshold (WARN when no accept-keywords were configured,
+ * ALL otherwise) and exposes the keyword matching via LogMessageFilterAdapter so
+ * the appender can apply it to the formatted message text.
+ */
+public class KeywordThresholdFilter extends AbstractFilter implements LogMessageFilterAdapter {
+
+    /**
+     * Level threshold
+     */
+    private final Level level;
+
+    /**
+     * Message filter
+     */
+    private final KeywordMessageFilter messageFilter;
+
+    public KeywordThresholdFilter(String[] acceptKeywords, String[] excludeKeywords){
+        // Use accept and deny match
+        super(Filter.Result.ACCEPT, Filter.Result.DENY);
+        // If accept keywords is empty, set the log level to warn
+        if (null == acceptKeywords || acceptKeywords.length <= 0){
+            this.level = Level.WARN;
+            System.out.println("The keywords is empty, set the log threshold level >= " + this.level);
+        } else {
+            this.level = Level.ALL;
+        }
+        this.messageFilter = new KeywordMessageFilter(acceptKeywords, excludeKeywords);
+    }
+
+    @Override
+    public Result filter(LogEvent event) {
+        return filter(event.getLevel());
+    }
+
+    @Override
+    public Result filter(Logger logger, Level level, Marker marker, Message msg, Throwable t) {
+        return filter(level);
+    }
+
+    @Override
+    public Result filter(Logger logger, Level level, Marker marker, Object msg, Throwable t) {
+        return filter(level);
+    }
+
+    @Override
+    public Result filter(Logger logger, Level level, Marker marker, String msg, Object... params) {
+        return filter(level);
+    }
+
+    // ACCEPT events at or above the threshold level, DENY the rest
+    private Result filter(final Level level){
+        return level.isMoreSpecificThan(this.level) ? onMatch : onMismatch;
+    }
+
+    public Level getLevel() {
+        return level;
+    }
+
+    @Override
+    public String toString() {
+        return level.toString() +
+                "|acceptKeywords:[" +
+                Optional.ofNullable(this.messageFilter.getAcceptKeywords()).orElse(new String[]{}).length +
+                "]|excludeKeywords:[" +
+                Optional.ofNullable(this.messageFilter.getExcludeKeywords()).orElse(new String[]{}).length + "]" ;
+    }
+
+    @Override
+    public LogMessageFilter getLogMessageFilter() {
+        return this.messageFilter;
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
new file mode 100644
index 000000000..0bc49c139
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class StreamisLogAppenderTest {
+ private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class);
+ @Test
+ public void appenderLog() throws InterruptedException {
+ int total = 10000;
+ int tps = 1000;
+ long timer = System.currentTimeMillis() + 1000;
+ for (int i = 0; i < total; i++) {
+ if (i > 0 && i % tps == 0) {
+ long sleep = timer - System.currentTimeMillis();
+ if (sleep > 0) {
+ try {
+ Thread.sleep(sleep);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ timer = System.currentTimeMillis() + 1000;
+ }
+ LOG.info("ERROR: Stream Log appender test, sequence id: " + i);
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml
new file mode 100644
index 000000000..27aff1d6d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+
+
+
+
+ `
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml
new file mode 100644
index 000000000..0dcb67247
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml
@@ -0,0 +1,56 @@
+
+
+
+ streamis-job-log
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ streamis-job-log-collector1x
+
+
+ 8
+ 8
+ 1.2.17
+ 1.7.12
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-log-collector-core
+ ${streamis.version}
+
+
+
+ org.slf4j
+ slf4j-api
+ ${slf4j.version}
+ provided
+
+
+
+ org.slf4j
+ slf4j-log4j12
+ ${slf4j.version}
+ provided
+
+
+
+ log4j
+ log4j
+ ${log4j.version}
+ provided
+
+
+
+ junit
+ junit
+ ${junit.version}
+ test
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
new file mode 100644
index 000000000..90a28abf4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
@@ -0,0 +1,233 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.StreamisLog4jAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.Level;
+import org.apache.log4j.SimpleLayout;
+import org.apache.log4j.helpers.LogLog;
+import org.apache.log4j.spi.LoggingEvent;
+
+import java.util.*;
+import java.util.function.BiFunction;
+
+/**
+ * Rpc appender for log4j1
+ */
+public class StreamisRpcLogAppender extends AppenderSkeleton {
+
+ /**
+ * Application name
+ */
+ private String applicationName;
+
+ private String filterEnable = "true";
+ /**
+ * Appender config
+ */
+ private StreamisLog4jAppenderConfig appenderConfig;
+
+ /**
+ * Rpc log sender
+ */
+ private StreamisRpcLogSender rpcLogSender;
+
+ /**
+ * Rpc log sender config
+ */
+ private RpcLogSenderConfig rpcLogSenderConfig = new RpcLogSenderConfig();
+
+
+ /**
+ * Cache
+ */
+ private LogCache logCache;
+
+ /**
+ * Filter function
+ */
+ private BiFunction messageFilterFunction = (logger, message) -> false;
+
+ @Override
+ protected void append(LoggingEvent loggingEvent) {
+ String content = super.getLayout().format(loggingEvent);
+ if (messageFilterFunction.apply(loggingEvent.getLoggerName(), content)) {
+ // Transform to stream log event;
+ // System.currentTimeMills() -> loggingEvent.getTimeStamp()
+ StreamisLogEvent logEvent = new StreamisLogEvent(content, loggingEvent.getTimeStamp());
+ if (Objects.nonNull(logCache)) {
+ try {
+ this.logCache.cacheLog(logEvent);
+ } catch (InterruptedException e) {
+ LogLog.error("StreamisRpcLogAppender: " + this.getName() +
+ " interrupted when cache the log into the RPC sender, message: " + e.getMessage());
+ }
+ }
+ }
+ }
+
+ @Override
+ public void close() {
+ if (Objects.nonNull(this.rpcLogSender)){
+ this.rpcLogSender.close();
+ }
+ }
+
+ @Override
+ public boolean requiresLayout() {
+ return true;
+ }
+
+ @Override
+ public void activateOptions() {
+ if (Objects.nonNull(this.logCache)){
+ return;
+ }
+ if (Objects.isNull(getLayout())){
+ setLayout(new SimpleLayout());
+ }
+ if (System.getProperty("filter.enable") == null){
+ System.setProperty("filter.enable", filterEnable);
+ }
+ // Search the config autowired class
+ List configAutowiredEntities = new ArrayList<>();
+ StreamisLog4jAppenderConfig logAppenderConfig = null;
+ ServiceLoader.load(StreamisConfigAutowired.class,
+ StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add);
+ StreamisLog4jAppenderConfig.Builder builder = new StreamisLog4jAppenderConfig.Builder(this.applicationName,
+ getThreshold(), getFilter(), rpcLogSenderConfig);
+ for (StreamisConfigAutowired autowired : configAutowiredEntities){
+ try {
+ logAppenderConfig = (StreamisLog4jAppenderConfig) autowired.logAppenderConfig(builder);
+ } catch (Exception e) {
+ LogLog.warn("Unable to autowired the config from: " +autowired.getClass().getName(), e);
+ }
+ }
+ if (Objects.isNull(logAppenderConfig)){
+ logAppenderConfig = builder.build();
+ }
+ this.applicationName = logAppenderConfig.getApplicationName();
+ if (null == applicationName || applicationName.trim().equals("")){
+ throw new IllegalArgumentException("Application name cannot be empty");
+ }
+ this.appenderConfig = logAppenderConfig;
+ // Set the threshold to error default
+ setThreshold(Optional.ofNullable(logAppenderConfig.getThreshold()).orElse(Level.ERROR));
+ // First to clear the filters
+ clearFilters();
+ // Then to add filter
+ logAppenderConfig.getFilters().forEach(this::addFilter);
+ System.out.println("StreamisRpcLogAppender: init with config => " + logAppenderConfig);
+ this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(),
+ this.appenderConfig.getSenderConfig());
+ this.rpcLogSender.setExceptionListener((subject, t, message) ->
+ LogLog.error((null != subject? subject.getClass().getSimpleName() : "") + ": " + message, t));
+ this.logCache = this.rpcLogSender.getOrCreateLogCache();
+ List messageFilters = appenderConfig.getMessageFilters();
+ if (null != messageFilters && messageFilters.size() > 0){
+ messageFilterFunction = (logger, message) ->{
+ for(LogMessageFilter messageFilter : messageFilters){
+ if (!messageFilter.doFilter(logger, message)){
+ return false;
+ }
+ }
+ return true;
+ };
+ }
+ }
+
+
+ public String getAppName() {
+ return applicationName;
+ }
+
+ /**
+ * Application name
+ * @param applicationName name
+ */
+ public void setAppName(String applicationName) {
+ this.applicationName = applicationName;
+ }
+
+ public String getFilterEnable() {
+ return filterEnable;
+ }
+
+ public void setFilterEnable(String filterEnable) {
+ this.filterEnable = filterEnable;
+ }
+
+ public void setRpcAddress(String address){
+ this.rpcLogSenderConfig.setAddress(address);
+ }
+
+ public void setRpcConnTimeout(int connectionTimeout){
+ this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout);
+ }
+
+ public void setRpcSocketTimeout(int socketTimeout){
+ this.rpcLogSenderConfig.setSocketTimeout(socketTimeout);
+ }
+ public void setRpcSendRetryCnt(int sendRetryCnt){
+ this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt);
+ }
+
+ public void setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec){
+ this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec);
+ }
+
+ public void setRpcMaxDelayTimeInSec(int maxDelayTimeInSec){
+ this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec);
+ }
+ // Authentication
+ public void setRpcAuthTokenCodeKey(String tokenCodeKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey);
+ }
+
+ public void setRpcAuthTokenUserKey(String tokenUserKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey);
+ }
+
+ public void setRpcAuthTokenUser(String tokenUser){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser);
+ }
+
+ public void setRpcAuthTokenCode(String tokenCode){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode);
+ }
+
+ // Cache configuration
+ public void setRpcCacheSize(int cacheSize){
+ this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize);
+ }
+
+ public void setRpcCacheMaxConsumeThread(int maxConsumeThread){
+ this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread);
+ }
+
+ // Buffer configuration
+ public void setRpcBufferSize(int bufferSize){
+ this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize);
+ }
+
+ public void setRpcBufferExpireTimeInSec(int expireTimeInSec){
+ this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec);
+ }
+
+ public void setDebugMode(boolean debugMode){
+ this.rpcLogSenderConfig.setDebugMode(debugMode);
+ }
+
+ public void setDiscard(boolean discard){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscard(discard);
+ }
+
+ public void setDiscardWindow(int window){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscardWindow(window);
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java
new file mode 100644
index 000000000..f10bef451
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java
@@ -0,0 +1,110 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.log4j.Priority;
+import org.apache.log4j.spi.Filter;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Appender config for log4j1
+ */
+public class StreamisLog4jAppenderConfig extends StreamisLogAppenderConfig {
+
+ /**
+ * Filter in log4j1
+ */
+ private final List filters = new ArrayList<>();
+ /**
+ *
+ */
+ private final Priority threshold;
+
+ protected StreamisLog4jAppenderConfig(String applicationName, Priority threshold, List filters,
+ RpcLogSenderConfig rpcLogSenderConfig, List messageFilters) {
+ super(applicationName, rpcLogSenderConfig, messageFilters);
+ this.threshold = threshold;
+ this.filters.addAll(filters);
+ }
+
+ public static class Builder extends StreamisLogAppenderConfig.Builder{
+
+ /**
+ * Filter rules
+ */
+ private final List filters = new ArrayList<>();
+
+ /**
+ * Threshold
+ */
+ private Priority threshold;
+
+ public Builder(String applicationName, Priority threshold, Filter filter,RpcLogSenderConfig rpcLogSenderConfig) {
+ super(applicationName, rpcLogSenderConfig);
+ this.threshold = threshold;
+ if (Objects.nonNull(filter)) {
+ this.filters.add(filter);
+ }
+ }
+
+ public StreamisLog4jAppenderConfig.Builder setFilter(Filter filter){
+ this.filters.clear();
+ this.messageFilters.clear();
+ this.filters.add(filter);
+ if (filter instanceof LogMessageFilterAdapter){
+ this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+ }
+ return this;
+ }
+
+ public StreamisLog4jAppenderConfig.Builder withFilter(Filter filter){
+ filters.add(filter);
+ if (filter instanceof LogMessageFilterAdapter){
+ this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+ }
+ return this;
+ }
+
+ /**
+ * Set threshold
+ * @param threshold threshold
+ * @return builder
+ */
+ public StreamisLog4jAppenderConfig.Builder threshold(Priority threshold, boolean needMoreSpecific){
+ if (needMoreSpecific){
+ if (this.threshold == null || threshold.isGreaterOrEqual(this.threshold)){
+ this.threshold = threshold;
+ }
+ }else {
+ this.threshold = threshold;
+ }
+ return this;
+ }
+ public StreamisLog4jAppenderConfig build(){
+ return new StreamisLog4jAppenderConfig(applicationName, threshold, filters, rpcLogSenderConfig, messageFilters);
+ }
+ }
+
+ public List getFilters() {
+ return filters;
+ }
+
+ public Priority getThreshold() {
+ return threshold;
+ }
+
+ @Override
+ public String toString() {
+ return "StreamisLog4jAppenderConfig{" +
+ "applicationName='" + applicationName + '\'' +
+ ", senderConfig=" + senderConfig +
+ ", filters=" + filters +
+ ", threshold=" + threshold +
+ '}';
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java
new file mode 100644
index 000000000..1fe60b308
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.filters;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.KeywordMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.log4j.spi.Filter;
+import org.apache.log4j.spi.LoggingEvent;
+
+/**
+ * All match filter with keyword
+ */
+public class KeywordAllMatchFilter extends Filter implements LogMessageFilterAdapter {
+
+ /**
+ * Message filter
+ */
+ private final KeywordMessageFilter messageFilter;
+
+ public KeywordAllMatchFilter(String[] acceptKeywords, String[] excludeKeywords){
+ this.messageFilter = new KeywordMessageFilter(acceptKeywords, excludeKeywords);
+ }
+ @Override
+ public int decide(LoggingEvent event) {
+ return Filter.ACCEPT;
+ }
+
+ @Override
+ public LogMessageFilter getLogMessageFilter() {
+ return this.messageFilter;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
new file mode 100644
index 000000000..0dcca02c9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import org.apache.log4j.PropertyConfigurator;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class StreamisLogAppenderTest {
+ private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class);
+ @Test
+ public void appenderLog() throws InterruptedException {
+ PropertyConfigurator.configure(StreamisLogAppenderTest.class.getResource("/log4j.properties").getPath());
+ int total = 1000;
+ int tps = 100;
+ long timer = System.currentTimeMillis() + 1000;
+ for(int i = 0; i < total; i ++){
+ if (i > 0 && i % tps == 0){
+ long sleep = timer - System.currentTimeMillis();
+ if (sleep > 0){
+ Thread.sleep(sleep);
+ }
+ timer = System.currentTimeMillis() + 1000;
+ }
+ LOG.info("Stream Log appender test, sequence id: " + i);
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties
new file mode 100644
index 000000000..8801938ab
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties
@@ -0,0 +1,44 @@
+#
+# Copyright 2021 WeBank
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+### set log levels ###
+
+log4j.rootCategory=INFO,stream
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.Threshold=INFO
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n
+
+log4j.appender.stream=com.webank.wedatasphere.streamis.jobmanager.log.collector.StreamisRpcLogAppender
+log4j.appender.stream.appName=stream_application
+log4j.appender.stream.Threshold=INFO
+log4j.appender.stream.filterEnable=false
+log4j.appender.stream.layout=org.apache.log4j.PatternLayout
+log4j.appender.stream.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+log4j.appender.stream.rpcConnTimeout=3000
+log4j.appender.stream.rpcSocketTimeout=15000
+log4j.appender.stream.rpcSendRetryCnt=3
+log4j.appender.stream.rpcServerRecoveryTimeInSec=5
+log4j.appender.stream.rpcMaxDelayTimeInSec=60
+log4j.appender.stream.rpcAuthTokenCodeKey=
+log4j.appender.stream.rpcAuthTokenUserKey=
+log4j.appender.stream.rpcAuthTokenUser=
+log4j.appender.stream.rpcAuthTokenCode=
+log4j.appender.stream.rpcCacheSize=200
+log4j.appender.stream.rpcCacheMaxConsumeThread=1
+log4j.appender.stream.rpcBufferSize=50
+log4j.appender.stream.rpcBufferExpireTimeInSec=2
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml
new file mode 100644
index 000000000..9866eede4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml
@@ -0,0 +1,75 @@
+
+
+
+ streamis-job-log
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ xspark-streamis-log-collector
+
+
+ 8
+ 8
+ 1.2.17
+ 1.7.12
+
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-log-collector1x
+ ${streamis.version}
+
+
+
+ org.slf4j
+ slf4j-api
+ ${slf4j.version}
+ provided
+
+
+
+ org.slf4j
+ slf4j-log4j12
+ ${slf4j.version}
+ provided
+
+
+
+ log4j
+ log4j
+ ${log4j.version}
+ provided
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-assembly-plugin
+ 2.3
+
+
+ assemble
+
+ single
+
+
+ install
+
+
+
+
+ src/main/assembly/package.xml
+
+ false
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml
new file mode 100644
index 000000000..8da27bf2c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml
@@ -0,0 +1,19 @@
+
+
+ package
+
+
+ jar
+
+ false
+
+
+ /
+ true
+ runtime
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java
new file mode 100644
index 000000000..2d92da75e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java
@@ -0,0 +1,106 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.spark;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.StreamisLog4jAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.filters.KeywordAllMatchFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.utils.StringUtils;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.log4j.Level;
+
+import java.util.Optional;
+/**
+ * Autoconfigure the streamis config in Spark environment
+ */
+public class SparkStreamisConfigAutowired implements StreamisConfigAutowired {
+
+ private static final String DEBUG_MODE = "log.debug.mode";
+
+ private static final String DISCARD_SWITCH = "log.discard";
+
+ private static final String DISCARD_WINDOW = "log.discard.window";
+
+ private static final String APP_NAME_CONFIG = "app.name";
+
+ private static final String SERVER_ADDRESS_CONFIG = "streamis.url";
+
+ private static final String COLLECTOR_URI_CONFIG = "streamis.log.collector.uri";
+
+ private static final String PROJECT_NAME_CONFIG = "project.name";
+
+ private static final String DEFAULT_COLLECTOR_URI = "/api/rest_j/v1/streamis/streamJobManager/log/collect/events";
+
+ private static final String FILTER_ENABLE = "filter.enable";
+
+ private static final String FILTER_KEYWORD = "filter.keywords";
+
+ private static final String FILTER_KEYWORD_EXCLUDE = "filter.keywords.exclude";
+ @Override
+ public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception {
+ // Load the config from system properties
+ String debugMode = System.getProperty(DEBUG_MODE, "false");
+ if (null != debugMode && debugMode.equals("true")){
+ builder.setDebugMode(true);
+ }
+ String discard = System.getProperty(DISCARD_SWITCH, "true");
+ if (null != discard && discard.equals("true")){
+ builder.setDiscard(true);
+ }
+ String discardWind = System.getProperty(DISCARD_WINDOW, "2");
+ if (null != discardWind){
+ try{
+ builder.setDiscardWindow(Integer.parseInt(discardWind));
+ } catch (Exception e){
+ // Ignore
+ }
+ }
+ Optional.ofNullable(System.getProperty(APP_NAME_CONFIG)).ifPresent(appName -> {
+ String projectName = System.getProperty(PROJECT_NAME_CONFIG);
+ if (null != projectName && !projectName.trim().equals("")){
+ appName = projectName + "." + appName;
+ }
+ System.out.println("Spark env to streamis: application name =>" + appName);
+ builder.setAppName(appName);
+ });
+ String serverAddress = System.getProperty(SERVER_ADDRESS_CONFIG);
+ if (null != serverAddress && !serverAddress.trim().equals("")){
+ if (serverAddress.endsWith("/")){
+ serverAddress = serverAddress.substring(0, serverAddress.length() - 1);
+ }
+ String collectorUri = System.getProperty(COLLECTOR_URI_CONFIG, DEFAULT_COLLECTOR_URI);
+ if (null != collectorUri && !collectorUri.trim().equals("")){
+ if (!collectorUri.startsWith("/")){
+ collectorUri = "/" + collectorUri;
+ }
+ serverAddress += collectorUri;
+ }
+ System.out.println("Spark env to streamis: server address =>" + serverAddress);
+ builder.setRpcAddress(serverAddress);
+ }
+ String user = System.getenv("USER");
+ if (null == user || user.trim().equals("")){
+ user = System.getProperty("user.name", "hadoop");
+ }
+ System.out.println("Spark env to streamis: log user =>" + user);
+ builder.setRpcAuthTokenUser(user);
+ // Set filter
+ boolean filterEnable = true;
+ try {
+ filterEnable = Boolean.parseBoolean(System.getProperty(FILTER_ENABLE, "true"));
+ }catch (Exception e){
+ // ignore
+ }
+ if (filterEnable && builder instanceof StreamisLog4jAppenderConfig.Builder){
+ StreamisLog4jAppenderConfig.Builder log4jBuilder = ((StreamisLog4jAppenderConfig.Builder) builder);
+ String[] acceptKeywords = StringUtils.convertStrToArray(System.getProperty(FILTER_KEYWORD, "ERROR"), ",");
+ KeywordAllMatchFilter keywordAllMatchFilter = new KeywordAllMatchFilter(
+ acceptKeywords,
+ StringUtils.convertStrToArray(System.getProperty(FILTER_KEYWORD_EXCLUDE), ","));
+ if (null == acceptKeywords || acceptKeywords.length <=0 ){
+ System.out.println("The keywords is empty, set the log threshold level >= " + Level.WARN);
+ log4jBuilder.threshold(Level.WARN, true);
+ }
+ log4jBuilder.setFilter(keywordAllMatchFilter);
+ }
+ return builder.build();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dac2fcaed
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.spark.SparkStreamisConfigAutowired
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/pom.xml b/streamis-jobmanager/streamis-job-log/pom.xml
new file mode 100644
index 000000000..4b0219d96
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/pom.xml
@@ -0,0 +1,29 @@
+
+
+
+ streamis-jobmanager
+ com.webank.wedatasphere.streamis
+ 0.2.4
+
+ 4.0.0
+
+ streamis-job-log
+ pom
+
+ job-log-collector/streamis-job-log-collector-core
+ job-log-collector/streamis-job-log-collector
+ job-log-collector/streamis-job-log-collector1x
+ job-log-collector/flink-streamis-log-collector
+ job-log-collector/xspark-streamis-log-collector
+ streamis-job-log-server
+ streamis-job-log-common
+
+
+
+ 8
+ 8
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml
new file mode 100644
index 000000000..886d7ed30
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml
@@ -0,0 +1,29 @@
+
+
+
+ streamis-job-log
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../pom.xml
+
+ 4.0.0
+
+ streamis-job-log-common
+
+
+ 8
+ 8
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java
new file mode 100644
index 000000000..da3a7054b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java
@@ -0,0 +1,34 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.entities;
+
+
/**
 * Element defined of log
 */
public interface LogElement {

    /**
     * Sequence id
     * @return seq id
     */
    int getSequenceId();

    /**
     * Log time
     * @return log time in milliseconds since the epoch
     */
    long getLogTimeStamp();

    /**
     * Get content
     * @return content array
     */
    String[] getContents();

    /**
     * The importance of log
     * 0: useless, 1: normal, 2: important
     * @return mark value (0, 1 or 2)
     */
    int mark();

}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java
new file mode 100644
index 000000000..6f8645f77
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java
@@ -0,0 +1,84 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.entities;
+
+
+import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Log event for streamis
+ */
+public class StreamisLogEvent implements LogElement, Serializable {
+
+ /**
+ * Log time
+ */
+ private long logTimeInMills;
+
+ /**
+ * Log content
+ */
+ private String content;
+
+ /**
+ * Mark
+ */
+ private int mark;
+
+ public StreamisLogEvent(){
+
+ }
+ public StreamisLogEvent(String content, long logTimeInMills){
+ this.content = content;
+ this.logTimeInMills = logTimeInMills;
+ }
+ @Override
+ public int getSequenceId() {
+ return 0;
+ }
+
+ @Override
+ public long getLogTimeStamp() {
+ return this.logTimeInMills;
+ }
+
+ @Override
+ public String[] getContents() {
+ return new String[]{content};
+ }
+
+ public String getContent() {
+ return content;
+ }
+
+ @Override
+ public int mark() {
+ return this.mark;
+ }
+
+ public void setLogTimeStamp(long logTimeInMills) {
+ this.logTimeInMills = logTimeInMills;
+ }
+
+ public void setContent(String content) {
+ this.content = content;
+ }
+
+ public void setMark(int mark) {
+ this.mark = mark;
+ }
+
+ public void setSequenceId(int sequenceId){
+ // Ignore
+ }
+
+ public String toJson(){
+ return "{" +
+ "\"logTimeStamp\":" + logTimeInMills +
+ ",\"content\":" + (Objects.isNull(content)? null : "\"" + JsonTool.escapeStrValue(content) + "\"") +
+ ",\"sequenceId\":0"
+ + "}";
+
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java
new file mode 100644
index 000000000..f2843c8af
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java
@@ -0,0 +1,112 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.entities;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+public class StreamisLogEvents implements LogElement, Serializable {
+
+ /**
+ * Application name
+ */
+ private String appName;
+ /**
+ * Log time
+ */
+ private long logTimeInMills;
+
+ private StreamisLogEvent[] events;
+ public StreamisLogEvents(){
+
+ }
+ public StreamisLogEvents(String applicationName, StreamisLogEvent[] events){
+ this.appName = applicationName;
+ this.events = events;
+ long maxTime = -1;
+ StreamisLogEvent lastEvent = events[events.length - 1];
+ if (null == lastEvent) {
+ for (StreamisLogEvent event : events) {
+ long time = event.getLogTimeStamp();
+ if (time > maxTime) {
+ maxTime = time;
+ }
+ }
+ this.logTimeInMills = maxTime;
+ }else {
+ this.logTimeInMills = lastEvent.getLogTimeStamp();
+ }
+
+ }
+
+ @Override
+ public int getSequenceId() {
+ return 0;
+ }
+
+ @Override
+ public long getLogTimeStamp() {
+ return this.logTimeInMills;
+ }
+
+
+ @Override
+ public String[] getContents() {
+ String[] contents = new String[events.length];
+ for(int i = 0 ; i < contents.length; i++){
+ contents[i] = events[i].getContent();
+ }
+ return contents;
+ }
+
+ @Override
+ public int mark() {
+ return 1;
+ }
+
+ public String getAppName() {
+ return appName;
+ }
+
+ public StreamisLogEvent[] getEvents() {
+ return events;
+ }
+
+ public void setAppName(String appName) {
+ this.appName = appName;
+ }
+
+ public void setLogTimeStamp(long logTimeInMills) {
+ this.logTimeInMills = logTimeInMills;
+ }
+
+ public void setEvents(StreamisLogEvent[] events) {
+ this.events = events;
+ }
+
+ public void setSequenceId(int sequenceId){
+ // Ignore
+ }
+
+ public String toJson(){
+ return "{" +
+ "\"logTimeStamp\":" + logTimeInMills +
+ ",\"appName\":" + (Objects.isNull(appName)? null : "\"" + JsonTool.escapeStrValue(appName) + "\"") +
+ ",\"events\":[" +
+ (Objects.isNull(events) || events.length <=0 ? "" : joinEvents(events, ",") ) + "]" +
+ ",\"sequenceId\":0"
+ + "}";
+ }
+
+ private String joinEvents(StreamisLogEvent[] events, String separator){
+ StringBuilder builder = new StringBuilder();
+ for(int i = 0; i < events.length; i ++){
+ builder.append(events[i].toJson());
+ if (i < events.length - 1){
+ builder.append(separator);
+ }
+ }
+ return builder.toString();
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java
new file mode 100644
index 000000000..0822e820d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java
@@ -0,0 +1,63 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.json;
+
+import java.util.Locale;
+
+/**
+ * Static helpers for building JSON strings by hand (used by the log event entities).
+ */
+public class JsonTool {
+
+ /** Hex digit table used when unicode-escaping control characters. */
+ static final char[] HEX_DIGITS = new char[] {'0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F'};
+
+ /** Utility class: not instantiable. */
+ private JsonTool() {
+ }
+
+ /**
+ * Avoid the special char: escape a string so it can be embedded in a JSON string value.
+ * Quote, backslash, slash and the common whitespace characters are emitted in their
+ * two-character escaped form; other control characters (code &lt; 32) are unicode-escaped.
+ * @param input input string
+ * @return output string safe to place between JSON double quotes
+ */
+ public static String escapeStrValue(String input){
+ char[] chars = input.toCharArray();
+ StringBuilder sb = new StringBuilder();
+ for (char c : chars) {
+ switch (c) {
+ case '\"':
+ sb.append("\\\"");
+ break;
+ case '\\':
+ sb.append("\\\\");
+ break;
+ case '/':
+ sb.append("\\/");
+ break;
+ case '\b':
+ sb.append("\\b");
+ break;
+ case '\f':
+ sb.append("\\f");
+ break;
+ case '\n':
+ sb.append("\\n");
+ break;
+ case '\r':
+ sb.append("\\r");
+ break;
+ case '\t':
+ sb.append("\\t");
+ break;
+ default:
+ sb.append((c < 32) ? escapeUnicode(c) : c);
+ }
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Escape unicode: render a char code as a JSON \\uXXXX escape.
+ * Codes above 0xffff fall back to the plain hex string (not reachable from
+ * escapeStrValue, which only passes char values).
+ * @param code char code
+ * @return escaped string, e.g. "\\u0001"
+ */
+ private static String escapeUnicode(int code){
+ if (code > 0xffff){
+ return "\\u" + Integer.toHexString(code).toUpperCase(Locale.ENGLISH);
+ } else {
+ return "\\u" + HEX_DIGITS[(code >> 12) & 15]
+ + HEX_DIGITS[(code >> 8) & 15] + HEX_DIGITS[(code >> 4) & 15] + HEX_DIGITS[code & 15];
+ }
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml
new file mode 100644
index 000000000..8b4714e25
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml
@@ -0,0 +1,37 @@
+
+
+
+ streamis-job-log
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../pom.xml
+
+ 4.0.0
+
+ streamis-job-log-server
+
+
+ 8
+ 8
+
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-log-common
+ 0.2.4
+
+
+ org.apache.linkis
+ linkis-module
+
+
+ junit
+ junit
+ ${junit.version}
+ test
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java
new file mode 100644
index 000000000..f3f32e363
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java
@@ -0,0 +1,24 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.StreamisJobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.StorageThresholdDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.RoundRobinLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.SimpleLoadBalancer;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+/**
+ * Spring auto configuration for the streamis job log storage.
+ */
+@Configuration
+public class StreamisJobLogAutoConfiguration {
+
+ /**
+ * Build the default log storage bean unless the application already defines one:
+ * round-robin balancing first, then the simple balancer as fallback, with a
+ * threshold-based bucket drift policy. init/destroy lifecycle is handled by Spring.
+ * @return job log storage bean
+ */
+ @Bean(initMethod = "init", destroyMethod = "destroy")
+ @ConditionalOnMissingBean(JobLogStorage.class)
+ public JobLogStorage streamisJobLogStorage(){
+ StreamisJobLogStorage storage = new StreamisJobLogStorage();
+ storage.addLoadBalancer(new RoundRobinLoadBalancer());
+ storage.addLoadBalancer(new SimpleLoadBalancer());
+ storage.setBucketDriftPolicy(new StorageThresholdDriftPolicy());
+ return storage;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java
new file mode 100644
index 000000000..1f6777e9f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java
@@ -0,0 +1,66 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.config;
+
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.TimeType;
+
+/**
+ * Store the configuration defined for job log
+ */
+public class StreamJobLogConfig {
+
+ /**
+ * Set the log restful api as no-auth (when true the collect api trusts the
+ * "Token-User" header instead of requiring a login user)
+ */
+ public static final CommonVars NO_AUTH_REST = CommonVars.apply("wds.stream.job.log.restful.no-auth", false);
+
+ /**
+ * The threshold of log storage: disk usage ratio in [0,1]; a context at or above
+ * it is weighted 0 and receives no new buckets
+ */
+ public static final CommonVars STORAGE_THRESHOLD = CommonVars.apply("wds.stream.job.log.storage.threshold", 0.9);
+
+ /**
+ * Max weight of storage context (upper bound of the normalized context weight)
+ */
+ public static final CommonVars STORAGE_CONTEXT_MAX_WEIGHT = CommonVars.apply("wds.stream.job.log.storage.context.max-weight", 5);
+
+ /**
+ * Paths of storage context (comma separated directory list)
+ */
+ public static final CommonVars STORAGE_CONTEXT_PATHS = CommonVars.apply("wds.stream.job.log.storage.context.paths", "/data/stream/log");
+
+ /**
+ * Bucket monitor name (thread name used by the periodic bucket monitor)
+ */
+ public static final CommonVars BUCKET_MONITOR_NAME = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.name", "Log-Storage-Bucket-Monitor");
+
+ /**
+ * Bucket monitor interval
+ */
+ public static final CommonVars BUCKET_MONITOR_INTERVAL = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.interval", new TimeType("2m"));
+
+ /**
+ * Bucket max idle time (buckets idle longer than this are closed by the monitor)
+ */
+ public static final CommonVars BUCKET_MAX_IDLE_TIME = CommonVars.apply("wds.stream.job.log.storage.bucket.max-idle-time", new TimeType("12h"));
+
+ /**
+ * Bucket root path
+ */
+ public static final CommonVars BUCKET_ROOT_PATH = CommonVars.apply("wds.stream.job.log.storage.bucket.root-path", "/data/stream/log");
+ /**
+ * Max active part size in bucket (presumably MB — JobLogBucketConfig documents it as MB; TODO confirm)
+ */
+ public static final CommonVars BUCKET_MAX_ACTIVE_PART_SIZE = CommonVars.apply("wds.stream.job.log.storage.bucket.max-active-part-size", 100L);
+
+ /**
+ * Compression of part in bucket
+ */
+ public static final CommonVars BUCKET_PART_COMPRESS = CommonVars.apply("wds.stream.job.log.storage.bucket.part-compress", "gz");
+
+ /**
+ * Bucket layout (log line layout pattern)
+ */
+ public static final CommonVars BUCKET_LAYOUT = CommonVars.apply("wds.stream.job.log.storage.bucket.layout", "%msg");
+
+ /** Days to hold finished bucket parts before cleanup */
+ public static final CommonVars BUCKET_PART_HOLD_DAY = CommonVars.apply("wds.stream.job.log.storage.bucket.part-hold-day", 30);
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java
new file mode 100644
index 000000000..8676c5778
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java
@@ -0,0 +1,12 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.entities;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+/**
+ * Server side view of the common log events entity: hides the derived contents
+ * array from JSON serialization (the events themselves are serialized instead).
+ */
+public class StreamisLogEvents extends com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents {
+
+ /** Same value as the parent contents, but excluded from JSON output. */
+ @Override
+ @JsonIgnore
+ public String[] getContents() {
+ return super.getContents();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java
new file mode 100644
index 000000000..56edc2dd3
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+import org.apache.linkis.common.exception.ExceptionLevel;
+import org.apache.linkis.common.exception.LinkisRuntimeException;
+
+/**
+ * Stream job log exception
+ */
+public class StreamJobLogException extends ErrorException {
+
+ public StreamJobLogException(int errCode, String desc) {
+ super(errCode, desc);
+ }
+
+ /**
+ * Build with a root cause. The cause is chained so the original stack trace is
+ * not lost when the exception is logged (the previous version dropped it).
+ * @param errCode error code
+ * @param desc description
+ * @param t root cause
+ */
+ public StreamJobLogException(int errCode, String desc, Throwable t){
+ super(errCode, desc);
+ this.initCause(t);
+ }
+
+ /**
+ * Unchecked variant for contexts where a checked exception cannot propagate.
+ */
+ public static class Runtime extends LinkisRuntimeException{
+
+ public Runtime(int errCode, String desc) {
+ super(errCode, desc);
+ }
+
+ @Override
+ public ExceptionLevel getLevel() {
+ return ExceptionLevel.ERROR;
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java
new file mode 100644
index 000000000..27104311e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java
@@ -0,0 +1,64 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.restful;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.entities.StreamisLogEvents;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.service.StreamisJobLogService;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.server.Message;
+import org.apache.linkis.server.security.SecurityFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RestController;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+
+@RestController
+@RequestMapping(path = "/streamis/streamJobManager/log")
+public class JobLogRestfulApi {
+
+ private static final Logger LOG = LoggerFactory.getLogger(JobLogRestfulApi.class);
+
+ @Resource
+ private StreamisJobLogService streamisJobLogService;
+
+ /**
+ * Collect a batch of stream log events and store them under the resolved user.
+ * When the no-auth switch is on, the "Token-User" header identifies the owner,
+ * falling back to the login user and finally "hadoop"; otherwise a login user is required.
+ * Events without an application name are ignored.
+ * @param events log events batch
+ * @param request http request
+ * @return ok message, or an error message when storing fails
+ */
+ @RequestMapping(value = "/collect/events", method = RequestMethod.POST)
+ public Message collectEvents(@RequestBody StreamisLogEvents events, HttpServletRequest request){
+ Message result;
+ try{
+ if (StringUtils.isBlank(events.getAppName())){
+ return Message.ok("Ignore the stream log events without application name");
+ }
+ String userName;
+ if (StreamJobLogConfig.NO_AUTH_REST.getValue()){
+ userName = request.getHeader("Token-User");
+ if (StringUtils.isBlank(userName)){
+ try {
+ userName = SecurityFilter.getLoginUsername(request);
+ }catch(Exception e){
+ // Ignore the auth failure and fall back to the default user
+ }
+ if (StringUtils.isBlank(userName)){
+ userName = "hadoop";
+ }
+ }
+ }else {
+ userName = SecurityFilter.getLoginUsername(request);
+ if (StringUtils.isBlank(userName)) {
+ throw new StreamJobLogException(-1, "The request should has token user");
+ }
+ }
+ this.streamisJobLogService.store(userName, events);
+ result = Message.ok();
+ }catch (Exception e){
+ String message = "Fail to collect stream log events, message: " + e.getMessage();
+ // Fix: log the full stack trace; previously the exception was swallowed and
+ // only the summary message was returned to the client
+ LOG.warn(message, e);
+ result = Message.error(message);
+ }
+ return result;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java
new file mode 100644
index 000000000..8fea4dab6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java
@@ -0,0 +1,35 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.service;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
+
+/**
+ * Default implementation of the job log service, backed by the shared log storage.
+ */
+@Service
+public class DefaultStreamisJobLogService implements StreamisJobLogService{
+
+ @Resource
+ private JobLogStorage jobLogStorage;
+
+ private JobLogBucketConfig jobLogBucketConfig;
+
+ /** Prepare the shared bucket configuration once the bean is constructed. */
+ @PostConstruct
+ public void init(){
+ jobLogBucketConfig = new JobLogBucketConfig();
+ }
+
+ /**
+ * Store the events into the bucket owned by (user, appName).
+ * Events are dropped silently when no bucket can be obtained.
+ */
+ @Override
+ public void store(String user, StreamisLogEvents events) {
+ JobLogBucket bucket = jobLogStorage.getOrCreateBucket(user, events.getAppName(), jobLogBucketConfig);
+ if (bucket == null){
+ // Cannot get a log bucket, drop the events
+ return;
+ }
+ bucket.getBucketStorageWriter().write(events);
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java
new file mode 100644
index 000000000..e8f8bfe4e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java
@@ -0,0 +1,16 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.service;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents;
+
+/**
+ * Job log service: accepts batches of stream log events and persists them.
+ */
+public interface StreamisJobLogService {
+
+ /**
+ * Store log events
+ * @param user owner of the events (used to resolve the target bucket)
+ * @param events events to persist
+ */
+ void store(String user, StreamisLogEvents events);
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java
new file mode 100644
index 000000000..4299104d5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java
@@ -0,0 +1,49 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContextListener;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.JobLogStorageLoadBalancer;
+
+/**
+ * Storage of job log
+ */
+public interface JobLogStorage {
+
+ /**
+ * Get the existing bucket for (userName, appName) or create a new one
+ * @param userName user own
+ * @param appName application name
+ * @param bucketConfig bucket config
+ * @return the bucket; may be null when no storage context is available — TODO confirm against implementations
+ */
+ JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig);
+
+ /**
+ * Set bucket drift policy
+ * @param bucketDriftPolicy bucket drift policy
+ */
+ void setBucketDriftPolicy(JobLogBucketDriftPolicy bucketDriftPolicy);
+
+ /**
+ * Add context listener
+ * @param listener listener
+ */
+ void addContextListener(JobLogStorageContextListener listener);
+
+ /**
+ * Add load balancer
+ * @param loadBalancer load balancer
+ */
+ void addLoadBalancer(JobLogStorageLoadBalancer loadBalancer);
+ /**
+ * Init method
+ */
+ void init() throws Exception;
+
+ /**
+ * Destroy method
+ */
+ void destroy();
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java
new file mode 100644
index 000000000..ccadd721d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java
@@ -0,0 +1,332 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketState;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.*;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.JobLogStorageLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.RoundRobinLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils.MemUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.common.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Component;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import static com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig.BUCKET_MONITOR_INTERVAL;
+
+/**
+ * Job log storage
+ */
+public class StreamisJobLogStorage implements JobLogStorage{
+
+ private static final Logger LOG = LoggerFactory.getLogger(StreamisJobLogStorage.class);
+
+ /**
+ * Storage context
+ */
+ private final List storageContexts = new CopyOnWriteArrayList<>();
+
+ /**
+ * Drift policy
+ */
+ private JobLogBucketDriftPolicy bucketDriftPolicy;
+ /**
+ * Buckets
+ */
+ private final Map buckets = new ConcurrentHashMap<>();
+
+ /**
+ * Context listeners
+ */
+ private final List contextListeners = new ArrayList<>();
+
+ /**
+ * Load balancer
+ */
+ private final List loadBalancers = new ArrayList<>();
+
+ /**
+ * Constructor cache
+ */
+ private final Map> bucketConstructors = new ConcurrentHashMap<>();
+
+ /**
+ * To monitor the status of buckets
+ */
+ private Future> monitorThread;
+
+ /**
+ * Get the cached bucket for (userName, appName) or create it reflectively:
+ * a storage context is chosen via the load balancers, then a bucket class
+ * constructor shaped (String, JobLogStorageContext, JobLogBucketConfig, ...) is
+ * looked up (and cached) and invoked. Returns null when no context is available
+ * or the bucket cannot be instantiated.
+ */
+ @Override
+ public JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig) {
+ String bucketName = toBucketName(userName, appName);
+ return buckets.computeIfAbsent(bucketName, name -> {
+ // First to choose context
+ JobLogStorageContext context = chooseStorageContext(bucketName, bucketConfig);
+ if (null != context){
+ Class extends JobLogBucket> bucketClass = bucketConfig.getBucketClass();
+ if (Objects.nonNull(bucketClass)) {
+ Constructor> constructor = bucketConstructors.computeIfAbsent(bucketClass.getName(), className -> {
+ Constructor>[] constructors = bucketClass.getConstructors();
+ Constructor> matchConstructor = null;
+ for (Constructor> constructor1 : constructors) {
+ Class>[] inputParams = constructor1.getParameterTypes();
+ if (inputParams.length >= 3 && inputParams[0].equals(String.class)
+ && inputParams[1].equals(JobLogStorageContext.class) && inputParams[2].equals(JobLogBucketConfig.class)) {
+ matchConstructor = constructor1;
+ break;
+ }
+ }
+ return matchConstructor;
+ });
+ if (Objects.nonNull(constructor)) {
+ try {
+ return (JobLogBucket) constructor.newInstance(bucketName, context, bucketConfig);
+ } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+ LOG.warn("Cannot create storage log bucket from [{}]", bucketClass.getName(), e);
+ }
+ }
+ }
+ }
+ return null;
+ });
+ }
+
+ /** Set the policy that decides when a bucket should drift to another storage context. */
+ @Override
+ public void setBucketDriftPolicy(JobLogBucketDriftPolicy bucketDriftPolicy) {
+ this.bucketDriftPolicy = bucketDriftPolicy;
+ }
+
+ /** Register a listener for storage context events. */
+ @Override
+ public void addContextListener(JobLogStorageContextListener listener) {
+ this.contextListeners.add(listener);
+ }
+
+ /**
+ * Register a load balancer; a balancer that also implements
+ * JobLogStorageContextListener is registered as a context listener too.
+ */
+ @Override
+ public void addLoadBalancer(JobLogStorageLoadBalancer loadBalancer) {
+ this.loadBalancers.add(loadBalancer);
+ if (loadBalancer instanceof JobLogStorageContextListener){
+ addContextListener((JobLogStorageContextListener) loadBalancer);
+ }
+ }
+
+ /**
+ * Initialize the storage: build the storage contexts from the configured paths,
+ * notify context listeners, init the load balancers, and start the periodic
+ * bucket monitor which refreshes context weights and closes idle or drifting buckets.
+ */
+ @Override
+ @PostConstruct
+ public synchronized void init() throws Exception{
+ initStorageContexts(StringUtils.split(StreamJobLogConfig.STORAGE_CONTEXT_PATHS.getValue(), ","));
+ onContextEvent(new ContextLaunchEvent(new ArrayList<>(this.storageContexts)));
+ // Init load balancer
+ initLoadBalancers();
+ if (Objects.isNull(monitorThread)){
+ monitorThread = Utils.defaultScheduler().scheduleAtFixedRate(() -> {
+ String threadName = Thread.currentThread().getName();
+ try {
+ Thread.currentThread().setName(StreamJobLogConfig.BUCKET_MONITOR_NAME.getValue());
+ long maxIdleTime = StreamJobLogConfig.BUCKET_MAX_IDLE_TIME.getValue().toLong();
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ // Update the storage context
+ JobLogStorageContext[] contexts = this.storageContexts.toArray(new JobLogStorageContext[0]);
+ try {
+ updateContextWeight(contexts);
+ // Notify the listener to refresh the context information
+ onContextEvent(new ContextRefreshAllEvent());
+ } catch (IOException e) {
+ LOG.warn("Unable to calculate weight array of storage context list", e);
+ }
+ if (buckets.size() > 0) {
+ StringBuilder builder = new StringBuilder("Buckets(").append(buckets.size()).append(") in LogStorage: [\n");
+ buckets.forEach((bucketName, bucket) -> {
+ JobLogBucketState bucketState = bucket.getBucketState();
+ builder.append("bucket: [ name: ")
+ .append(bucketName)
+ .append(", path: ").append(bucketState.getBucketPath())
+ .append(", parts: ").append(bucketState.getBucketParts())
+ .append(", write-rate: ").append(bucketState.getBucketWriteRate()).append("/s")
+ .append(", last-write-time: ").append(dateFormat.format(bucketState.getBucketWriteTime()))
+ .append(" ]\n");
+ boolean closeBucket = false;
+ if (bucketState.getBucketWriteTime() + maxIdleTime <= System.currentTimeMillis()) {
+ LOG.info("Close the idle bucket: [ name: {}, last-write-time: {} ]",
+ bucketName, dateFormat.format(bucketState.getBucketWriteTime()));
+ closeBucket = true;
+ // NOTE: independent second check — the drift policy may also mark the bucket for closing
+ } if (Objects.nonNull(bucketDriftPolicy) && bucketDriftPolicy.onPolicy(bucket, contexts)){
+ LOG.info("Drift the bucket: [ name: {}, last-write-time: {} ]", bucketName,
+ dateFormat.format(bucketState.getBucketWriteTime()));
+ closeBucket = true;
+ }
+ if (closeBucket) {
+ // Delete the bucket
+ // First to move the bucket from map, then close it
+ buckets.remove(bucketName);
+ bucket.close();
+ }
+ });
+ LOG.info(builder.toString());
+ }
+ } catch (Throwable e){
+ assert LOG != null;
+ LOG.warn("Some exception happened in monitor thread", e);
+ //Ignore
+ } finally {
+ // Always restore the scheduler thread's original name
+ Thread.currentThread().setName(threadName);
+ }
+
+ },BUCKET_MONITOR_INTERVAL.getValue().toLong(), BUCKET_MONITOR_INTERVAL.getValue().toLong(), TimeUnit.MILLISECONDS);
+ }
+ }
+
+
+ /** Close every bucket, then stop the periodic monitor task. */
+ @Override
+ @PreDestroy
+ public void destroy() {
+ // First to close all the bucket
+ buckets.forEach((bucketName, bucket) -> bucket.close());
+ if (null != monitorThread){
+ monitorThread.cancel(true);
+ }
+ }
+
+ /**
+ * Ask each load balancer (already sorted by descending priority) for a storage
+ * context and return the first non-null answer.
+ * @param bucketName bucket name
+ * @param jobLogBucketConfig bucket config
+ * @return storage context, or null when no balancer can choose one
+ */
+ private JobLogStorageContext chooseStorageContext(String bucketName, JobLogBucketConfig jobLogBucketConfig){
+ for (JobLogStorageLoadBalancer balancer : loadBalancers){
+ JobLogStorageContext chosen = balancer.chooseContext(bucketName, jobLogBucketConfig);
+ if (chosen != null){
+ return chosen;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Init load balancers
+ * Init each balancer, then sort by descending priority so higher-priority
+ * balancers are consulted first when choosing a storage context.
+ */
+ private void initLoadBalancers(){
+ for(JobLogStorageLoadBalancer loadBalancer : this.loadBalancers){
+ loadBalancer.init();
+ }
+ // Sort the load balancer
+ this.loadBalancers.sort(Comparator.comparingInt(JobLogStorageLoadBalancer::priority).reversed());
+ }
+ /**
+ * Init the storage context: build one context per non-blank path (score fixed
+ * at 1.0 for now) and compute the initial weight of each.
+ * @param storagePaths storage paths
+ * @throws StreamJobLogException when the disk space query for weighting fails
+ */
+ private void initStorageContexts(String[] storagePaths) throws StreamJobLogException {
+ LOG.info("Init the storage context: [" + StringUtils.join(storagePaths, ",") + "]");
+ for(String storagePath : storagePaths){
+ if (StringUtils.isNotBlank(storagePath)) {
+ // TODO the score of context
+ this.storageContexts.add(new JobLogStorageContext(storagePath, 1.0));
+ }
+ }
+ if (!this.storageContexts.isEmpty()) {
+ int size = this.storageContexts.size();
+ try {
+ updateContextWeight(storageContexts.toArray(new JobLogStorageContext[size]));
+ } catch (IOException e) {
+ throw new StreamJobLogException(-1, "Unable to calculate weight array of storage context list", e);
+ }
+ }
+ }
+
+ /**
+ * Recompute the weight of every storage context, apply it, and log the mapping.
+ * @param contexts contexts to update
+ * @throws IOException when disk space cannot be queried
+ */
+ private void updateContextWeight(JobLogStorageContext[] contexts) throws IOException {
+ double[] weights = calculateContextWeight(contexts);
+ StringBuilder builder = new StringBuilder("Update storage context weights:[\n");
+ for(int i = 0 ; i < weights.length; i ++){
+ JobLogStorageContext context = contexts[i];
+ builder.append(context.getStorePath()).append(" => ").append(weights[i]);
+ if (i != weights.length - 1){
+ builder.append(", ");
+ }
+ context.setStoreWeight(weights[i]);
+ }
+ builder.append("\n]");
+ LOG.info(builder.toString());
+ }
+ /**
+ * Calculate the base weight of storage context.
+ * A context whose disk usage reaches the storage threshold gets raw weight 0;
+ * otherwise raw weight = score * free space (GB). Raw weights are then min-max
+ * normalized into [1, maxNormalizeWt].
+ * @param contexts context array
+ * @return weight array aligned with the input contexts
+ */
+ private double[] calculateContextWeight(JobLogStorageContext[] contexts) throws IOException {
+ double[] weights = new double[contexts.length];
+ if (contexts.length > 0) {
+ int maxNormalizeWt = StreamJobLogConfig.STORAGE_CONTEXT_MAX_WEIGHT.getValue();
+ double storageThreshold = StreamJobLogConfig.STORAGE_THRESHOLD.getValue();
+ if (maxNormalizeWt < 1){
+ maxNormalizeWt = 1;
+ }
+ double maxWeight = Double.MIN_VALUE;
+ double minWeight = Double.MAX_VALUE;
+ int i = 0;
+ for (; i < weights.length; i++) {
+ // Fix: weigh the context at position i (was contexts[0], which weighed only the first context)
+ JobLogStorageContext context = contexts[i];
+ long usableSpace = context.getUsableSpace();
+ long totalSpace = context.getTotalSpace();
+ double usage = (double)(totalSpace - usableSpace) / (double)totalSpace;
+ double weight = 0d;
+ if (usage >= storageThreshold){
+ LOG.warn("The usage of storage context:[{}] reach the threshold: {} > {}, set the weight of it to 0",
+ context.getStorePath(), usage, storageThreshold);
+ } else {
+ long freeSpaceInGB = MemUtils.convertToGB(usableSpace, "B");
+ if (freeSpaceInGB <= 0) {
+ freeSpaceInGB = 1;
+ }
+ weight = context.getScore() * (double) freeSpaceInGB;
+ }
+ weights[i] = weight;
+ if (weight > maxWeight){
+ maxWeight = weight;
+ }
+ if (weight < minWeight){
+ minWeight = weight;
+ }
+ }
+ double sub = maxWeight - minWeight;
+ i = i - 1;
+ for (; i >= 0; i--){
+ // Fix: min-max normalization divides by the range (was multiplied, which skewed the scale)
+ weights[i] = (sub > 0? (maxNormalizeWt - 1) * (weights[i] - minWeight) / sub : 0) + 1;
+ }
+ }
+ return weights;
+ }
+
+ /**
+ * Produce context event: broadcast it to every registered context listener.
+ * @param event event
+ */
+ private void onContextEvent(JobLogStorageContextListener.ContextEvent event){
+ for(JobLogStorageContextListener listener : contextListeners){
+ listener.onContextEvent(event);
+ }
+ }
+ /**
+ * Compose the bucket name as "userName.appName".
+ * @param userName username
+ * @param appName app name
+ * @return bucket name
+ */
+ private String toBucketName(String userName, String appName){
+ return String.join(".", userName, appName);
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java
new file mode 100644
index 000000000..463edab76
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java
@@ -0,0 +1,36 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
+/**
+ * Job log bucket for streamis: the per-(user, application) unit of log storage.
+ */
+public interface JobLogBucket {
+
+ /**
+ * Bucket state (path, part count, write rate, last write time)
+ * @return state
+ */
+ JobLogBucketState getBucketState();
+
+ /**
+ * Storage writer used to append log events to this bucket
+ * @return storage writer
+ */
+ JobLogStorageWriter getBucketStorageWriter();
+
+ /**
+ * Get storage context this bucket lives in
+ * @return context
+ */
+ JobLogStorageContext getStorageContext();
+ /**
+ * Bucket name
+ * @return bucket name
+ */
+ String getBucketName();
+ /**
+ * Close the bucket and release its resources
+ */
+ void close();
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java
new file mode 100644
index 000000000..7264986bd
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java
@@ -0,0 +1,112 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import org.apache.linkis.common.conf.CommonVars;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Configuration for job log bucket
+ */
+public class JobLogBucketConfig {
+
+ @SuppressWarnings("unchecked")
+ public JobLogBucketConfig(){
+ try {
+ Class> defaultBucketClass = Class.forName(Define.JOB_LOG_BUCKET_CLASS.getValue());
+ if (JobLogBucket.class.isAssignableFrom(defaultBucketClass)){
+ this.bucketClass = (Class extends JobLogBucket>) defaultBucketClass;
+ }
+ } catch (ClassNotFoundException e) {
+ throw new StreamJobLogException.Runtime(-1, "Cannot find the bucket class, message: " + e.getMessage());
+ }
+ }
+
+ /**
+ * Bucket class
+ */
+ private Class extends JobLogBucket> bucketClass;
+
+ /**
+ * Attribute
+ */
+ protected Map attributes = new HashMap<>();
+
+ /**
+ * Max size of bucket active part (MB)
+ */
+ private long maxBucketActivePartSize = StreamJobLogConfig.BUCKET_MAX_ACTIVE_PART_SIZE.getValue();
+
+ /**
+ * The compress format used for bucket parts
+ */
+ private String bucketPartCompress = StreamJobLogConfig.BUCKET_PART_COMPRESS.getValue();
+
+ /**
+ * Max hold time in days for bucket part
+ */
+ private int bucketPartHoldTimeInDay = StreamJobLogConfig.BUCKET_PART_HOLD_DAY.getValue();
+
+ /**
+ * Layout pattern
+ */
+ private String LogLayOutPattern = StreamJobLogConfig.BUCKET_LAYOUT.getValue();
+
+ public Class extends JobLogBucket> getBucketClass() {
+ return bucketClass;
+ }
+
+ public void setBucketClass(Class extends JobLogBucket> bucketClass) {
+ this.bucketClass = bucketClass;
+ }
+
+ public Map getAttributes() {
+ return attributes;
+ }
+
+ public void setAttributes(Map attributes) {
+ this.attributes = attributes;
+ }
+
+ public long getMaxBucketActivePartSize() {
+ return maxBucketActivePartSize;
+ }
+
+ public void setMaxBucketActivePartSize(long maxBucketActivePartSize) {
+ this.maxBucketActivePartSize = maxBucketActivePartSize;
+ }
+
+ public String getBucketPartCompress() {
+ return bucketPartCompress;
+ }
+
+ public void setBucketPartCompress(String bucketPartCompress) {
+ this.bucketPartCompress = bucketPartCompress;
+ }
+
+ public int getBucketPartHoldTimeInDay() {
+ return bucketPartHoldTimeInDay;
+ }
+
+ public void setBucketPartHoldTimeInDay(int bucketPartHoldTimeInDay) {
+ this.bucketPartHoldTimeInDay = bucketPartHoldTimeInDay;
+ }
+
+ public String getLogLayOutPattern() {
+ return LogLayOutPattern;
+ }
+
+ public void setLogLayOutPattern(String logLayOutPattern) {
+ LogLayOutPattern = logLayOutPattern;
+ }
+
+
+ public static final class Define{
+ /**
+ * Default bucket class
+ */
+ public static final CommonVars JOB_LOG_BUCKET_CLASS = CommonVars.apply("wds.streamis.job.log.bucket.class", "com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.Log4j2JobLogBucket");
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketDriftPolicy.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketDriftPolicy.java
new file mode 100644
index 000000000..147f8fafe
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketDriftPolicy.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
+/**
+ * Drift policy
+ */
/**
 * Drift policy: decides whether a bucket should be migrated ("drifted")
 * from its current storage context to another one.
 */
public interface JobLogBucketDriftPolicy {
    /**
     * Decide whether you should drift the bucket
     * @param bucket bucket to examine
     * @param contexts candidate storage contexts
     * @return true if the bucket should be drifted to another context
     */
    boolean onPolicy(JobLogBucket bucket, JobLogStorageContext[] contexts);
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java
new file mode 100644
index 000000000..d4b9b6b2a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+/**
+ * Factory of creating job log bucket
+ */
/**
 * Factory of creating job log bucket
 */
public interface JobLogBucketFactory {

    /**
     * Create bucket
     * @param jobName job name
     * @param config bucket config
     * @return new bucket instance for the job
     */
    JobLogBucket createBucket(String jobName, JobLogBucketConfig config);
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java
new file mode 100644
index 000000000..8051e6d13
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+/**
+ * State of log bucket
+ */
/**
 * State of log bucket
 */
public interface JobLogBucketState {

    /**
     * Bucket path (directory that holds the bucket's log files)
     * @return path
     */
    String getBucketPath();

    /**
     * Write rate (log lines per second over the last sampling interval)
     * @return rate
     */
    double getBucketWriteRate();

    /**
     * Number of bucket parts (files in the bucket directory)
     * @return number
     */
    int getBucketParts();

    /**
     * Last write time (epoch milliseconds)
     * @return time
     */
    long getBucketWriteTime();
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java
new file mode 100644
index 000000000..772040374
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Storage writer for job log
+ */
/**
 * Storage writer for job log
 */
public interface JobLogStorageWriter {

    /**
     * Write a log element (each of its content lines is persisted)
     * @param logEl log element to write
     */
    void write(LogElement logEl);

    /**
     * Write a single log line
     * @param logLine log line
     */
    void write(String logLine);

    /**
     * Close the log storage writer
     */
    void close();
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java
new file mode 100644
index 000000000..6d7b6318a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java
@@ -0,0 +1,356 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.RollingFileAppender;
+import org.apache.logging.log4j.core.appender.rolling.*;
+import org.apache.logging.log4j.core.appender.rolling.action.*;
+import org.apache.logging.log4j.core.config.AppenderRef;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+import org.checkerframework.checker.units.qual.A;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Job log bucket for log4j
+ */
+public class Log4j2JobLogBucket implements JobLogBucket{
+
+ private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(Log4j2JobLogBucket.class);
+
+ private static final String DEFAULT_FILE_PATTERN_SUFFIX = ".%d{yyyy-MM-dd}-%i";
+
+ private static final CommonVars ROLLOVER_MAX = CommonVars.apply("wds.stream.job.log.storage.bucket.log4j.rollover-max", 20);
+ /**
+ * Bucket name
+ */
+ private final String bucketName;
+
+ /**
+ * Logger name
+ */
+ private final String loggerName;
+ /**
+ * Logger context
+ */
+ private final LoggerContext loggerContext;
+
+ /**
+ * Logger entity
+ */
+ private final Logger logger;
+
+ /**
+ * Storage context
+ */
+ private final JobLogStorageContext storageContext;
+ /**
+ * Storage writer
+ */
+ private final JobLogStorageWriter jobLogStorageWriter;
+
+ /**
+ * Bucket state
+ */
+ private final JobLogBucketState jobLogBucketState;
+
+ /**
+ * Last write time;
+ */
+ private long lastWriteTime;
+
+ /**
+ * Prev Interval time
+ */
+ private long preIntervalTime;
+
+ /**
+ * Active thread
+ */
+ private final AtomicLong activeThread = new AtomicLong(0);
+ /**
+ * Interval counter
+ */
+ private final AtomicLong intervalCounter = new AtomicLong(0);
+
+ /**
+ * Shutdown flag
+ */
+ private final AtomicBoolean isShutdown = new AtomicBoolean(false);
+
+ /**
+ * Shutdown lock
+ */
+ private final ReentrantLock shutdownLock = new ReentrantLock();
+
+ /**
+ * Shutdown condition
+ */
+ private final Condition canShutdown = shutdownLock.newCondition();
+ /**
+ * Store the write rate
+ */
+ private double writeRate;
+ public Log4j2JobLogBucket(String bucketName, JobLogStorageContext storageContext, JobLogBucketConfig config){
+ this.bucketName = bucketName;
+ // Build unique logger name
+ this.loggerName = bucketName + System.currentTimeMillis() + "_" + Thread.currentThread().getId();
+ this.storageContext = storageContext;
+ // Create logger context
+ this.loggerContext = (LoggerContext) LogManager.getContext(false);
+ this.logger = initLogger(this.bucketName, this.loggerName, this.storageContext, config, this.loggerContext);
+ this.jobLogStorageWriter = createStorageWriter();
+ this.jobLogBucketState = createBucketState();
+ }
+ @Override
+ public JobLogBucketState getBucketState() {
+ return this.jobLogBucketState;
+ }
+
+ @Override
+ public JobLogStorageWriter getBucketStorageWriter() {
+ return this.jobLogStorageWriter;
+ }
+
+ @Override
+ public JobLogStorageContext getStorageContext() {
+ return this.storageContext;
+ }
+
+ @Override
+ public String getBucketName() {
+ return this.bucketName;
+ }
+
+ @Override
+ public void close() {
+ this.isShutdown.set(true);
+ this.shutdownLock.lock();
+ try{
+ if (activeThread.get() > 0) {
+ if (!this.canShutdown.await(5, TimeUnit.SECONDS)) {
+ LOG.warn("Shutdown the bucket: [{}] directly because the timeout of waiting", bucketName);
+ }
+ }
+ } catch (InterruptedException e) {
+ // Ignore
+ } finally {
+ this.shutdownLock.unlock();
+ }
+ Configuration log4jConfig = this.loggerContext.getConfiguration();
+ // First to stop appender
+ log4jConfig.getAppender(this.loggerName).stop();
+ log4jConfig.getLoggerConfig(this.loggerName).removeAppender(this.loggerName);
+ log4jConfig.removeLogger(this.loggerName);
+ loggerContext.updateLoggers();
+ }
+
+ private synchronized Logger initLogger(String bucketName, String loggerName,
+ JobLogStorageContext storageContext, JobLogBucketConfig config, LoggerContext loggerContext){
+ Configuration log4jConfig = loggerContext.getConfiguration();
+ String fileName = resolveFileName(storageContext.getStorePath().toString(), bucketName);
+ RollingFileAppender appender = RollingFileAppender.newBuilder()
+ .setLayout(PatternLayout.newBuilder().withPattern(config.getLogLayOutPattern()).build())
+ .setName(loggerName)
+// .withFileOwner()
+ .withFileName(fileName)
+ .withFilePattern(resolveFilePattern(fileName, config.getBucketPartCompress()))
+ .withPolicy(SizeBasedTriggeringPolicy.createPolicy(config.getMaxBucketActivePartSize() + "MB"))
+ .withStrategy(createRolloverStrategy(log4jConfig, fileName, ROLLOVER_MAX.getValue(), config.getBucketPartHoldTimeInDay()))
+ .setConfiguration(log4jConfig)
+ .build();
+ appender.start();
+ log4jConfig.addAppender(appender);
+ LoggerConfig loggerConfig = LoggerConfig.newBuilder().withAdditivity(false).withLevel(Level.ALL)
+ .withRefs(new AppenderRef[]{
+ AppenderRef.createAppenderRef(loggerName, null, null)
+ })
+ .withLoggerName(loggerName).withConfig(log4jConfig).build();
+ loggerConfig.addAppender(appender, null, null);
+ log4jConfig.addLogger(loggerName, loggerConfig);
+ // Should we update the logger context ?
+ loggerContext.updateLoggers();
+ return loggerContext.getLogger(loggerName);
+ }
+
+ /**
+ * Create storage writer
+ * @return storage writer
+ */
+ private JobLogStorageWriter createStorageWriter(){
+ return new JobLogStorageWriter() {
+ @Override
+ public void write(LogElement logEl) {
+ activeThread.incrementAndGet();
+ try {
+ String[] contents = logEl.getContents();
+ if (null != contents) {
+ for (String content : contents) {
+ write(content, true);
+ }
+ }
+ }finally {
+ if (activeThread.decrementAndGet() <= 0 && isShutdown.get()){
+ notifyShutdown();
+ }
+ }
+ }
+
+ @Override
+ public void write(String logLine) {
+ activeThread.incrementAndGet();
+ try {
+ write(logLine, false);
+ }finally {
+ if (activeThread.decrementAndGet() <= 0 && isShutdown.get()){
+ notifyShutdown();
+ }
+ }
+ }
+
+ private void write(String logLine, boolean batch){
+ logger.info(logLine);
+ long currentTime = System.currentTimeMillis();
+ long intervalCnt = intervalCounter.getAndIncrement();
+ long intervalTime = (currentTime - preIntervalTime)/1000;
+ // Per minute accumulate the rate
+ if ( intervalTime >= 60){
+ writeRate = (double)intervalCnt / (double)intervalTime;
+ preIntervalTime = currentTime;
+ intervalCounter.set(0);
+ }
+ lastWriteTime = currentTime;
+ }
+ @Override
+ public void close() {
+ // Ignore
+ }
+ };
+ }
+
+ /**
+ * Create bucket state
+ * @return bucket state
+ */
+ private JobLogBucketState createBucketState(){
+ return new JobLogBucketState() {
+ private String bucketPath;
+ @Override
+ public String getBucketPath() {
+ if (StringUtils.isBlank(bucketPath)) {
+ Appender appender = loggerContext.getConfiguration().getAppender(loggerName);
+ if (appender instanceof RollingFileAppender) {
+ bucketPath = new File(((RollingFileAppender) appender).getFileName()).getParent();
+ }
+ }
+ return this.bucketPath;
+ }
+
+ @Override
+ public double getBucketWriteRate() {
+ return writeRate;
+ }
+
+ @Override
+ public int getBucketParts() {
+ AtomicInteger parts = new AtomicInteger(-1);
+ String bucketPath = getBucketPath();
+ if (StringUtils.isNotBlank(bucketPath)){
+ Optional.ofNullable(new File(bucketPath).list()).ifPresent(list -> parts.set(list.length));
+ }
+ return parts.get();
+ }
+
+ @Override
+ public long getBucketWriteTime() {
+ return lastWriteTime;
+ }
+ };
+ }
+
+ private void notifyShutdown(){
+ this.shutdownLock.lock();
+ try{
+ this.canShutdown.notifyAll();
+ }finally {
+ this.shutdownLock.unlock();
+ }
+ }
+ /**
+ * Create rollover strategy
+ * @param configuration configuration
+ * @param fileName file name
+ * @param rolloverMax rollover max inf file pattern
+ * @param fileHoldDay file hold day time
+ * @return strategy
+ */
+ private RolloverStrategy createRolloverStrategy(Configuration configuration,
+ String fileName, int rolloverMax, int fileHoldDay){
+ DefaultRolloverStrategy.Builder builder = DefaultRolloverStrategy.newBuilder();
+ if (rolloverMax > 0){
+ builder.withMax(rolloverMax + "");
+ }
+ if (fileHoldDay > 0){
+ // Create the actions to delete old file
+ builder.withCustomActions(new Action[]{
+ DeleteAction.createDeleteAction(new File(fileName).getParent(), false, 2, false, null,
+ new PathCondition[]{
+ IfFileName.createNameCondition(null, ".*"),
+ IfLastModified.createAgeCondition(Duration.parse(fileHoldDay + "d"))
+ },
+ null, configuration)
+ }
+ );
+ }
+ return builder.build();
+ }
+ /**
+ * Ex: /data/stream/log/hadoop/{projectName}/{jobName}/{projectName}.{jobName}.log
+ * @param bucketRootPath bucket root path
+ * @param bucketName bucket name
+ * @return file name with absolute path
+ */
+ private String resolveFileName(String bucketRootPath, String bucketName){
+ // {projectName}.{jobName}
+ String fileName = FilenameUtils.normalize(bucketName);
+ String basePath = bucketRootPath;
+ if (!basePath.endsWith("/")){
+ basePath += "/";
+ }
+ basePath += fileName.replace(".", "/");
+ return basePath + "/" + fileName.substring(bucketName.indexOf(".") + 1) + ".log";
+ }
+
+ /**
+ * Resolve file pattern
+ * @param fileName file name
+ * @param format format
+ * @return file pattern
+ */
+ private String resolveFilePattern(String fileName, String format){
+ String filePattern = fileName + Log4j2JobLogBucket.DEFAULT_FILE_PATTERN_SUFFIX;
+ if (StringUtils.isNotBlank(format)){
+ filePattern = filePattern + (format.startsWith(".") ? format : "." +format);
+ }
+ return filePattern;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java
new file mode 100644
index 000000000..608faa75e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
+public class StorageThresholdDriftPolicy implements JobLogBucketDriftPolicy{
+ @Override
+ public boolean onPolicy(JobLogBucket bucket, JobLogStorageContext[] contexts) {
+ JobLogStorageContext bucketContext = bucket.getStorageContext();
+ // Means that the storage context is not healthy
+ if (bucketContext.getStoreWeight() <= 0){
+ // Find the available context
+ boolean hasRest = false;
+ for(JobLogStorageContext context : contexts){
+ if (context.getStoreWeight() > 0){
+ hasRest = true;
+ break;
+ }
+ }
+ return hasRest;
+ }
+ return false;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java
new file mode 100644
index 000000000..ba9c002d6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java
@@ -0,0 +1,7 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+/**
+ * Use the appender and strategy of log4j (version 1.x) to implement the bucket
+ */
/**
 * Use the appender and strategy of log4j (version 1.x) to implement the bucket.
 * NOTE(review): currently an empty placeholder with no implementation, and it
 * does not implement JobLogBucket — confirm whether it is still needed.
 */
public class StreamisJobLogBucket {
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java
new file mode 100644
index 000000000..c1964376e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+/**
+ * Means that the storage context has been downed
+ */
+public class ContextDownEvent implements JobLogStorageContextListener.ContextEvent {
+
+ /**
+ * Context id
+ */
+ private final String contextId;
+
+ public ContextDownEvent(String contextId){
+ this.contextId = contextId;
+ }
+
+ public String getContextId() {
+ return contextId;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java
new file mode 100644
index 000000000..59de63d6e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+import java.util.List;
+
+/**
+ * Means that the storage context has been launched
+ */
+public class ContextLaunchEvent implements JobLogStorageContextListener.ContextEvent {
+
+ /**
+ * Storage contexts
+ */
+ private final List contexts;
+
+ public ContextLaunchEvent(List contexts){
+ this.contexts = contexts;
+ }
+
+ public List getContextList() {
+ return contexts;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java
new file mode 100644
index 000000000..b585e5718
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java
@@ -0,0 +1,8 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+/**
+ * Just a sign that to refresh all the storage context
+ */
/**
 * Marker event: a sign that all storage contexts should be refreshed.
 * Carries no payload.
 */
public class ContextRefreshAllEvent implements JobLogStorageContextListener.ContextEvent {

}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContext.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContext.java
new file mode 100644
index 000000000..9ffd95226
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContext.java
@@ -0,0 +1,144 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+
+import java.io.IOException;
+import java.nio.file.*;
+import java.nio.file.attribute.PosixFilePermissions;
+import java.util.UUID;
+
+/**
+ * Storage context (represent the driver/disk)
+ */
+public class JobLogStorageContext{
+
+ /**
+ * Context id
+ */
+ private final String id;
+
+ /**
+ * Store path
+ */
+ private final Path storePath;
+
+ /**
+ * Store information
+ */
+ private final FileStore storeInfo;
+ /**
+ * Score of storage context
+ */
+ private final double score;
+
+ /**
+ * Storage weight
+ */
+ private double storeWeight;
+
+ public JobLogStorageContext(String path, double score){
+ this.id = UUID.randomUUID().toString();
+ this.storePath = Paths.get(path);
+ this.storeInfo = initStorePath(this.storePath);
+ this.score = score;
+ }
+
+
+ private FileStore initStorePath(Path path){
+ if (Files.notExists(path)){
+ try {
+ Files.createDirectories(this.storePath,
+ PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxr--")));
+ } catch (IOException e) {
+ throw new StreamJobLogException.Runtime(-1,
+ "Cannot make the storage path directory: [" + path + "], message: " + e.getMessage());
+ }
+ // Allow dir link
+ } else if (!Files.isDirectory(path)){
+ throw new StreamJobLogException.Runtime(-1,
+ "the storage path: [" + path + "] is not directory" );
+ }
+ try {
+ return Files.getFileStore(path);
+ } catch (IOException e) {
+ throw new StreamJobLogException.Runtime(-1,
+ "Fail to get the storage information in path: [" + path + "], message: " + e.getMessage());
+ }
+ }
+
+ public Path getStorePath() {
+ return storePath;
+ }
+
+ /**
+ * Score
+ * @return score value
+ */
+ public double getScore() {
+ return score;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * Total space
+ * @return bytes return
+ * @throws IOException
+ */
+ public long getTotalSpace() throws IOException {
+ long result = storeInfo.getTotalSpace();
+ if (result < 0){
+ result = Long.MAX_VALUE;
+ }
+ return result;
+ }
+
+ /**
+ * Usable space
+ * @return bytes return
+ * @throws IOException
+ */
+ public long getUsableSpace() throws IOException {
+ long result = storeInfo.getUsableSpace();
+ if (result < 0){
+ result = Long.MAX_VALUE;
+ }
+ return result;
+ }
+
+ /**
+ * Unallocated space
+ * @return bytes return
+ * @throws IOException
+ */
+ public long getUnallocatedSpace() throws IOException{
+ long result = storeInfo.getUnallocatedSpace();
+ if (result < 0){
+ result = Long.MAX_VALUE;
+ }
+ return result;
+ }
+
+ public double getStoreWeight() {
+ return storeWeight;
+ }
+
+ public void setStoreWeight(double storeWeight) {
+ this.storeWeight = storeWeight;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof JobLogStorageContext){
+ return this.id.equals(((JobLogStorageContext) o).id);
+ }
+ return super.equals(o);
+ }
+
+ @Override
+ public int hashCode() {
+ return this.id.hashCode();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContextListener.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContextListener.java
new file mode 100644
index 000000000..77432a2b2
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContextListener.java
@@ -0,0 +1,17 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+/**
+ * Context listener
+ */
/**
 * Listener notified of storage-context lifecycle events
 * (launch, down, refresh-all).
 */
public interface JobLogStorageContextListener {

    /**
     * Listen the context event
     * @param event event
     */
    void onContextEvent(ContextEvent event);

    /**
     * Marker interface for context events; concrete events carry their own payload.
     */
    interface ContextEvent{

    }
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java
new file mode 100644
index 000000000..06d6186d7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
/**
 * Load balancer that picks a storage context for a new log bucket.
 */
public interface JobLogStorageLoadBalancer {
    /**
     * Init method
     */
    void init();

    /**
     * The order among registered balancers (higher value = higher priority)
     * @return priority value
     */
    default int priority(){
        return -1;
    }

    /**
     * Choose storage context
     * @param bucketName bucket name
     * @param config bucket config
     * @return chosen context, or null when none is available
     */
    JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config);
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/RoundRobinLoadBalancer.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/RoundRobinLoadBalancer.java
new file mode 100644
index 000000000..8d77d5fd1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/RoundRobinLoadBalancer.java
@@ -0,0 +1,199 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+
+/**
+ * Round-robin load balancer
+ */
+public class RoundRobinLoadBalancer implements JobLogStorageLoadBalancer, JobLogStorageContextListener {
+
+ private static final Logger LOG = LoggerFactory.getLogger(RoundRobinLoadBalancer.class);
+
+ /**
+ * Candidate array
+ */
+ private StorageContextInfo[] candidates = new StorageContextInfo[0];
+
+ /**
+ * Lock for candidate array
+ */
+ private final ReentrantLock candidateLock = new ReentrantLock();
+ @Override
+ public void onContextEvent(ContextEvent event) {
+ if (event instanceof ContextLaunchEvent){
+ onLaunchContexts(((ContextLaunchEvent) event).getContextList());
+ } else if (event instanceof ContextDownEvent){
+ onDownContext(((ContextDownEvent) event).getContextId());
+ } else if (event instanceof ContextRefreshAllEvent){
+ onRefreshAllContext();
+ }
+ }
+
+ @Override
+ public int priority() {
+ return Integer.MAX_VALUE;
+ }
+
+ @Override
+ public void init() {
+
+ }
+
+ @Override
+ public JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config) {
+ updateCandidateContextWeight();
+ candidateLock.lock();
+ try {
+ int index = selectContext(candidates);
+ if (index >= 0){
+ StorageContextInfo info = this.candidates[index];
+ info.cwt = info.cwt -1;
+ LOG.info("Round-Robin chosen context: {} for bucket: {}", info.context.getStorePath(), bucketName);
+ return info.context;
+ }
+ }finally {
+ candidateLock.unlock();
+ }
+ return null;
+ }
+
+ private static class StorageContextInfo{
+
+ /**
+ * Storage context
+ */
+ final JobLogStorageContext context;
+
+ /**
+ * If the context is working
+ */
+ boolean online = true;
+
+ /**
+ * Weight value
+ */
+ int wt;
+
+ /**
+ * Dynamic weight
+ */
+ int cwt;
+
+ public StorageContextInfo(JobLogStorageContext context){
+ this.context = context;
+ this.wt = (int)Math.floor(context.getStoreWeight());
+ this.cwt = wt;
+ }
+
+ public void refreshWeight(){
+ this.wt = (int)Math.floor(context.getStoreWeight());
+ if (this.cwt > this.wt){
+ this.cwt = this.wt;
+ }
+ }
+ }
+
+ /**
+ * Select context
+ * @param infoArray info array
+ * @return index
+ */
+ private int selectContext(StorageContextInfo[] infoArray){
+ int u = 0;
+ int reset = -1;
+ while (true){
+ for (int i = 0; i < infoArray.length; i ++){
+ if (!infoArray[i].online || infoArray[i].cwt <= 0){
+ continue;
+ }
+ u = i;
+ while (i < infoArray.length - 1){
+ i ++;
+ if (!infoArray[i].online || infoArray[i].cwt <= 0){
+ continue;
+ }
+ if ((infoArray[u].wt * 1000 / infoArray[i].wt <
+ infoArray[u].cwt * 1000 / infoArray[i].cwt)){
+ return u;
+ }
+ u = i;
+ }
+ return u;
+ }
+ if (++reset > 0){
+ return -1;
+ }
+ for (StorageContextInfo info : infoArray){
+ info.cwt = info.wt;
+ }
+ }
+
+ }
+ /**
+ * Enlarge the candidate array of context info
+ * @param contexts context list
+ */
+ private void onLaunchContexts(List contexts){
+ if (contexts.size() > 0){
+ candidateLock.lock();
+ try{
+ StorageContextInfo[] source = candidates;
+ int newSize = source.length + contexts.size();
+ StorageContextInfo[] dest = new StorageContextInfo[newSize];
+ System.arraycopy(source, 0, dest, 0, source.length);
+ int offset = source.length;
+ for(JobLogStorageContext context : contexts){
+ dest[offset++] = new StorageContextInfo(context);
+ }
+ this.candidates = dest;
+ }finally {
+ candidateLock.unlock();
+ }
+ }
+ }
+
+ /**
+ * Mark the context has been downed
+ * @param contextId context id
+ */
+ private void onDownContext(String contextId){
+ // Need to lock the array ?
+ candidateLock.lock();
+ try{
+ for (StorageContextInfo info : candidates) {
+ if (contextId.equals(info.context.getId())) {
+ info.online = false;
+ return;
+ }
+ }
+ } finally {
+ candidateLock.unlock();
+ }
+ }
+
+ /**
+ * Refresh all the context
+ */
+ private void onRefreshAllContext(){
+ candidateLock.lock();
+ try{
+ // Update the dynamic weight
+ for (StorageContextInfo info : candidates) {
+ info.refreshWeight();
+ }
+ } finally {
+ candidateLock.unlock();
+ }
+ }
+ private void updateCandidateContextWeight(){
+ // Empty method
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/SimpleLoadBalancer.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/SimpleLoadBalancer.java
new file mode 100644
index 000000000..e11e9fa9c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/SimpleLoadBalancer.java
@@ -0,0 +1,51 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.ContextDownEvent;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.ContextLaunchEvent;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContextListener;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Simple load balancer
+ */
+public class SimpleLoadBalancer implements JobLogStorageLoadBalancer, JobLogStorageContextListener {
+
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleLoadBalancer.class);
+
+ private final List contexts = new ArrayList<>();
+
+ private final SecureRandom random = new SecureRandom();
+ @Override
+ public void onContextEvent(ContextEvent event) {
+ if (event instanceof ContextLaunchEvent){
+ contexts.addAll(((ContextLaunchEvent) event).getContextList());
+ } else if (event instanceof ContextDownEvent){
+ contexts.removeIf(context -> context.getId().equals(((ContextDownEvent) event).getContextId()));
+ }
+ }
+
+ @Override
+ public void init() {
+
+ }
+
+ @Override
+ public JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config) {
+ JobLogStorageContext context = randomSelectContext(this.contexts);
+ if (null != context){
+ LOG.info("Random chosen context: {} for bucket: {}", context.getStorePath(), bucketName);
+ }
+ return context;
+ }
+
+ private JobLogStorageContext randomSelectContext(List candidates){
+ return candidates.get(random.nextInt(candidates.size()));
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java
new file mode 100644
index 000000000..8ef2f3ec6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java
@@ -0,0 +1,234 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.util.HashMap;
+import java.util.Map;
+
/**
 * Storage size conversion utilities.
 *
 * <p>Converts sizes between byte-based units (B/KB/MB/GB/TB) following the same pattern
 * as {@code java.util.concurrent.TimeUnit}: down-conversions truncate toward zero and
 * up-conversions saturate at {@code Long.MAX_VALUE} / {@code Long.MIN_VALUE}.</p>
 */
public class MemUtils {

    /** Mapping from upper-case unit suffix to storage unit */
    private static final Map<String, StoreUnit> UNIT_MAP = new HashMap<>();
    static {
        UNIT_MAP.put("G", StoreUnit.GB);
        UNIT_MAP.put("GB", StoreUnit.GB);
        UNIT_MAP.put("B", StoreUnit.B);
        UNIT_MAP.put("M", StoreUnit.MB);
        UNIT_MAP.put("MB", StoreUnit.MB);
        UNIT_MAP.put("K", StoreUnit.KB);
        UNIT_MAP.put("KB", StoreUnit.KB);
    }

    /**
     * Convert a size to gigabytes.
     * @param size size value expressed in {@code unitFlag} units
     * @param unitFlag unit suffix (B, K, KB, M, MB, G, GB; case-insensitive)
     * @return size in GB, or -1 when size is negative or the unit is blank/unknown
     */
    public static long convertToGB(long size, String unitFlag){
        StoreUnit storeUnit = resolveUnit(size, unitFlag);
        return null != storeUnit ? storeUnit.toGB(size) : -1L;
    }

    /**
     * Convert a size to megabytes.
     * @param size size value expressed in {@code unitFlag} units
     * @param unitFlag unit suffix (B, K, KB, M, MB, G, GB; case-insensitive)
     * @return size in MB, or -1 when size is negative or the unit is blank/unknown
     */
    public static long convertToMB(long size, String unitFlag){
        StoreUnit storeUnit = resolveUnit(size, unitFlag);
        return null != storeUnit ? storeUnit.toMB(size) : -1L;
    }

    /**
     * Convert a size to bytes.
     * @param size size value expressed in {@code unitFlag} units
     * @param unitFlag unit suffix (B, K, KB, M, MB, G, GB; case-insensitive)
     * @return size in bytes, or -1 when size is negative or the unit is blank/unknown
     */
    public static long convertToByte(long size, String unitFlag){
        StoreUnit storeUnit = resolveUnit(size, unitFlag);
        return null != storeUnit ? storeUnit.toB(size) : -1L;
    }

    /**
     * Resolve the storage unit for a size/unit pair (shared validation for the
     * convert* methods). Uses a plain null/blank check instead of commons-lang
     * StringUtils, keeping this utility dependency-free.
     * @return the unit, or null when size is negative or the flag is blank/unknown
     */
    private static StoreUnit resolveUnit(long size, String unitFlag){
        if (size < 0 || null == unitFlag || unitFlag.trim().isEmpty()){
            return null;
        }
        return UNIT_MAP.get(unitFlag.trim().toUpperCase());
    }

    /**
     * Storage units; each constant implements exact conversion to every other unit.
     */
    public enum StoreUnit {
        /**
         * byte
         */
        B {
            @Override
            public long toB(long s){
                return s;
            }

            @Override
            public long toKB(long s){
                return s/(C1/C0);
            }

            @Override
            public long toMB(long s) {
                return s/(C2/C0);
            }

            @Override
            public long toGB(long s) {
                return s/(C3/C0);
            }

            @Override
            public long toTB(long s) {
                return s/(C4/C0);
            }
        },
        /**
         * kilobyte
         */
        KB{
            @Override
            public long toB(long s){
                return x(s, C1/C0, Long.MAX_VALUE/(C1/C0));
            }

            @Override
            public long toKB(long s){
                return s;
            }

            @Override
            public long toMB(long s) {
                return s/(C2/C1);
            }

            @Override
            public long toGB(long s) {
                return s/(C3/C1);
            }

            @Override
            public long toTB(long s) {
                // Fix: KB -> TB divides by C4/C1 (2^30); the original used C4/C0 (2^40),
                // which understated the result by a factor of 1024
                return s/(C4/C1);
            }
        },
        /**
         * megabyte
         */
        MB{
            @Override
            public long toB(long s){
                return x(s, C2/C0, Long.MAX_VALUE/(C2/C0));
            }

            @Override
            public long toKB(long s){
                return x(s, C2/C1, Long.MAX_VALUE/(C2/C1));
            }

            @Override
            public long toMB(long s) {
                return s;
            }

            @Override
            public long toGB(long s) {
                return s/(C3/C2);
            }

            @Override
            public long toTB(long s) {
                return s/(C4/C2);
            }
        },
        /**
         * gigabyte
         */
        GB{
            @Override
            public long toB(long s){
                return x(s, C3/C0, Long.MAX_VALUE/(C3/C0));
            }

            @Override
            public long toKB(long s){
                return x(s, C3/C1, Long.MAX_VALUE/(C3/C1));
            }

            @Override
            public long toMB(long s) {
                return x(s, C3/C2, Long.MAX_VALUE/(C3/C2));
            }

            @Override
            public long toGB(long s) {
                return s;
            }

            @Override
            public long toTB(long s) {
                return s/(C4/C3);
            }
        },
        /**
         * terabyte
         */
        TB{
            @Override
            public long toB(long s){
                return x(s, C4/C0, Long.MAX_VALUE/(C4/C0));
            }

            @Override
            public long toKB(long s){
                return x(s, C4/C1, Long.MAX_VALUE/(C4/C1));
            }

            @Override
            public long toMB(long s) {
                return x(s, C4/C2, Long.MAX_VALUE/(C4/C2));
            }

            @Override
            public long toGB(long s) {
                return x(s, C4/C3, Long.MAX_VALUE/(C4/C3));
            }

            @Override
            public long toTB(long s) {
                return s;
            }
        };

        // Abstract instead of AbstractMethodError-throwing stubs: every constant
        // overrides all conversions, so the compiler now enforces it
        public abstract long toB(long s);

        public abstract long toKB(long s);

        public abstract long toMB(long s);

        public abstract long toGB(long s);

        public abstract long toTB(long s);
    }

    /**
     * Saturated multiply: d * m, clamped to Long.MAX_VALUE / Long.MIN_VALUE
     * when |d| exceeds {@code over} (expected to be Long.MAX_VALUE / m).
     */
    static long x(long d, long m, long over){
        if(d > over){
            return Long.MAX_VALUE;
        }
        if(d < -over){
            return Long.MIN_VALUE;
        }
        return d * m;
    }

    /** Unit scale factors in bytes: 1, 2^10, 2^20, 2^30, 2^40 */
    static final long C0 = 1L;
    static final long C1 = C0 * 1024L;
    static final long C2 = C1 * 1024L;
    static final long C3 = C2 * 1024L;
    static final long C4 = C3 * 1024L;

}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/test/com/webank/wedatasphere/streamis/jobmanager/log/LogStorageTest.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/test/com/webank/wedatasphere/streamis/jobmanager/log/LogStorageTest.java
new file mode 100644
index 000000000..6ced214f2
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/test/com/webank/wedatasphere/streamis/jobmanager/log/LogStorageTest.java
@@ -0,0 +1,4 @@
+package com.webank.wedatasphere.streamis.jobmanager.log;
+
// Empty placeholder with no test cases. NOTE(review): it lives under src/main/test,
// which is not a standard Maven test root; the real coverage appears to be
// JobLogStorageTest under src/test/java — confirm whether this file can be removed.
public class LogStorageTest {
}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/server/JobLogStorageTest.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/server/JobLogStorageTest.java
new file mode 100644
index 000000000..a74d38954
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/server/JobLogStorageTest.java
@@ -0,0 +1,103 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.StreamisJobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.StorageThresholdDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.RoundRobinLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.SimpleLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils.MemUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.linkis.common.conf.BDPConfiguration;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.Objects;
+
+public class JobLogStorageTest {
+
+ private static final Logger LOG = LoggerFactory.getLogger(JobLogStorageTest.class);
+ @Test
+ public void storageContext() throws IOException {
+ URL url = JobLogStorageTest.class.getResource("/");
+ if (null != url){
+ JobLogStorageContext context = new JobLogStorageContext(url.getPath(), 1.0d);
+ System.out.println("disk total(bytes): " + context.getTotalSpace());
+ System.out.println("disk total(gb): " + MemUtils.convertToGB(context.getTotalSpace(), "B"));
+ System.out.println("disk usable(bytes): " + context.getUsableSpace());
+ System.out.println("disk usable(gb): " + MemUtils.convertToGB(context.getUsableSpace(), "B"));
+ }
+ }
+ @Test
+ public void calculateWeight() throws IOException {
+ JobLogStorageContext candidate1 = new JobLogStorageContext(Objects.requireNonNull(JobLogStorage.class.getResource("/"))
+ .getPath(), 1.0d);
+ JobLogStorageContext candidate2 = new JobLogStorageContext(Objects.requireNonNull(JobLogStorage.class.getResource("/"))
+ .getPath(), 1.0d);
+ JobLogStorageContext[] contexts = new JobLogStorageContext[]{candidate1, candidate2};
+ double[] weights = new double[contexts.length];
+ int maxNormalizeWt = StreamJobLogConfig.STORAGE_CONTEXT_MAX_WEIGHT.getValue();
+ double storageThreshold = StreamJobLogConfig.STORAGE_THRESHOLD.getValue();
+ if (maxNormalizeWt < 1){
+ maxNormalizeWt = 1;
+ }
+ double maxWeight = Double.MIN_VALUE;
+ double minWeight = Double.MAX_VALUE;
+ int i = 0;
+ for (; i < weights.length; i++) {
+ JobLogStorageContext context = contexts[0];
+ long usableSpace = context.getUsableSpace();
+ long totalSpace = context.getTotalSpace();
+ double usage = (double)(totalSpace - usableSpace) / (double)totalSpace;
+ double weight = 0d;
+ if (usage >= storageThreshold){
+ LOG.info("The usage of storage context:[{}] reach the threshold: {} > {}, set the weight of it to 0",
+ context.getStorePath(), usage, storageThreshold);
+ } else {
+ long freeSpaceInGB = MemUtils.convertToGB(usableSpace, "B");
+ if (freeSpaceInGB <= 0) {
+ freeSpaceInGB = 1;
+ }
+ weight = context.getScore() * (double) freeSpaceInGB;
+ }
+ weights[i] = weight;
+ if (weight > maxWeight){
+ maxWeight = weight;
+ }
+ if (weight < minWeight){
+ minWeight = weight;
+ }
+ }
+ double sub = maxWeight - minWeight;
+ i = i - 1;
+ for (; i >= 0; i--){
+ weights[i] = (sub > 0? (maxNormalizeWt - 1) * (weights[i] - minWeight) * sub : 0) + 1;
+ }
+ System.out.println(StringUtils.join(weights, '|'));
+ }
+
+ @Test
+ public void startLogStorage() throws Exception {
+ BDPConfiguration.set("wds.stream.job.log.storage.context.paths", Objects.requireNonNull(JobLogStorage.class.getResource("/"))
+ .getPath());
+ JobLogStorage storage = createJobLogStorage();
+ storage.init();
+ JobLogBucket bucket = storage.getOrCreateBucket("hadoop", "test-app", new JobLogBucketConfig());
+ bucket.getBucketStorageWriter().write("Hello world");
+ Thread.sleep(1000);
+ storage.destroy();
+ }
+ private JobLogStorage createJobLogStorage(){
+ StreamisJobLogStorage jobLogStorage = new StreamisJobLogStorage();
+ jobLogStorage.addLoadBalancer(new RoundRobinLoadBalancer());
+ jobLogStorage.addLoadBalancer(new SimpleLoadBalancer());
+ jobLogStorage.setBucketDriftPolicy(new StorageThresholdDriftPolicy());
+ return jobLogStorage;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/pom.xml b/streamis-jobmanager/streamis-job-manager/pom.xml
new file mode 100755
index 000000000..e3b085b24
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/pom.xml
@@ -0,0 +1,34 @@
+
+
+
+
+
+ streamis-jobmanager
+ com.webank.wedatasphere.streamis
+ 0.2.4
+
+ 4.0.0
+
+ streamis-job-manager
+ pom
+
+ streamis-job-manager-base
+ streamis-job-manager-service
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/pom.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/pom.xml
new file mode 100755
index 000000000..6ffee865f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/pom.xml
@@ -0,0 +1,104 @@
+
+
+
+
+
+ streamis-jobmanager
+ com.webank.wedatasphere.streamis
+ 0.2.4
+ ../../pom.xml
+
+ 4.0.0
+
+ streamis-job-manager-base
+
+
+
+ 8
+ 8
+
+
+
+
+ mysql
+ mysql-connector-java
+ ${mysql.connector.version}
+
+
+ org.apache.linkis
+ linkis-mybatis
+
+
+ org.apache.linkis
+ linkis-common
+
+
+ commons-io
+ commons-io
+ 2.7
+ compile
+
+
+ commons-lang
+ commons-lang
+
+
+ jakarta.servlet
+ jakarta.servlet-api
+ 4.0.4
+ compile
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-launcher-linkis
+ ${streamis.version}
+ compile
+
+
+ javax.validation
+ validation-api
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-deploy-plugin
+
+
+
+ net.alchim31.maven
+ scala-maven-plugin
+
+
+ org.apache.maven.plugins
+ maven-jar-plugin
+
+
+
+
+ src/main/java
+
+ **/*.xml
+
+
+
+ ${project.artifactId}-${project.version}
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamAlertMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamAlertMapper.java
new file mode 100644
index 000000000..9b2d56fe4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamAlertMapper.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.dao;
+
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamAlertRecord;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+public interface StreamAlertMapper {
+
+ void insert(StreamAlertRecord streamAlertRecord);
+
+ List getAlertByJobIdAndVersion(@Param("username") String username, @Param("jobId") Long jobId, @Param("versionId") Long versionId);
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java
new file mode 100644
index 000000000..5ef2f1d15
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.dao;
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.*;
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.QueryJobListVo;
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.VersionDetailVo;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+
+public interface StreamJobMapper {
+
+ List getJobLists(@Param("projectName") String projectName, @Param("userName") String userName, @Param("name") String name,
+ @Param("status") Integer status, @Param("createBy") String createBy);
+
+ List getJobVersionDetails(@Param("jobId") Long jobId);
+
+ StreamJob getJobById(@Param("jobId") Long jobId);
+
+ List getJobByName(@Param("jobName") String jobName);
+
+ List getJobVersions(@Param("jobId") Long jobId);
+
+ /**
+ * Get the latest job version
+ * @param jobId job id
+ * @return job version
+ */
+ StreamJobVersion getLatestJobVersion(@Param("jobId") Long jobId);
+
+ StreamJobVersion getJobVersionById(@Param("jobId") Long jobId, @Param("version") String version);
+
+ void insertJob(StreamJob streamJob);
+
+ void insertJobVersion(StreamJobVersion streamJobVersion);
+
+ void updateJob(StreamJob streamJob);
+
+ List getJobListsByProjectName(String projectName);
+
+ VersionDetailVo getVersionDetail(@Param("jobId") Long jobId, @Param("version") String version);
+
+ void insertJobVersionFiles(StreamJobVersionFiles jobVersionFiles);
+
+ List getStreamJobVersionFiles(@Param("jobId") Long jobId, @Param("jobVersionId") Long jobVersionId);
+
+ StreamJob getCurrentJob(@Param("projectName")String projectName, @Param("jobName")String jobName);
+
+ /**
+ * Query and lock current job
+ * @param projectName project name
+ * @param jobName job name
+ * @return stream job
+ */
+ StreamJob queryAndLockJobInCondition(@Param("projectName")String projectName, @Param("jobName")String jobName);
+
+ /**
+ * Query and lock by job id
+ * @param jobId job id
+ * @return stream job
+ */
+ StreamJob queryAndLockJobById(@Param("jobId")Long jobId);
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamTaskMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamTaskMapper.java
new file mode 100755
index 000000000..2eae1e954
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamTaskMapper.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.dao;
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask;
+import org.apache.ibatis.annotations.Param;
+
+import java.util.List;
+
+
+public interface StreamTaskMapper {
+
+ void insertTask(StreamTask streamTask);
+
+ void updateTask(StreamTask streamTask);
+
+ /**
+ * Update task which in specific status
+ * @param streamTask stream task
+ * @param status status
+ */
+ int updateTaskInStatus(@Param("task")StreamTask streamTask, @Param("status")Integer status);
+
+ List getByJobVersionId(@Param("jobVersionId") Long jobVersionId, @Param("version") String version);
+
+ /**
+ * Get latest task by job version id
+ * @param jobVersionId job version id
+ * @param version version number
+ * @return stream task
+ */
+ StreamTask getLatestByJobVersionId(@Param("jobVersionId") Long jobVersionId, @Param("version") String version);
+
+ /**
+ * Get the latest task by job id
+ * @param jobId job id
+ * @return stream task
+ */
+ StreamTask getLatestByJobId(@Param("jobId") Long jobId);
+
+ /**
+ * Get the latest task(launched) by job id
+ * @param jobId job id
+ * @return stream task
+ */
+ StreamTask getLatestLaunchedById(@Param("jobId") Long jobId);
+ /**
+ * Get earlier task list by job id
+ * @param jobId job id
+ * @param count the max number of task
+ * @return
+ */
+ List getEarlierByJobId(@Param("jobId") Long jobId, @Param("count") Integer count);
+
+ StreamTask getRunningTaskByJobId(@Param("jobId") Long jobId);
+
+ StreamTask getTaskById(@Param("id") Long id);
+
+ List getTasksByJobIdAndJobVersionId(@Param("jobId") Long jobId, @Param("jobVersionId") Long jobVersionId);
+
+ List getTasksByStatus(List status);
+
+ String getTask(@Param("jobId") Long jobId, @Param("version") String version);
+
+ /**
+ * Get status info of tasks by job ids
+ * @param jobIds job ids
+ * @return list
+ */
+ List getStatusInfoByJobIds(@Param("jobIds")List jobIds);
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamAlertMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamAlertMapper.xml
new file mode 100644
index 000000000..9dfb1c2d9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamAlertMapper.xml
@@ -0,0 +1,45 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ insert into linkis_stream_alert_record (alert_level,alert_user,alert_msg,job_id,job_version_id,task_id,create_time,status,error_msg) values
+ (#{alertLevel},#{alertUser},#{alertMsg},#{jobId},#{jobVersionId},#{taskId},#{createTime},#{status},#{errorMsg})
+
+
+
+ select id, alert_level,alert_user,alert_msg,job_id,job_version_id,task_id,create_time,status,error_msg from linkis_stream_alert_record
+ where alert_user = #{username} and job_id = #{jobId} and job_version_id = #{versionId} ORDER BY create_time DESC;
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml
new file mode 100644
index 000000000..dc7d7e5f7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml
@@ -0,0 +1,230 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ `id`,`project_name`, `workspace_name`,`name`,`create_by`, `create_time`,`label`,`description`,`submit_user`,`job_type`, `current_version`
+
+
+
+ `id`,`job_id`,`version`,`source`,`job_content`,`comment`,`create_time`,`create_by`
+
+
+
+ SELECT FROM
+ linkis_stream_job WHERE id = #{jobId}
+
+
+
+
+
+ SELECT FROM
+ linkis_stream_job WHERE name = #{jobName}
+
+
+
+
+
+
+
+
+ select *, (SELECT COUNT(1) FROM linkis_stream_job_version v WHERE v.job_id = aa.id AND v.id > aa.version_id) AS version_forward,
+ (SELECT v.version FROM linkis_stream_job_version v WHERE v.job_id = aa.id ORDER BY v.id DESC LIMIT 1) AS last_version from (
+ SELECT j.`id`,j.`project_name`, j.`workspace_name`,j.`name`,j.`create_by`, j.`create_time`,j.`label`,j.`description`,
+ j.`job_type`, (CASE WHEN t.`status` IS NULL THEN 0 ELSE t.status END) AS status, j.`current_version` AS version, v.id as version_id,
+ v.create_time AS lastVersionTime, t.start_time, lsp.id AS project_id FROM
+ linkis_stream_job j
+ LEFT JOIN linkis_stream_job_version v ON v.job_id = j.id AND v.version = j.current_version
+ LEFT JOIN (SELECT MAX(id) id, job_id, version FROM linkis_stream_task GROUP BY job_id,version) gt
+ ON gt.job_id = j.id AND gt.version = v.version
+ LEFT JOIN linkis_stream_task t ON t.id = gt.id
+ LEFT JOIN linkis_stream_project lsp ON lsp.name = j.project_name
+ ) aa
+
+
+ AND aa.`project_name` = #{projectName}
+
+
+ AND aa.`name` like concat('%', #{name}, '%')
+
+
+ AND aa.`status`=#{status}
+
+
+ AND aa.`create_by` = #{createBy}
+
+ AND EXISTS (SELECT 1 FROM linkis_stream_project_privilege lspp WHERE lspp.project_id =aa.project_id
+ AND lspp.privilege in (1,2,3) AND lspp.user_name = #{userName})
+
+ order by aa.start_time desc
+
+
+
+ select *
+ from linkis_stream_job_version where job_id=#{jobId} order by version desc
+
+
+
+
+ SELECT * FROM linkis_stream_job_version WHERE job_id=#{jobId} ORDER BY id desc,version desc LIMIT 1
+
+
+
+ select
+ *
+ from linkis_stream_job_version
+
+ job_id=#{jobId}
+ AND version=#{version}
+
+
+
+
+ SELECT j.id,v.version,j.description,DATE_FORMAT(v.create_time,"%Y-%m-%d %H:%i:%s") AS releaseTime,j.create_by AS createBy,j.project_name
+ FROM `linkis_stream_job` j , linkis_stream_job_version v
+ WHERE v.job_id = j.id AND j.id = #{jobId} AND v.version=#{version}
+
+
+
+ SELECT j.id,v.version,j.description,DATE_FORMAT(v.create_time,"%Y-%m-%d %H:%i:%s") AS releaseTime,j.create_by AS createBy,j.project_name
+ FROM `linkis_stream_job` j INNER JOIN linkis_stream_job_version v
+ ON j.id = #{jobId} AND v.job_id = j.id order by v.version desc
+
+
+ SELECT
+ j.`id`,j.`project_name`,j.`name`,j.`job_type`,j.`label`,j.`description`,j.`submit_user`,j.`submit_user` as create_by, j.`current_version`
+ FROM `linkis_stream_job` j LEFT JOIN `linkis_stream_job_version` l ON j.`id`= l.job_id AND j.project_name = #{projectName}
+
+
+
+ SELECT
+ j.`id`,j.`project_name`,j.`name`,j.`job_type`,j.`label`,j.`description`,j.`submit_user`,j.`current_version`
+ FROM `linkis_stream_job` j JOIN `linkis_stream_job_version` l ON j.`id`=l.job_id AND j.project_name = #{projectName} AND j.`current_version` = l.version
+ AND j.name = #{jobName}
+
+
+
+ SELECT FROM
+ linkis_stream_job WHERE project_name = #{projectName} AND `name` = #{jobName} FOR UPDATE;
+
+
+
+ SELECT FROM
+ linkis_stream_job WHERE id = #{jobId};
+
+
+
+ INSERT INTO linkis_stream_job(`project_name`,`name`,`create_by`,`label`,`description`,`job_type`,`submit_user`, `current_version`, `create_time`)
+ VALUES(#{projectName},#{name},#{createBy},#{label},#{description},#{jobType},#{submitUser},#{currentVersion, jdbcType=VARCHAR},#{createTime})
+
+
+
+ INSERT INTO linkis_stream_job_version(`job_id`,`version`,`source`,`job_content`,`create_time`,`create_by`)
+ VALUES(#{jobId},#{version},#{source},#{jobContent},#{createTime},#{createBy})
+
+
+
+
+ INSERT INTO linkis_stream_job_version_files(`job_id`,`job_version_id`,`file_name`,`version`,`store_path`,`store_type`,`create_time`,`create_by`)
+ VALUES
+ (#{jobId},#{jobVersionId},#{fileName},#{version},#{storePath},#{storeType},#{createTime},#{createBy})
+
+
+
+ SELECT * from linkis_stream_job_version_files where job_id = #{jobId} and job_version_id = #{jobVersionId}
+
+
+
+ UPDATE linkis_stream_job
+
+
+ label=#{label},
+
+
+ description=#{description},
+
+
+ current_version=#{currentVersion}
+
+
+ WHERE id=#{id}
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml
new file mode 100644
index 000000000..c3c39bc06
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml
@@ -0,0 +1,195 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ `id`,`job_version_id`,`job_id`,`status`,`start_time`,`last_update_time`,`err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
+
+
+
+ INSERT INTO linkis_stream_task(`job_version_id`,`job_id`,`version`,`status`,`start_time`,`last_update_time`,`err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`)
+ VALUES(#{jobVersionId},#{jobId},#{version},#{status},#{startTime},#{lastUpdateTime},#{errDesc},#{submitUser},#{linkisJobId},#{linkisJobInfo})
+
+
+
+ UPDATE linkis_stream_task
+
+
+ status=#{status},
+
+
+ last_update_time=#{lastUpdateTime},
+
+
+ linkis_job_id=#{linkisJobId},
+
+
+ linkis_job_info=#{linkisJobInfo},
+
+
+ err_desc=#{errDesc},
+
+
+ WHERE id=#{id}
+
+
+
+ UPDATE linkis_stream_task
+
+
+ status=#{task.status},
+
+
+ last_update_time=#{task.lastUpdateTime},
+
+
+ linkis_job_id=#{task.linkisJobId},
+
+
+ linkis_job_info=#{task.linkisJobInfo},
+
+
+ err_desc=#{task.errDesc},
+
+
+ WHERE id=#{task.id} AND status = #{status};
+
+
+
+ SELECT `id`,`job_version_id`,`job_id`, status
+ ,`start_time`,`last_update_time`,
+ `err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
+ FROM linkis_stream_task
+
+
+ AND job_version_id=#{jobVersionId}
+
+
+ AND version=#{version}
+
+
+ ORDER BY start_time DESC
+
+
+
+ SELECT `id`,`job_version_id`,`job_id`, status
+ ,`start_time`,`last_update_time`,
+ `err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
+ FROM linkis_stream_task
+
+
+ AND job_version_id=#{jobVersionId}
+
+
+ AND version=#{version}
+
+
+ ORDER BY start_time DESC, id DESC LIMIT 1
+
+
+
+ SELECT `id`,`job_version_id`,`job_id`,`version`, status
+ ,`start_time`,`last_update_time`,
+ `err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
+ FROM linkis_stream_task where `job_id`=#{jobId} ORDER BY start_time DESC, id DESC LIMIT 1
+
+
+
+ SELECT `id`,`job_version_id`,`job_id`,`version`, status
+ ,`start_time`,`last_update_time`,
+ `err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
+ FROM linkis_stream_task where `job_id`=#{jobId} AND linkis_job_id IS NOT NULL ORDER BY start_time DESC, id DESC LIMIT 1
+
+
+
+ SELECT `id`,`job_version_id`,`job_id`, status
+ ,`start_time`,`last_update_time`,
+ `err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
+ FROM linkis_stream_task where `job_id`=#{jobId} ORDER BY start_time DESC, id DESC LIMIT ${count}
+
+
+
+ SELECT
+ FROM linkis_stream_task
+ WHERE `job_id` = #{jobId} AND `status` 5 AND `status` =]]> 2
+
+
+
+ SELECT
+
+ FROM linkis_stream_task
+
+
+ id=#{id}
+
+
+
+
+
+ SELECT
+
+ FROM linkis_stream_task
+
+ status in
+
+ #{status}
+
+
+
+
+ SELECT
+
+ FROM linkis_stream_task where job_id = #{jobId} and job_version_id = #{jobVersionId}
+ order by start_time desc
+
+
+
+ SELECT linkis_job_info FROM linkis_stream_task WHERE job_id = #{jobId} and version = #{version} ORDER BY start_time DESC limit 1
+
+
+
+ SELECT id, job_id, status, version, err_desc FROM linkis_stream_task WHERE id IN
+ (SELECT max(id) FROM linkis_stream_task WHERE job_id IN
+
+ #{item}
+ GROUP BY job_id)
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java
new file mode 100644
index 000000000..41032c6c8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+import javax.validation.constraints.NotBlank;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Created by v_wbyynie on 2021/9/16.
+ */
+public class MetaJsonInfo {
+
+ private String workspaceName;
+
+ /**
+ * 项目名
+ */
+ @NotBlank(message = "projectName is null")
+ private String projectName;
+
+ /**
+ * 作业名
+ */
+ @NotBlank(message = "jobName is null")
+ private String jobName;
+
+ /**
+ * 目前只支持flink.sql、flink.jar
+ */
+ @NotBlank(message = "jobType is null")
+ private String jobType;
+
+ private String comment;
+
+ /**
+ * 应用标签
+ */
+ private String tags;
+
+ /**
+ * 作业描述
+ */
+ private String description;
+
+
+ private Map jobContent;
+
+ /**
+ * Job configuration
+ */
+ private Map jobConfig;
+ private String metaInfo;
+
+ public String getMetaInfo() {
+ return metaInfo;
+ }
+
+ public void setMetaInfo(String metaInfo) {
+ this.metaInfo = metaInfo;
+ }
+
+ public String getWorkspaceName() {
+ return workspaceName;
+ }
+
+ public void setWorkspaceName(String workspaceName) {
+ this.workspaceName = workspaceName;
+ }
+
+ public String getProjectName() {
+ return projectName;
+ }
+
+ public void setProjectName(String projectName) {
+ this.projectName = projectName;
+ }
+
+ public String getJobName() {
+ return jobName;
+ }
+
+ public void setJobName(String jobName) {
+ this.jobName = jobName;
+ }
+
+ public String getJobType() {
+ return jobType;
+ }
+
+ public void setJobType(String jobType) {
+ this.jobType = jobType;
+ }
+
+ public String getTags() {
+ return tags;
+ }
+
+ public void setTags(String tags) {
+ this.tags = tags;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public Map getJobContent() {
+ return jobContent;
+ }
+
+ public void setJobContent(Map jobContent) {
+ this.jobContent = jobContent;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
+
+ public Map getJobConfig() {
+ return jobConfig;
+ }
+
+ public void setJobConfig(Map jobConfig) {
+ this.jobConfig = jobConfig;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamAlertRecord.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamAlertRecord.java
new file mode 100644
index 000000000..c379217a8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamAlertRecord.java
@@ -0,0 +1,107 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+import java.util.Date;
+
+
+public class StreamAlertRecord {
+
+ private Long id;
+
+ private Long jobId;
+
+ private String alertLevel;
+
+ private String alertUser;
+
+ private String alertMsg;
+
+ private Long jobVersionId;
+
+ private Long taskId;
+
+ private Date createTime;
+
+ private int status;
+
+ private String errorMsg;
+
+ public String getErrorMsg() {
+ return errorMsg;
+ }
+
+ public void setErrorMsg(String errorMsg) {
+ this.errorMsg = errorMsg;
+ }
+
+ public Long getJobVersionId() {
+ return jobVersionId;
+ }
+
+ public void setJobVersionId(Long jobVersionId) {
+ this.jobVersionId = jobVersionId;
+ }
+
+ public Long getTaskId() {
+ return taskId;
+ }
+
+ public void setTaskId(Long taskId) {
+ this.taskId = taskId;
+ }
+
+ public Date getCreateTime() {
+ return createTime;
+ }
+
+ public void setCreateTime(Date createTime) {
+ this.createTime = createTime;
+ }
+
+ public int getStatus() {
+ return status;
+ }
+
+ public void setStatus(int status) {
+ this.status = status;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getAlertLevel() {
+ return alertLevel;
+ }
+
+ public void setAlertLevel(String alertLevel) {
+ this.alertLevel = alertLevel;
+ }
+
+ public String getAlertUser() {
+ return alertUser;
+ }
+
+ public void setAlertUser(String alertUser) {
+ this.alertUser = alertUser;
+ }
+
+ public String getAlertMsg() {
+ return alertMsg;
+ }
+
+ public void setAlertMsg(String alertMsg) {
+ this.alertMsg = alertMsg;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java
new file mode 100644
index 000000000..a86a612ca
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+import java.util.Date;
+
+
+public class StreamJob {
+ private Long id;
+ private Long workspaceName;
+ private String name;
+ private String projectName;
+ private String jobType;
+ private String createBy;
+ private String label;
+ private String description;
+ private String submitUser;
+ private Date createTime;
+ /**
+ * Current version tab in used
+ */
+ private String currentVersion;
+
+ public Long getWorkspaceName() {
+ return workspaceName;
+ }
+
+ public void setWorkspaceName(Long workspaceName) {
+ this.workspaceName = workspaceName;
+ }
+
+ public Date getCreateTime() {
+ return createTime;
+ }
+
+ public void setCreateTime(Date createTime) {
+ this.createTime = createTime;
+ }
+
+ public String getProjectName() {
+ return projectName;
+ }
+
+ public void setProjectName(String projectName) {
+ this.projectName = projectName;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getCreateBy() {
+ return createBy;
+ }
+
+ public void setCreateBy(String createBy) {
+ this.createBy = createBy;
+ }
+
+ public String getLabel() {
+ return label;
+ }
+
+ public void setLabel(String label) {
+ this.label = label;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public void setDescription(String description) {
+ this.description = description;
+ }
+
+ public String getSubmitUser() {
+ return submitUser;
+ }
+
+ public void setSubmitUser(String submitUser) {
+ this.submitUser = submitUser;
+ }
+
+ public String getJobType() {
+ return jobType;
+ }
+
+ public void setJobType(String jobType) {
+ this.jobType = jobType;
+ }
+
+ public String getCurrentVersion() {
+ return currentVersion;
+ }
+
+ public void setCurrentVersion(String currentVersion) {
+ this.currentVersion = currentVersion;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java
new file mode 100755
index 000000000..5e08bc78f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+
+import java.util.Date;
+
+public class StreamJobVersion {
+ private Long id;
+ private Long jobId;
+ private String version;
+ private String source;
+ private String jobContent;
+ private String comment;
+ private Date createTime;
+ private String createBy;
+
+ public Date getCreateTime() {
+ return createTime;
+ }
+
+ public void setCreateTime(Date createTime) {
+ this.createTime = createTime;
+ }
+
+ public String getCreateBy() {
+ return createBy;
+ }
+
+ public void setCreateBy(String createBy) {
+ this.createBy = createBy;
+ }
+
+ public String getSource() {
+ return source;
+ }
+
+ public void setSource(String source) {
+ this.source = source;
+ }
+
+ public String getJobContent() {
+ return jobContent;
+ }
+
+ public void setJobContent(String jobContent) {
+ this.jobContent = jobContent;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ public String getComment() {
+ return comment;
+ }
+
+ public void setComment(String comment) {
+ this.comment = comment;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java
new file mode 100644
index 000000000..3f0f83187
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+import java.util.Date;
+
+/**
+ * Created by v_wbyynie on 2021/9/16.
+ */
+public class StreamJobVersionFiles implements StreamisFile {
+
+ private Long id;
+ private Long jobId;
+ private Long jobVersionId;
+ private String fileName;
+ /**
+ * 文件版本号,由用户上传时指定的
+ */
+ private String version;
+ /**
+ * '如:{"resource":"22edar22", "version": "v0001"}',
+ */
+ private String storePath;
+ /**
+ * '存储类型,一般就是bml',
+ */
+ private String storeType = StreamisFile.BML_STORE_TYPE;
+ private Date createTime;
+ private String createBy;
+
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Long getJobVersionId() {
+ return jobVersionId;
+ }
+
+ public void setJobVersionId(Long jobVersionId) {
+ this.jobVersionId = jobVersionId;
+ }
+
+ @Override
+ public String getFileName() {
+ return fileName;
+ }
+
+ public void setFileName(String fileName) {
+ this.fileName = fileName;
+ }
+
+ @Override
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ @Override
+ public String getStorePath() {
+ return storePath;
+ }
+
+ public void setStorePath(String storePath) {
+ this.storePath = storePath;
+ }
+
+ @Override
+ public String getStoreType() {
+ return storeType;
+ }
+
+ public void setStoreType(String storeType) {
+ this.storeType = storeType;
+ }
+
+ public Date getCreateTime() {
+ return createTime;
+ }
+
+ public void setCreateTime(Date createTime) {
+ this.createTime = createTime;
+ }
+
+ @Override
+ public String getCreateBy() {
+ return createBy;
+ }
+
+ public void setCreateBy(String createBy) {
+ this.createBy = createBy;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamProject.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamProject.java
new file mode 100755
index 000000000..4105bf936
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamProject.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+
+public class StreamProject {
+ private Long id;
+ private Long workspaceId;
+ private String name;
+ private String createBy;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Long getWorkspaceId() {
+ return workspaceId;
+ }
+
+ public void setWorkspaceId(Long workspaceId) {
+ this.workspaceId = workspaceId;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ public String getCreateBy() {
+ return createBy;
+ }
+
+ public void setCreateBy(String createBy) {
+ this.createBy = createBy;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java
new file mode 100755
index 000000000..ac4064e3c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+import java.util.Calendar;
+import java.util.Date;
+
+
+public class StreamTask {
+ private Long id;
+ private Long jobVersionId;
+ private Long jobId;
+ private String submitUser;
+ private Date startTime;
+ private Date lastUpdateTime;
+ private String linkisJobId;
+ private String linkisJobInfo;
+ private String errDesc;
+ private String version;
+ private Integer status;
+
+ public StreamTask(){
+ Calendar calendar = Calendar.getInstance();
+ this.lastUpdateTime = calendar.getTime();
+ this.startTime = calendar.getTime();
+ }
+
+ public StreamTask(Long jobId, Long jobVersionId, String version, String submitUser){
+ this();
+ this.jobId = jobId;
+ this.jobVersionId = jobVersionId;
+ this.version = version;
+ this.submitUser = submitUser;
+ }
+ public String getVersion() {
+ return version;
+ }
+
+ public void setVersion(String version) {
+ this.version = version;
+ }
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Date getStartTime() {
+ return startTime;
+ }
+
+ public void setStartTime(Date startTime) {
+ this.startTime = startTime;
+ }
+
+ public Date getLastUpdateTime() {
+ return lastUpdateTime;
+ }
+
+ public void setLastUpdateTime(Date lastUpdateTime) {
+ this.lastUpdateTime = lastUpdateTime;
+ }
+
+ public String getErrDesc() {
+ return errDesc;
+ }
+
+ public void setErrDesc(String errDesc) {
+ this.errDesc = errDesc;
+ }
+
+ public Long getJobVersionId() {
+ return jobVersionId;
+ }
+
+ public void setJobVersionId(Long jobVersionId) {
+ this.jobVersionId = jobVersionId;
+ }
+
+ public Integer getStatus() {
+ return status;
+ }
+
+ public void setStatus(Integer status) {
+ this.status = status;
+ }
+
+ public String getSubmitUser() {
+ return submitUser;
+ }
+
+ public void setSubmitUser(String submitUser) {
+ this.submitUser = submitUser;
+ }
+
+ public String getLinkisJobId() {
+ return linkisJobId;
+ }
+
+ public void setLinkisJobId(String linkisJobId) {
+ this.linkisJobId = linkisJobId;
+ }
+
+ public String getLinkisJobInfo() {
+ return linkisJobInfo;
+ }
+
+ public void setLinkisJobInfo(String linkisJobInfo) {
+ this.linkisJobInfo = linkisJobInfo;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamisFile.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamisFile.java
new file mode 100644
index 000000000..9f85630ea
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamisFile.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+/**
+ * Created by enjoyyin on 2021/9/23.
+ */
+public interface StreamisFile {
+
+ String BML_STORE_TYPE = "bml";
+
+ String getFileName();
+
+ String getVersion();
+
+ String getStorePath();
+
+ String getStoreType();
+
+ String getCreateBy();
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java
new file mode 100644
index 000000000..1e101a8a4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+@JsonInclude(JsonInclude.Include.NON_EMPTY)
+public class ExecResultVo extends ScheduleResultVo {
+
+ /**
+ * Job id
+ */
+ private Long jobId;
+
+ /**
+ * Task id
+ */
+ private Long taskId;
+
+ public ExecResultVo(){
+
+ }
+
+ public ExecResultVo(Long jobId, Long taskId){
+ this.jobId = jobId;
+ this.taskId = taskId;
+ }
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Long getTaskId() {
+ return taskId;
+ }
+
+ public void setTaskId(Long taskId) {
+ this.taskId = taskId;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java
new file mode 100644
index 000000000..f01f301f3
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java
@@ -0,0 +1,205 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo;
+
+import java.util.List;
+
+public class JobDetailsVo {
+
+ private List realTimeTraffic;
+ private List dataNumber;
+ private List loadCondition;
+ private LinkisJobInfo linkisJobInfo;
+
+ public LinkisJobInfo getLinkisJobInfo() {
+ return linkisJobInfo;
+ }
+
+ public void setLinkisJobInfo(LinkisJobInfo linkisJobInfo) {
+ this.linkisJobInfo = linkisJobInfo;
+ }
+
+ public List getRealTimeTraffic() {
+ return realTimeTraffic;
+ }
+
+ public void setRealTimeTraffic(List realTimeTraffic) {
+ this.realTimeTraffic = realTimeTraffic;
+ }
+
+ public List getDataNumber() {
+ return dataNumber;
+ }
+
+ public void setDataNumber(List dataNumber) {
+ this.dataNumber = dataNumber;
+ }
+
+ public List getLoadCondition() {
+ return loadCondition;
+ }
+
+ public void setLoadCondition(List loadCondition) {
+ this.loadCondition = loadCondition;
+ }
+
+ public static class RealTimeTrafficDTO {
+ private String sourceKey;
+ private String sourceSpeed;
+ private String transformKey;
+ private String transformSpeed;
+ private String sinkKey;
+ private String sinkSpeed;
+
+ public String getSourceKey() {
+ return sourceKey;
+ }
+
+ public void setSourceKey(String sourceKey) {
+ this.sourceKey = sourceKey;
+ }
+
+ public String getSourceSpeed() {
+ return sourceSpeed;
+ }
+
+ public void setSourceSpeed(String sourceSpeed) {
+ this.sourceSpeed = sourceSpeed;
+ }
+
+ public String getTransformKey() {
+ return transformKey;
+ }
+
+ public void setTransformKey(String transformKey) {
+ this.transformKey = transformKey;
+ }
+
+ public String getTransformSpeed() {
+ return transformSpeed;
+ }
+
+ public void setTransformSpeed(String transformSpeed) {
+ this.transformSpeed = transformSpeed;
+ }
+
+ public String getSinkKey() {
+ return sinkKey;
+ }
+
+ public void setSinkKey(String sinkKey) {
+ this.sinkKey = sinkKey;
+ }
+
+ public String getSinkSpeed() {
+ return sinkSpeed;
+ }
+
+ public void setSinkSpeed(String sinkSpeed) {
+ this.sinkSpeed = sinkSpeed;
+ }
+ }
+
+ public static class DataNumberDTO {
+ private String dataName;
+ private Integer dataNumber;
+
+ public String getDataName() {
+ return dataName;
+ }
+
+ public void setDataName(String dataName) {
+ this.dataName = dataName;
+ }
+
+ public Integer getDataNumber() {
+ return dataNumber;
+ }
+
+ public void setDataNumber(Integer dataNumber) {
+ this.dataNumber = dataNumber;
+ }
+ }
+
+ public static class LoadConditionDTO {
+ private String type;
+ private String host;
+ private String memory;
+ private String totalMemory;
+ private String gcTotalTime;
+ private String gcLastTime;
+ private String gcLastConsume;
+
+ public String getType() {
+ return type;
+ }
+
+ public void setType(String type) {
+ this.type = type;
+ }
+
+ public String getHost() {
+ return host;
+ }
+
+ public void setHost(String host) {
+ this.host = host;
+ }
+
+
+ public String getGcTotalTime() {
+ return gcTotalTime;
+ }
+
+ public void setGcTotalTime(String gcTotalTime) {
+ this.gcTotalTime = gcTotalTime;
+ }
+
+ public String getGcLastTime() {
+ return gcLastTime;
+ }
+
+ public void setGcLastTime(String gcLastTime) {
+ this.gcLastTime = gcLastTime;
+ }
+
+ public String getGcLastConsume() {
+ return gcLastConsume;
+ }
+
+ public void setGcLastConsume(String gcLastConsume) {
+ this.gcLastConsume = gcLastConsume;
+ }
+
+ public String getMemory() {
+ return memory;
+ }
+
+ public void setMemory(String memory) {
+ this.memory = memory;
+ }
+
+ public String getTotalMemory() {
+ return totalMemory;
+ }
+
+ public void setTotalMemory(String totalMemory) {
+ this.totalMemory = totalMemory;
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java
new file mode 100644
index 000000000..d8b611f6c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java
@@ -0,0 +1,19 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+/**
+ * Job inspect vo
+ */
+@JsonInclude(JsonInclude.Include.NON_EMPTY)
+public interface JobInspectVo {
+
+ enum Types{
+ VERSION, SNAPSHOT, STATUS
+ }
+ @JsonIgnore
+ String getInspectName();
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobProgressVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobProgressVo.java
new file mode 100644
index 000000000..973c06a64
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobProgressVo.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
/**
 * View object reporting the execution progress of one task.
 */
public class JobProgressVo {

    /** Identifier of the task being reported. */
    private Long taskId;

    /** Progress value of the task. */
    private Integer progress;

    public Integer getProgress() {
        return progress;
    }

    public void setProgress(Integer progress) {
        this.progress = progress;
    }

    public Long getTaskId() {
        return taskId;
    }

    public void setTaskId(Long taskId) {
        this.taskId = taskId;
    }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java
new file mode 100644
index 000000000..59f89b0c1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import java.util.Locale;
+
+public class JobSnapshotInspectVo implements JobInspectVo{
+ /**
+ * Path
+ */
+ private String path;
+
+ @Override
+ public String getInspectName() {
+ return Types.SNAPSHOT.name().toLowerCase(Locale.ROOT);
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ public void setPath(String path) {
+ this.path = path;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobStatusVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobStatusVo.java
new file mode 100644
index 000000000..0ecca29b4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobStatusVo.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+@JsonInclude(JsonInclude.Include.NON_EMPTY)
+public class JobStatusVo {
+ /**
+ * Job id
+ */
+ private Long jobId;
+
+ /**
+ * Status name
+ */
+ private String status;
+
+ /**
+ * Status code
+ */
+ private int statusCode;
+
+ /**
+ * Message
+ */
+ private String message;
+
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public String getStatus() {
+ return status;
+ }
+
+ public void setStatus(String status) {
+ this.status = status;
+ }
+
+ public int getStatusCode() {
+ return statusCode;
+ }
+
+ public void setStatusCode(int statusCode) {
+ this.statusCode = statusCode;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java
new file mode 100644
index 000000000..2f06703dc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java
@@ -0,0 +1,43 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobVersion;
+
+import java.util.Locale;
+
+/**
+ * Version inspect
+ */
+public class JobVersionInspectVo implements JobInspectVo{
+
+ /**
+ * Current version
+ */
+ private StreamJobVersion now;
+
+ /**
+ * Last version
+ */
+ private StreamJobVersion last;
+
+ @Override
+ public String getInspectName() {
+ return Types.VERSION.name().toLowerCase(Locale.ROOT);
+ }
+
+
+ public StreamJobVersion getNow() {
+ return now;
+ }
+
+ public void setNow(StreamJobVersion now) {
+ this.now = now;
+ }
+
+ public StreamJobVersion getLast() {
+ return last;
+ }
+
+ public void setLast(StreamJobVersion last) {
+ this.last = last;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java
new file mode 100644
index 000000000..ac41b5d0b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+@JsonInclude(JsonInclude.Include.NON_EMPTY)
+public class PauseResultVo extends ScheduleResultVo {
+ /**
+ * Job id
+ */
+ private Long jobId;
+
+ /**
+ * Task id
+ */
+ private Long taskId;
+
+ /**
+ * Snapshot path
+ */
+ private String snapshotPath;
+
+ public PauseResultVo(){
+ }
+
+ public PauseResultVo(Long jobId, Long taskId){
+ this.jobId = jobId;
+ this.taskId = taskId;
+ }
+ public Long getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(Long jobId) {
+ this.jobId = jobId;
+ }
+
+ public Long getTaskId() {
+ return taskId;
+ }
+
+ public void setTaskId(Long taskId) {
+ this.taskId = taskId;
+ }
+
+ public String getSnapshotPath() {
+ return snapshotPath;
+ }
+
+ public void setSnapshotPath(String snapshotPath) {
+ this.snapshotPath = snapshotPath;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PublishRequestVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PublishRequestVo.java
new file mode 100644
index 000000000..6969d1736
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PublishRequestVo.java
@@ -0,0 +1,231 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
/**
 * Request view object used when publishing a job to the Streamis job manager.
 *
 * <p>Carries the execution code, material/storage details and audit fields
 * the publish endpoint needs to create or upgrade a Streamis job.
 */
public class PublishRequestVo {

    /** Id of the owning project. */
    private Long projectId;

    /** Code type, e.g. sql or scala. */
    private String type;

    /** Streamis job type. */
    private String jobType;

    /** Name of the uploaded file. */
    private String fileName;

    /** Path where the material is stored. */
    private String storePath;

    /** Storage type of the material. */
    private String storeType;

    /** Execution code submitted to the job manager. */
    private String executionCode;

    /** Name of the Streamis job. */
    private String streamisJobName;

    /** Creator of the job. */
    private String createBy;

    /** Last modifier of the job. */
    private String updateBy;

    /** Description of the job. */
    private String description;

    /** Source of the job. */
    private String source;

    /** Meta information attached to the job. */
    private String metaInfo;

    /** Version of the material in BML. */
    private String bmlVersion;

    /** Tags attached to the job. */
    private String tags;

    /** User performing the publish. */
    private String publishUser;

    /**
     * Job version such as v0001/v0002; used to decide whether a new
     * job-manager job should be created.
     */
    private String version;

    /** Name of the project the job must be published to. */
    private String projectName;

    public Long getProjectId() { return projectId; }
    public void setProjectId(Long projectId) { this.projectId = projectId; }

    public String getType() { return type; }
    public void setType(String type) { this.type = type; }

    public String getJobType() { return jobType; }
    public void setJobType(String jobType) { this.jobType = jobType; }

    public String getFileName() { return fileName; }
    public void setFileName(String fileName) { this.fileName = fileName; }

    public String getStorePath() { return storePath; }
    public void setStorePath(String storePath) { this.storePath = storePath; }

    public String getStoreType() { return storeType; }
    public void setStoreType(String storeType) { this.storeType = storeType; }

    public String getExecutionCode() { return executionCode; }
    public void setExecutionCode(String executionCode) { this.executionCode = executionCode; }

    public String getStreamisJobName() { return streamisJobName; }
    public void setStreamisJobName(String streamisJobName) { this.streamisJobName = streamisJobName; }

    public String getCreateBy() { return createBy; }
    public void setCreateBy(String createBy) { this.createBy = createBy; }

    public String getUpdateBy() { return updateBy; }
    public void setUpdateBy(String updateBy) { this.updateBy = updateBy; }

    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }

    public String getSource() { return source; }
    public void setSource(String source) { this.source = source; }

    public String getMetaInfo() { return metaInfo; }
    public void setMetaInfo(String metaInfo) { this.metaInfo = metaInfo; }

    public String getBmlVersion() { return bmlVersion; }
    public void setBmlVersion(String bmlVersion) { this.bmlVersion = bmlVersion; }

    public String getTags() { return tags; }
    public void setTags(String tags) { this.tags = tags; }

    public String getPublishUser() { return publishUser; }
    public void setPublishUser(String publishUser) { this.publishUser = publishUser; }

    public String getVersion() { return version; }
    public void setVersion(String version) { this.version = version; }

    public String getProjectName() { return projectName; }
    public void setProjectName(String projectName) { this.projectName = projectName; }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/QueryJobListVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/QueryJobListVo.java
new file mode 100644
index 000000000..0e08d538e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/QueryJobListVo.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import java.util.Date;
+
/**
 * Row view object for the job-list query result.
 */
public class QueryJobListVo {

    /** Job id. */
    private Long id;

    /** Job name. */
    private String name;

    // NOTE(review): declared Long although named like a name -- presumably a
    // workspace id; confirm against the query mapping.
    private Long workspaceName;

    /** Id of the owning project (string form). */
    private String projectId;

    /** Name of the owning project. */
    private String projectName;

    /** Job type. */
    private String jobType;

    /** Label attached to the job. */
    private String label;

    /** Creator of the job. */
    private String createBy;

    /** Creation time. */
    private Date createTime;

    /** Numeric status of the job. */
    private Integer status;

    /** Version currently in use. */
    private String version;

    /** Latest published version. */
    private String lastVersion;

    /** Publish time of the latest version. */
    private Date lastVersionTime;

    /** Number of versions ahead of the running one. */
    private Integer versionForwards;

    /** Job description. */
    private String description;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }

    public String getName() { return name; }
    public void setName(String name) { this.name = name; }

    public Long getWorkspaceName() { return workspaceName; }
    public void setWorkspaceName(Long workspaceName) { this.workspaceName = workspaceName; }

    public String getProjectId() { return projectId; }
    public void setProjectId(String projectId) { this.projectId = projectId; }

    public String getProjectName() { return projectName; }
    public void setProjectName(String projectName) { this.projectName = projectName; }

    public String getJobType() { return jobType; }
    public void setJobType(String jobType) { this.jobType = jobType; }

    public String getLabel() { return label; }
    public void setLabel(String label) { this.label = label; }

    public String getCreateBy() { return createBy; }
    public void setCreateBy(String createBy) { this.createBy = createBy; }

    public Date getCreateTime() { return createTime; }
    public void setCreateTime(Date createTime) { this.createTime = createTime; }

    public Integer getStatus() { return status; }
    public void setStatus(Integer status) { this.status = status; }

    public String getVersion() { return version; }
    public void setVersion(String version) { this.version = version; }

    public String getLastVersion() { return lastVersion; }
    public void setLastVersion(String lastVersion) { this.lastVersion = lastVersion; }

    public Date getLastVersionTime() { return lastVersionTime; }
    public void setLastVersionTime(Date lastVersionTime) { this.lastVersionTime = lastVersionTime; }

    public Integer getVersionForwards() { return versionForwards; }
    public void setVersionForwards(Integer versionForwards) { this.versionForwards = versionForwards; }

    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ScheduleResultVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ScheduleResultVo.java
new file mode 100644
index 000000000..ee7cbfcbe
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ScheduleResultVo.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import java.util.HashMap;
+import java.util.Map;
+
/**
 * Result of a job schedule operation (launch/pause), carrying the schedule
 * identifier, state, progress and arbitrary metric values.
 */
public class ScheduleResultVo {

    /**
     * Schedule id
     */
    protected String scheduleId;

    /**
     * Schedule status
     */
    protected String scheduleState;

    /**
     * Error message
     */
    protected String message;

    /**
     * Progress, in [0, 1]; defaults to 0.
     */
    private double progress = 0d;

    /**
     * Metric values keyed by metric name.
     * Parameterized (was a raw {@code Map}) to avoid unchecked access;
     * erasure keeps this binary-compatible with existing callers.
     */
    private Map<String, Object> metric = new HashMap<>();

    public String getScheduleId() {
        return scheduleId;
    }

    public void setScheduleId(String scheduleId) {
        this.scheduleId = scheduleId;
    }

    public String getScheduleState() {
        return scheduleState;
    }

    public void setScheduleState(String scheduleState) {
        this.scheduleState = scheduleState;
    }

    public double getProgress() {
        return progress;
    }

    public void setProgress(double progress) {
        this.progress = progress;
    }

    public Map<String, Object> getMetric() {
        return metric;
    }

    public void setMetric(Map<String, Object> metric) {
        this.metric = metric;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/StreamTaskListVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/StreamTaskListVo.java
new file mode 100755
index 000000000..e36daaa03
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/StreamTaskListVo.java
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+
/**
 * Row view object for the task-list query of a stream job.
 */
public class StreamTaskListVo {

    /** Task id. */
    private Long taskId;

    /** Id of the job version this task ran with. */
    private Long jobVersionId;

    /** Job name. */
    private String jobName;

    /** Creator of the task. */
    private String creator;

    /** Job version label. */
    private String version;

    /** Task status. */
    private String status;

    /** Start time (formatted string). */
    private String startTime;

    /** End time (formatted string). */
    private String endTime;

    /** Elapsed run time (formatted string). */
    private String runTime;

    /** Cause of the stop, if any. */
    private String stopCause;

    /** Content of the job version. */
    private String versionContent;

    public Long getTaskId() { return taskId; }
    public void setTaskId(Long taskId) { this.taskId = taskId; }

    public Long getJobVersionId() { return jobVersionId; }
    public void setJobVersionId(Long jobVersionId) { this.jobVersionId = jobVersionId; }

    public String getJobName() { return jobName; }
    public void setJobName(String jobName) { this.jobName = jobName; }

    public String getCreator() { return creator; }
    public void setCreator(String creator) { this.creator = creator; }

    public String getVersion() { return version; }
    public void setVersion(String version) { this.version = version; }

    public String getVersionContent() { return versionContent; }
    public void setVersionContent(String versionContent) { this.versionContent = versionContent; }

    public String getStatus() { return status; }
    public void setStatus(String status) { this.status = status; }

    public String getStartTime() { return startTime; }
    public void setStartTime(String startTime) { this.startTime = startTime; }

    public String getEndTime() { return endTime; }
    public void setEndTime(String endTime) { this.endTime = endTime; }

    public String getRunTime() { return runTime; }
    public void setRunTime(String runTime) { this.runTime = runTime; }

    public String getStopCause() { return stopCause; }
    public void setStopCause(String stopCause) { this.stopCause = stopCause; }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/TaskCoreNumVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/TaskCoreNumVo.java
new file mode 100644
index 000000000..7bb738289
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/TaskCoreNumVo.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
/**
 * Core job metrics for a project: counts of tasks per state.
 * All counters default to 0.
 */
public class TaskCoreNumVo {

    /** Id of the project. */
    private Long projectId;

    /** Name of the project. */
    private String projectName;

    /** Number of failed tasks. */
    private Integer failureNum = 0;

    /** Number of running tasks. */
    private Integer runningNum = 0;

    /** Number of slow tasks. */
    private Integer slowTaskNum = 0;

    /** Number of alerting tasks. */
    private Integer alertNum = 0;

    /** Number of tasks waiting to be restarted. */
    private Integer waitRestartNum = 0;

    /** Number of completed tasks. */
    private Integer successNum = 0;

    /** Number of stopped tasks. */
    private Integer stoppedNum = 0;

    public Long getProjectId() { return projectId; }
    public void setProjectId(Long projectId) { this.projectId = projectId; }

    public String getProjectName() { return projectName; }
    public void setProjectName(String projectName) { this.projectName = projectName; }

    public Integer getFailureNum() { return failureNum; }
    public void setFailureNum(Integer failureNum) { this.failureNum = failureNum; }

    public Integer getRunningNum() { return runningNum; }
    public void setRunningNum(Integer runningNum) { this.runningNum = runningNum; }

    public Integer getSlowTaskNum() { return slowTaskNum; }
    public void setSlowTaskNum(Integer slowTaskNum) { this.slowTaskNum = slowTaskNum; }

    public Integer getAlertNum() { return alertNum; }
    public void setAlertNum(Integer alertNum) { this.alertNum = alertNum; }

    public Integer getWaitRestartNum() { return waitRestartNum; }
    public void setWaitRestartNum(Integer waitRestartNum) { this.waitRestartNum = waitRestartNum; }

    public Integer getSuccessNum() { return successNum; }
    public void setSuccessNum(Integer successNum) { this.successNum = successNum; }

    public Integer getStoppedNum() { return stoppedNum; }
    public void setStoppedNum(Integer stoppedNum) { this.stoppedNum = stoppedNum; }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/VersionDetailVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/VersionDetailVo.java
new file mode 100644
index 000000000..ca46845f9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/VersionDetailVo.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
/**
 * Detail view object for a single job version.
 */
public class VersionDetailVo {

    /** Version record id. */
    private Long id;

    /** Version label. */
    private String version;

    /** Version description. */
    private String description;

    /** Release time (formatted string). */
    private String releaseTime;

    /** Creator of the version. */
    private String createBy;

    /** Id of the owning project. */
    private Long projectId;

    /** Name of the owning project. */
    private String projectName;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }

    public String getVersion() { return version; }
    public void setVersion(String version) { this.version = version; }

    public String getDescription() { return description; }
    public void setDescription(String description) { this.description = description; }

    public String getReleaseTime() { return releaseTime; }
    public void setReleaseTime(String releaseTime) { this.releaseTime = releaseTime; }

    public String getCreateBy() { return createBy; }
    public void setCreateBy(String createBy) { this.createBy = createBy; }

    public Long getProjectId() { return projectId; }
    public void setProjectId(Long projectId) { this.projectId = projectId; }

    public String getProjectName() { return projectName; }
    public void setProjectName(String projectName) { this.projectName = projectName; }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileException.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileException.java
new file mode 100644
index 000000000..62cd9a0c1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileException.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+
/**
 * Error exception for file-related failures in the job manager module.
 * Thin wrapper over Linkis {@code ErrorException}; adds no state of its own.
 * Error codes presumably fall in the 30600-30700 range used by this module --
 * TODO confirm against FileExceptionManager.
 */
public class FileException extends ErrorException {
    /**
     * @param errCode numeric error code
     * @param desc    human-readable description
     */
    public FileException(int errCode, String desc) {
        super(errCode, desc);
    }

    /**
     * @param errCode     numeric error code
     * @param desc        human-readable description
     * @param ip          ip of the service raising the error
     * @param port        port of the service raising the error
     * @param serviceKind kind of the service raising the error
     */
    public FileException(int errCode, String desc, String ip, int port, String serviceKind) {
        super(errCode, desc, ip, port, serviceKind);
    }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileExceptionManager.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileExceptionManager.java
new file mode 100644
index 000000000..22153bb25
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileExceptionManager.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.manager.exception;
+
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class FileExceptionManager {
+ //30600-30700
+ private static Map desc = new HashMap