diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000..b1f35fc4f --- /dev/null +++ b/.gitattributes @@ -0,0 +1,4 @@ +*.js linguist-language=java +*.css linguist-language=java +*.html linguist-language=java +*.vue linguist-language=java \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..33e2be454 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,99 @@ +name: Bug report +title: "[Bug] " +description: Problems and issues with code of Streamis +labels: [bug, triage] +body: + - type: markdown + attributes: + value: | + Thank you for reporting the problem! + Please make sure what you are reporting is a bug with reproducible steps. To ask questions + or share ideas, please post on our [Discussion page](https://github.com/WeBankFinTech/Streamis/discussions) instead. + + - type: checkboxes + attributes: + label: Search before asking + description: > + Please make sure to search in the [issues](https://github.com/WeBankFinTech/Streamis/issues) first to see + whether the same issue was reported already. + options: + - label: > + I searched the [issues](https://github.com/WeBankFinTech/Streamis/issues) and found no similar + issues. + required: true + + - type: dropdown + attributes: + label: Streamis Component + description: | + What component are you using? Streamis has many modules, please make sure to choose the module in which + you found the bug. + multiple: true + options: + - "streamis-commons" + - "streamis-server" + - "streamis-job-manager" + - "streamis-job-launcher" + - "streamis-web" + validations: + required: true + + - type: textarea + attributes: + label: What happened + What you expected to happen + description: Describe 1. the bug 2. expected behavior 3. useful information (e.g., logs) + placeholder: > + Please provide the context in which the problem occurred and explain what happened. 
Further, + To Reproduce Steps to reproduce the behavior: 1. Go to '...' 2. Click on '....' 3. Scroll down to '.... 4. See error + please also explain why you think the behaviour is erroneous. It is extremely helpful if you can + copy and paste the fragment of logs showing the exact error messages or wrong behaviour here. + + **NOTE**: Expected behavior A clear and concise description of what you expected to happen. Screenshots If applicable, add screenshots to help explain your problem. + validations: + required: true + + - type: textarea + attributes: + label: Relevant platform + description: The platform where this issue occurred + placeholder: > + Please specify Desktop or Smartphone, Version / Dependencies / OS / Browser + validations: + required: true + + - type: textarea + attributes: + label: Reproduction script + description: > + Please provide a reproducible script. Providing a narrow reproduction (minimal / no external dependencies) will + help us triage and address issues in a timely manner! + placeholder: > + Please provide a short code snippet (less than 50 lines if possible) that can be copy-pasted to + reproduce the issue. The snippet should have **no external library dependencies** + (i.e., use fake or mock data / environments). + + **NOTE**: If the code snippet cannot be run by itself, the issue will be marked as "needs-repro-script" + until the repro instruction is updated. + validations: + required: true + + - type: textarea + attributes: + label: Anything else + description: Anything else we need to know? + placeholder: > + How often does this problem occur? (Once? Every time? Only when certain conditions are met?) + Any relevant logs to include? Are there other relevant issues? + + - type: checkboxes + attributes: + label: Are you willing to submit a PR? 
+ description: > + This is absolutely not required, but we are happy to guide you in the contribution process + especially if you already have a good understanding of how to implement the fix. + options: + - label: Yes I am willing to submit a PR! + + - type: markdown + attributes: + value: "Thanks for completing our form!" diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..40276a265 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: Ask a question or get support + url: https://github.com/WeBankFinTech/Streamis/discussions + about: Ask a question or request support for using Streamis \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..1e7492c46 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,63 @@ +name: Streamis feature request +description: Suggest an idea for Streamis project +title: "[Feature] " +labels: [enhancement] +body: + - type: markdown + attributes: + value: | + Thank you for finding the time to propose a new feature! + We really appreciate the community efforts to improve Streamis. + - type: checkboxes + attributes: + label: Search before asking + description: > + Please make sure to search in the [issues](https://github.com/WeBankFinTech/Streamis/issues) first to see + whether the same feature was requested already. + options: + - label: > + I searched the [issues](https://github.com/WeBankFinTech/Streamis/issues) and found no similar + feature requirement. + required: true + - type: textarea + attributes: + label: Problem Description + description: Is your feature request related to a problem? Please describe. 
+ + - type: textarea + attributes: + label: Description + description: A short description of your feature + + - type: textarea + attributes: + label: Use case + description: > + Describe the use case of your feature request. + placeholder: > + Describe the solution you'd like A clear and concise description of what you want to happen. + + - type: textarea + attributes: + label: Solutions + description: Describe alternatives you've considered A clear and concise description of any alternative solutions or features you've considered. + + - type: textarea + attributes: + label: Anything else + description: Anything else we need to know? + placeholder: > + Additional context Add any other context or screenshots about the feature request here. + + - type: checkboxes + attributes: + label: Are you willing to submit a PR? + description: > + This is absolutely not required, but we are happy to guide you in the contribution process + especially if you already have a good understanding of how to implement the feature. + options: + - label: Yes I am willing to submit a PR! + + - type: markdown + attributes: + value: "Thanks for completing our form!" diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..418271fff --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,27 @@ +### What is the purpose of the change +(For example: Streamis-Server defines the restful interfaces of Streamis, we can use linkis-httpclient to access it. +Related issues: #50. ) + +### Brief change log +(for example:) +- Define the restful interfaces of Streamis; +- Define the service and dao interfaces of Streamis. + +### Verifying this change +(Please pick either of the following options) +This change is a trivial rework / code cleanup without any test coverage. +(or) +This change is already covered by existing tests, such as (please describe tests). 
+(or) +This change added tests and can be verified as follows: +(example:) +- Added tests for creating and execute the Streamis jobs and verify the availability of different Streamis Job, such as flinkSQL, Jar. + +### Does this pull request potentially affect one of the following parts: +- Dependencies (does it add or upgrade a dependency): (yes / no) +- Anything that affects deployment: (yes / no / don't know) +- The Core framework, i.e., JobManager, Server.: (yes / no) + +### Documentation +- Does this pull request introduce a new feature? (yes / no) +- If yes, how is the feature documented? (not applicable / docs / JavaDocs / not documented) \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 000000000..7f3d71477 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,55 @@ +# +# Copyright 2019 WeBank. +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +name: Streamis CI Actions + +on: + push: + pull_request: + +jobs: + build: + + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: [14.17.3] + # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ + + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Set up JDK 8 + uses: actions/setup-java@v2 + with: + distribution: 'adopt' + java-version: 8 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node-version }} + - name: Build backend by maven + run: | + mvn -N install + mvn clean package + - name: Build frontend by node.js + run: | + cd web + npm install + npm run build diff --git a/.github/workflows/check_license.yml b/.github/workflows/check_license.yml new file mode 100644 index 000000000..3a9e1c01d --- /dev/null +++ b/.github/workflows/check_license.yml @@ -0,0 +1,48 @@ +# +# Copyright 2019 WeBank. +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +name: Streamis License check + +on: [push, pull_request] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout source + uses: actions/checkout@v2 + - name: Set up JDK 8 + uses: actions/setup-java@v2 + with: + java-version: '8' + distribution: 'adopt' + - name: mvn -N install + run: + mvn -N install + - name: License check with Maven + run: | + rat_file=`mvn apache-rat:check | { grep -oe "\\S\\+/rat.txt" || true; }` + echo "rat_file=$rat_file" + if [[ -n "$rat_file" ]];then echo "check error!" && cat $rat_file && exit 123;else echo "check success!" ;fi + - name: Upload the report + uses: actions/upload-artifact@v2 + with: + name: license-check-report + path: "**/target/rat.txt" diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000..d8b0f432b --- /dev/null +++ b/.gitignore @@ -0,0 +1,23 @@ +*.iml +.idea +.DS_Store +assembly/target +streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/target +streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/target +streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/target +streamis-jobmanager/streamis-job-launcher/streamis-job-launcher.iml +streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/target +streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/target +streamis-jobmanager/streamis-jobmanager-common/target +streamis-jobmanager/streamis-jobmanager-server/target +streamis-jobmanager/streamis-projectmanager-server/target + +streamis-project/streamis-project-common/target +streamis-project/streamis-project-server/target + +streamis-server/target +streamis-appconn/target +/logs/streamis-server.log +/logs/linkis.log +/test/target +/test/src \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, 
REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README-ZH.md b/README-ZH.md new file mode 100644 index 000000000..5723fe3d5 --- /dev/null +++ b/README-ZH.md @@ -0,0 +1,127 @@ +# Streamis + +[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html) + +[English](README.md) | 中文 + +## 引言 + +        Streamis 是 **微众银行** 联合 **天翼云**、**仙翁科技** 和 **萨摩耶云** 联合共建的流式应用开发管理系统。 + +        基于 [DataSphere Studio](https://github.com/WeBankFinTech/DataSphereStudio) 的框架化能力,以及底层对接 [Linkis](https://github.com/apache/incubator-linkis) 的 **Flink 引擎**,让用户低成本完成流式应用的开发、调试、发布和生产管理。 + +        未来还规划将以工作流式的图形化拖拽开发体验,将流式应用以Source节点、 +Dimension节点、Transform节点、Sink节点 和 [Visualis](https://github.com/WeBankFinTech/Visualis) 节点串连成一条流式工作流,让用户以更低的学习成本完成流式应用的开发、调试和发布。 + +---- + +## 核心特点 + +#### 1. 
基于 DSS 和 Linkis,打造领先的流式应用开发管理系统。 + +       以 Flink 为底层计算引擎,基于开发中心和生产中心隔离的架构设计模式,完全隔离开发权限与发布权限,隔离开发环境与生产环境,保证业务应用的高稳定性和高安全性。 + +       应用开发层与 DSS 的数据应用开发流程有机整合,提供极简的用户使用体验。 + +       应用执行层集成 Linkis 计算中间件,打造金融级具备高并发、高可用、多租户隔离和资源管控等能力的流式应用管理能力。 + +#### 2. 强大的流式应用开发调试能力。 + +       基于 DSS-Scriptis 提供流式应用的开发和调试功能,支持对 FlinkSQL 进行实时调试和结果集展示。 + +![流式开发中心](docs/images/开发中心.png) + +#### 3. 强大的流式应用生产中心能力。 + +       支持流式作业的多版本管理、全生命周期管理、监控告警、checkpoint 和 savepoint 管理能力。 + +![流式应用生产](docs/images/stream_product_center.png) + +       流式应用运行情况: + +![流式应用运行情况](docs/images/stream_job_detail.png) + +       流式应用参数配置: + +![流式应用配置](docs/images/stream_job_config_1.png) +![流式应用配置](docs/images/stream_job_config_2.png) + +       更多功能,请参考:[Streamis 用户手册](docs/zh_CN/0.2.4/使用文档/Streamis用户手册.md)。 + +---- + +## 依赖的生态组件 + +| 依赖的应用工具 | 描述 | Streamis 兼容版本 | +|--------------|---------------------------------------------------------------|--------------| +| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | 数据应用开发管理集成框架。以工作流式的图形化拖拽开发体验,将满足从数据交换、脱敏清洗、分析挖掘、质量检测、可视化展现、定时调度到数据输出应用等,数据应用开发全流程场景需求。 | >= DSS1.1.0(已发布)| +| [Linkis](https://github.com/apache/incubator-linkis) | 计算中间件 Apache Linkis,通过提供 REST/WebSocket/JDBC/SDK 等标准接口,上层应用可以方便地连接访问 MySQL/Spark/Hive/Presto/Flink 等底层引擎。 | >= Linkis1.1.1(已发布),部分功能需要Linkis 1.1.2支持 | + +---- + +## Demo试用环境 + +       正在部署中,敬请期待! 
+ +---- + +## 下载 + +       请前往 [Streamis releases](https://github.com/WeBankFinTech/Streamis/releases) 页面下载 Streamis 的已编译版本或源码包。 + +---- + +## 编译和安装部署 + +       请参考 [Streamis 安装部署文档](docs/zh_CN/0.2.4/Streamis安装文档.md) ,用于安装部署 Streamis 及其依赖环境。 + + +---- + +## 示例和使用指引 + +       请到 [用户使用文档](docs/zh_CN/0.2.4/使用文档/Streamis用户手册.md) ,了解如何快速使用 Streamis。 + +---- + +## Streamis 功能介绍 + +| 功能模组 | 描述 | Streamis | + | :----: | :----: |-------| + | 安装部署 | 部署难易程度和第三方依赖 | 一键部署,依赖Linkis Flink引擎 | + | 开发中心| FlinkSQL 流式应用实时开发、调试 | 支持,需集成DSS | + | 生产中心 | 流式应用管理运维能力 | 支持 | + | | 复用 Linkis 计算治理能力 | 支持 | + | | 支持 FlinkSQL 和 FlinkJar 包等方式发布 | 支持 | + | | 流式应用的多版本管理能力 | 支持 | + | | 流式应用的参数配置和告警能力 | 支持 | + | 服务高可用 | 应用高可用,服务多点,状态快照实现容错处理,故障不影响使用 | 支持 | + | 系统管理 | 节点、资源管理 | 支持 | + |权限管理 |任务的操作权限控制 |支持 | + +---- + +## 架构 + +![架构](images/zh_CN/readme/architecture.png) + +---- + +## 贡献 + +       我们非常欢迎和期待更多的贡献者参与共建 Streamis, 不论是代码、文档,或是其他能够帮助到社区的贡献形式。 + +## 联系我们 + +       对 Streamis 的任何问题和建议,敬请提交 [issue](https://github.com/WeBankFinTech/Streamis/issues),以便跟踪处理和经验沉淀共享。 + +       您也可以扫描下面的二维码,加入我们的 微信/QQ群,以获得更快速的响应。 + +![交流](images/zh_CN/readme/communication.png) + +---- + +## License + +        DSS is under the Apache 2.0 license. See the [License](LICENSE) file for details. + diff --git a/README.md b/README.md index 4f78a9656..2db81429d 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,127 @@ -# dw-streamis +# Streamis + +[![License](https://img.shields.io/badge/license-Apache%202-4EB1BA.svg)](https://www.apache.org/licenses/LICENSE-2.0.html) + +English | [中文](README-ZH.md) + +## Introduction + +       Streamis is an jointed development project for Streaming application development and management established by WeBank, CtYun, Samoyed Financial Cloud and XianWeng Technology. 
+ +       Based on the framework capabilities of [DataSphere Studio](https://github.com/WeBankFinTech/DataSphereStudio) and the underlying docking [Linkis](https://github.com/apache/incubator-linkis/blob/master/README.md) 's **Flink engine** allows users to complete the development, debugging, release and production management of streaming applications at low cost. + +       In the future, it is also planned to use a workflow-style graphical drag-and-drop development experience, and the streaming application will be based on the Source node, +The Dimension node, Transform node, Sink node and [Visualis](https://github.com/WeBankFinTech/Visualis) nodes are connected in series to form a streaming workflow, allowing users to complete the development of streaming applications at a lower learning cost. Debug and release. + +---- + +## Core features + +#### 1. Based on DSS and DSS-Scriptis, to create a leading streaming application development management system. + +       With Flink as the underlying computation engine, based on the architectural design pattern of the isolation between the development center and the production center, it completely isolates development permissions and publishing permissions, and isolates the development environment and production environment to ensure high stability and high security of streaming applications. + +       The application development layer is organically integrated with the data application development process of DSS, providing simplier user experience. + +       The application execution layer integrates Linkis to provide financial-level streaming application management capabilities with high concurrency, high availability, multi-tenant isolation, and resource management. + +#### 2. Powerful streaming application development and debugging capabilities. + +       Based on DSS-Scriptis, provides streaming application development and debugging functions, and supports real-time debugging and result set display of FlinkSQL. 
+ +![development center](docs/images/开发中心.png) + +#### 3. Powerful streaming application production center capabilities. + +       Supports multi-version management, full life cycle management, monitoring alarm, checkpoint and savepoint management capabilities of streaming jobs. + +![prod center](docs/images/stream_product_center_en.png) + +       Running information page: + +![Running information](docs/images/stream_job_detail_en.png) + +       Configurations page: + +![Configurations](docs/images/stream_job_config_en_1.png) +![Configurations](docs/images/stream_job_config_en_2.png) + +       For more features, please refer to: [User Manual](docs/en_US/userManual/StreamisUserManual.md). + +---- + +## Depended ecosystems + +| Depended Component | Description | Streamis compatibility | +| -------------- | -------------------------------------------------------------- | --------------| +| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | Data application development management framework. With a unified UI, the workflow-like graphical drag-and-drop development experience meets the entire lifecycle of data application development from data import, desensitization cleaning, data analysis, data mining, quality inspection, visualization, scheduling to data output applications, etc. | >= DSS1.1.0 (Released) | +| [Linkis](https://github.com/apache/incubator-linkis) | Apache Linkis, builds a layer of computation middleware, by using standard interfaces such as REST/WS/JDBC provided by Linkis, the upper applications can easily access the underlying engines such as MySQL/Spark/Hive/Presto/Flink, etc. | >= Linkis1.1.1 (Released),some functions need to be supported by linkis 1.1.2 | + +## Demo Trial environment + +       In progress, stay tuned! + +---- + +## Download + +       Please go to the [Streamis Releases](https://github.com/WeBankFinTech/Streamis/releases) Page to download a compiled version or a source code package of Streamis. 
+ +---- + +## Compile and install deployment + +please refer to [Streamis Installation and Deployment Document](docs/en_US/0.2.4/StreamisDeployment.md) for installing and deploying Streamis. + +---- + +## Examples and usage guidelines + +       Please visit to [User documentation](docs/en_US/userManual/StreamisUserManual.md), learn how to use Streamis quickly. + +---- + +## Features + +| Function Module | Description | Streamis | + | :----: | :----: |-------| +| UI | Integrated and convenient management interface and monitoring window | Integrated | +| Installation and deployment | Deployment difficulty and third-party dependencies | One-click deployment, relying on Linkis Flink engine | +| Development Center | FlinkSQL streaming application real-time development and debugging | Support, need to integrate DSS | +|Production Center | Streaming Application Management Operation and Maintenance Capability | Support | +| | Reuse Linkis computing governance capabilities | Support | +| | Support FlinkSQL and FlinkJar package release | Support | +| | Multi-version management capabilities | Support | +| | Configuration and alert management capabilities | Support | +| Service high availability | Multiple services,State snapshot for fault tolerance, failure does not affect the use | Application high availability | +| System Management | Node and Resource Management | Support | +| Permission management | Task operation permission control | Support | + +---- + +## Architecture + +![Architecture](images/en_US/readme/architecture.png) + +---- + +## Contributing + +       Contributions are always welcomed, we need more contributors to build Streamis together. either code, or doc, or other supports that could help the community. + +---- + +## Communication contribution + +       For any questions or suggestions, please kindly submit an [issue](https://github.com/WeBankFinTech/Streamis/issues). 
+ +       You can scan the QR code below to join our WeChat and QQ group to get more immediate response. + +![comminicate](images/zh_CN/readme/communication.png) + +---- + +## License + +       DSS is under the Apache 2.0 license. See the [License](LICENSE) file for details. diff --git a/assembly/pom.xml b/assembly/pom.xml new file mode 100644 index 000000000..46d4e8a16 --- /dev/null +++ b/assembly/pom.xml @@ -0,0 +1,70 @@ + + + + + + streamis + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + assembly + + + + + + org.apache.maven.plugins + maven-antrun-plugin + + + package + + run + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + + + dist + package + + single + + + false + wedatasphere-streamis-${streamis.version}-dist + false + false + + src/main/assembly/assembly.xml + + + + + + + + + \ No newline at end of file diff --git a/assembly/src/main/assembly/assembly.xml b/assembly/src/main/assembly/assembly.xml new file mode 100644 index 000000000..158df4b98 --- /dev/null +++ b/assembly/src/main/assembly/assembly.xml @@ -0,0 +1,80 @@ + + + + dist + + tar.gz + + false + + + + + + + ${project.parent.basedir} + . + + README* + LICENSE* + NOTICE* + + + + + + ${project.parent.basedir}/conf/ + + conf + + **/* + + unix + + + + + ${project.parent.basedir}/bin/ + + bin + + **/* + + unix + + + + + ${project.parent.basedir}/db/ + + db + + **/* + + + + + + ${project.parent.basedir}/streamis-server/target/ + + ./share/streamis-server/ + + **/*.zip + + + + + diff --git a/bin/install.sh b/bin/install.sh new file mode 100644 index 000000000..35d03c1b5 --- /dev/null +++ b/bin/install.sh @@ -0,0 +1,159 @@ +#!/bin/sh +#Actively load user env +if [ -f "~/.bashrc" ];then + echo "Warning! user bashrc file does not exist." 
+else + source ~/.bashrc +fi + +shellDir=`dirname $0` +workDir=`cd ${shellDir}/..;pwd` + +SERVER_IP="" +SERVER_HOME="" + +local_host="`hostname --fqdn`" +LOCAL_IP="`ifconfig | grep 'inet' | grep -v '127.0.0.1' | cut -d: -f2 | awk '{ print $2}'`" + +#To be compatible with MacOS and Linux +txt="" +if [[ "$OSTYPE" == "darwin"* ]]; then + txt="''" +elif [[ "$OSTYPE" == "linux-gnu" ]]; then + txt="" +elif [[ "$OSTYPE" == "cygwin" ]]; then + echo "streamis not support Windows operating system" + exit 1 +elif [[ "$OSTYPE" == "msys" ]]; then + echo "streamis not support Windows operating system" + exit 1 +elif [[ "$OSTYPE" == "win32" ]]; then + echo "streamis not support Windows operating system" + exit 1 +elif [[ "$OSTYPE" == "freebsd"* ]]; then + txt="" +else + echo "Operating system unknown, please tell us(submit issue) for better service" + exit 1 +fi + +function isSuccess(){ +if [ $? -ne 0 ]; then + echo "Failed to " + $1 + exit 1 +else + echo "Succeed to" + $1 +fi +} + +function checkJava(){ + java -version + isSuccess "execute java --version" +} + + +##install env:expect, +sudo yum install -y expect +isSuccess "install expect" + +##install env:telnet, +sudo yum install -y telnet +isSuccess "install telnet" + +##load config +echo "step1:load config" +source ${workDir}/conf/config.sh +source ${workDir}/conf/db.sh +isSuccess "load config" + +local_host="`hostname --fqdn`" + + +##env check +echo "Do you want to clear Streamis table information in the database?" +echo " 1: Do not execute table-building statements" +echo " 2: Dangerous! Clear all data and rebuild the tables." +echo "" + +MYSQL_INSTALL_MODE=1 + +read -p "Please input the choice:" idx +if [[ '2' = "$idx" ]];then + MYSQL_INSTALL_MODE=2 + echo "You chose Rebuild the table" +elif [[ '1' = "$idx" ]];then + MYSQL_INSTALL_MODE=1 + echo "You chose not execute table-building statements" +else + echo "no choice,exit!" 
+ exit 1 +fi + +##init db +if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then + mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/streamis_ddl.sql" + isSuccess "source streamis_ddl.sql" + mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/streamis_dml.sql" + isSuccess "source streamis_dml.sql" + echo "Rebuild the table" +fi + + +EUREKA_URL=http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/ + +##function start +function installPackage(){ +echo "start to install $SERVERNAME" +echo "$SERVERNAME-step1: create dir" + +if ! test -e $SERVER_HOME; then + sudo mkdir -p $SERVER_HOME;sudo chown -R $deployUser:$deployUser $SERVER_HOME + isSuccess "create the dir of $SERVERNAME" +fi + +echo "$SERVERNAME-step2:copy install package" +cp ${workDir}/share/$PACKAGE_DIR/$SERVERNAME.zip $SERVER_HOME +isSuccess "copy ${SERVERNAME}.zip" +cd $SERVER_HOME/;rm -rf $SERVERNAME-bak; mv -f $SERVERNAME $SERVERNAME-bak +cd $SERVER_HOME/;unzip $SERVERNAME.zip > /dev/null +isSuccess "unzip ${SERVERNAME}.zip" + +echo "$SERVERNAME-step3:subsitution conf" +SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/application.yml +sed -i "s#port:.*#port: $SERVER_PORT#g" $SERVER_CONF_PATH +sed -i "s#defaultZone:.*#defaultZone: $EUREKA_URL#g" $SERVER_CONF_PATH +sed -i "s#hostname:.*#hostname: $SERVER_IP#g" $SERVER_CONF_PATH +isSuccess "subsitution conf of $SERVERNAME" +} + +function setDatasourcePassword(){ + PASSWORD=$MYSQL_PASSWORD + temp=${PASSWORD//#/%tream%} + sed -i "s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$temp#g" $SERVER_CONF_PATH + sed -i "s/%tream%/#/g" $SERVER_CONF_PATH +} +##function end + + +##Streamis-Server Install +PACKAGE_DIR=streamis-server +SERVERNAME=streamis-server +SERVER_IP=$STREAMIS_SERVER_INSTALL_IP +SERVER_PORT=$STREAMIS_SERVER_INSTALL_PORT +SERVER_HOME=$STREAMIS_INSTALL_HOME 
+###install Streamis-Server +installPackage +###update Streamis-Server linkis.properties +echo "$SERVERNAME-step4:update linkis.properties" +SERVER_CONF_PATH=$SERVER_HOME/$SERVERNAME/conf/linkis.properties +sed -i "s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g" $SERVER_CONF_PATH +sed -i "s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g" $SERVER_CONF_PATH +setDatasourcePassword +sed -i "s#wds.linkis.gateway.ip.*#wds.linkis.gateway.ip=$GATEWAY_INSTALL_IP#g" $SERVER_CONF_PATH +sed -i "s#wds.linkis.gateway.port.*#wds.linkis.gateway.port=$GATEWAY_PORT#g" $SERVER_CONF_PATH +sed -i "s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://${GATEWAY_INSTALL_IP}:${GATEWAY_PORT}#g" $SERVER_CONF_PATH +isSuccess "subsitution linkis.properties of $SERVERNAME" +echo "<----------------$SERVERNAME:end------------------->" +echo "" + + diff --git a/bin/start.sh b/bin/start.sh new file mode 100644 index 000000000..0be2ba7cb --- /dev/null +++ b/bin/start.sh @@ -0,0 +1,80 @@ +#!/usr/bin/env bash +# +# Copyright 2019 WeBank +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+
+
+# Start all streamis applications
+info="We will start all streamis applications, it will take some time, please wait"
+echo ${info}
+
+#Actively load user env
+source ~/.bash_profile
+
+workDir=`dirname "${BASH_SOURCE-$0}"`
+workDir=`cd "$workDir"; pwd`
+
+
+CONF_DIR="${workDir}"/../conf
+CONF_FILE=${CONF_DIR}/config.sh
+
+function isSuccess(){
+if [ $? -ne 0 ]; then
+ echo "ERROR: $1"
+ exit 1
+else
+ echo "INFO: $1"
+fi
+}
+
+sudo yum -y install dos2unix
+
+
+local_host="`hostname --fqdn`"
+
+#if there is no STREAMIS_INSTALL_HOME,we need to source config again
+if [ -z ${STREAMIS_INSTALL_HOME} ];then
+ echo "Warning: STREAMIS_INSTALL_HOME does not exist, we will source config"
+ if [ ! -f "${CONF_FILE}" ];then
+ echo "Error: can not find config file, start applications failed"
+ exit 1
+ else
+ source ${CONF_FILE}
+ fi
+fi
+
+function startApp(){
+echo "<-------------------------------->"
+echo "Begin to start $SERVER_NAME"
+SERVER_BIN=${STREAMIS_INSTALL_HOME}/${SERVER_NAME}/bin
+SERVER_START_CMD="source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1;sh start-${SERVER_NAME}.sh > /dev/null 2>&1 &"
+
+if [ -n "${SERVER_IP}" ];then
+ ssh ${SERVER_IP} "${SERVER_START_CMD}"
+else
+ ssh ${local_host} "${SERVER_START_CMD}"
+fi
+isSuccess "End to start $SERVER_NAME"
+echo "<-------------------------------->"
+sleep 15 #for Eureka register
+}
+
+#streamis-server
+SERVER_NAME=streamis-server
+SERVER_IP=$STREAMIS_SERVER_INSTALL_IP
+startApp
+
+
diff --git a/bin/stop.sh b/bin/stop.sh
new file mode 100644
index 000000000..33c59c560
--- /dev/null
+++ b/bin/stop.sh
@@ -0,0 +1,73 @@
+#!/usr/bin/env bash
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+
+# Stop all streamis applications
+info="We will stop all streamis applications, it will take some time, please wait"
+echo ${info}
+
+#Actively load user env
+source ~/.bash_profile
+
+workDir=`dirname "${BASH_SOURCE-$0}"`
+workDir=`cd "$workDir"; pwd`
+
+
+CONF_DIR="${workDir}"/../conf
+CONF_FILE=${CONF_DIR}/config.sh
+
+function isSuccess(){
+if [ $? -ne 0 ]; then
+ echo "ERROR: $1"
+ exit 1
+else
+ echo "INFO: $1"
+fi
+}
+
+
+
+local_host="`hostname --fqdn`"
+
+#if there is no STREAMIS_INSTALL_HOME,we need to source config again
+if [ -z ${STREAMIS_INSTALL_HOME} ];then
+ echo "Warning: STREAMIS_INSTALL_HOME does not exist, we will source config"
+ if [ !
-f "${CONF_FILE}" ];then
+ echo "Error: can not find config file, stop applications failed"
+ exit 1
+ else
+ source ${CONF_FILE}
+ fi
+fi
+
+function stopAPP(){
+echo "<-------------------------------->"
+echo "Begin to stop $SERVER_NAME"
+SERVER_BIN=${STREAMIS_INSTALL_HOME}/${SERVER_NAME}/bin
+SERVER_STOP_CMD="source ~/.bash_profile;cd ${SERVER_BIN}; dos2unix ./* > /dev/null 2>&1; dos2unix ../conf/* > /dev/null 2>&1; sh stop-${SERVER_NAME}.sh"
+if [ -n "${SERVER_IP}" ];then
+ ssh -p $SSH_PORT ${SERVER_IP} "${SERVER_STOP_CMD}"
+else
+ ssh -p $SSH_PORT ${local_host} "${SERVER_STOP_CMD}"
+fi
+isSuccess "End to stop $SERVER_NAME"
+echo "<-------------------------------->"
+}
+
+#streamis-server
+SERVER_NAME=streamis-server
+SERVER_IP=$STREAMIS_SERVER_INSTALL_IP
+stopAPP
diff --git a/bin/upgrade.sh b/bin/upgrade.sh
new file mode 100644
index 000000000..856b0c4bc
--- /dev/null
+++ b/bin/upgrade.sh
@@ -0,0 +1,205 @@
+#!/usr/bin/env bash
+#
+# Copyright 2022 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Use to upgrade from 0.1.0 to 0.2.4
+
+if [ ! -f ~/.bashrc ];then
+ echo "Warning! user bashrc file does not exist."
+else
+ source ~/.bashrc
+fi
+
+shellDir=`dirname $0`
+workDir=`cd ${shellDir}/..;pwd`
+
+interact_echo(){
+ while [ 1 ]; do
+ read -p "$1 (Y/N)" yn
+ if [[ "${yn}x" == "Yx" ]] || [[ "${yn}x" == "yx" ]]; then
+ return 0
+ elif [[ "${yn}x" == "Nx" ]] || [[ "${yn}x" == "nx" ]]; then
+ return 1
+ else
+ echo "Unknown choose: [$yn], please choose again."
+ fi + done +} + +interact_echo "Are you sure the current version of Streamis is 0.2.x < 0.2.5 and need to upgrade to 0.2.5 ?" +if [[ $? == 0 ]]; then + source ${workDir}/conf/db.sh + echo "<------ Will connect to [${MYSQL_HOST}:${MYSQL_PORT}] to upgrade the tables in database... ------>" + mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 << EOF 1>/dev/null + /*Modify the table column*/ + ALTER TABLE \`linkis_stream_job\` MODIFY COLUMN \`project_name\` varchar(100) DEFAULT NULL; + ALTER TABLE \`linkis_stream_job\` MODIFY COLUMN \`name\` varchar(200) DEFAULT NULL; + ALTER TABLE \`linkis_stream_project\` MODIFY COLUMN \`name\` varchar(100) DEFAULT NULL; + ALTER TABLE \`linkis_stream_task\` MODIFY COLUMN \`job_id\` varchar(200) DEFAULT NULL; + ALTER TABLE \`linkis_stream_task\` MODIFY COLUMN \`linkis_job_id\` varchar(200) DEFAULT NULL; + + ALTER TABLE \`linkis_stream_project\` ADD create_time datetime DEFAULT NULL; + ALTER TABLE \`linkis_stream_project\` ADD last_update_by varchar(50) DEFAULT NULL; + ALTER TABLE \`linkis_stream_project\` ADD last_update_time datetime DEFAULT NULL; + ALTER TABLE \`linkis_stream_project\` ADD is_deleted tinyint unsigned DEFAULT 0; + + /*Add indexes into the tables*/ + ALTER TABLE \`linkis_stream_job\` ADD UNIQUE KEY(\`project_name\`, \`name\`); + ALTER TABLE \`linkis_stream_job_version\` ADD UNIQUE KEY(\`job_id\`, \`version\`); + + /*Add new tables*/ + DROP TABLE IF EXISTS \`linkis_stream_project_privilege\`; + CREATE TABLE \`linkis_stream_project_privilege\` ( + \`id\` bigint(20) NOT NULL AUTO_INCREMENT, + \`project_id\` bigint(20) NOT NULL, + \`user_name\` varchar(100) NOT NULL, + \`privilege\` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:发布权限 ,2:编辑权限 ,3:查看权限', + PRIMARY KEY (\`id\`) USING BTREE + ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='项目权限表'; + + DROP TABLE IF EXISTS \`linkis_stream_job_config_def\`; + CREATE TABLE \`linkis_stream_job_config_def\` ( + 
\`id\` bigint(20) NOT NULL AUTO_INCREMENT, + \`key\` varchar(100) COLLATE utf8_bin NOT NULL, + \`name\` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Equals option', + \`type\` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT 'NONE' COMMENT 'def type, NONE: 0, INPUT: 1, SELECT: 2', + \`sort\` int(10) DEFAULT '0' COMMENT 'In order to sort the configurations that have the same level', + \`description\` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Description of configuration', + \`validate_type\` varchar(50) COLLATE utf8_bin DEFAULT NULL COMMENT 'Method the validate the configuration', + \`validate_rule\` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Value of validation rule', + \`style\` varchar(200) COLLATE utf8_bin DEFAULT '' COMMENT 'Display style', + \`visiable\` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: hidden, 1: display', + \`level\` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: root, 1: leaf', + \`unit\` varchar(25) COLLATE utf8_bin DEFAULT NULL COMMENT 'Unit symbol', + \`default_value\` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Default value', + \`ref_values\` varchar(200) COLLATE utf8_bin DEFAULT '', + \`parent_ref\` bigint(20) DEFAULT NULL COMMENT 'Parent key of configuration def', + \`required\` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'If the value of configuration is necessary', + \`is_temp\` tinyint(1) DEFAULT '0' COMMENT 'Temp configuration', + PRIMARY KEY (\`id\`), + UNIQUE KEY \`config_def_key\` (\`key\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + DROP TABLE IF EXISTS \`linkis_stream_job_config\`; + CREATE TABLE \`linkis_stream_job_config\` ( + \`job_id\` bigint(20) NOT NULL, + \`job_name\` varchar(200) COLLATE utf8_bin NOT NULL COMMENT 'Just store the job name', + \`key\` varchar(100) COLLATE utf8_bin NOT NULL, + \`value\` varchar(500) COLLATE utf8_bin NOT NULL, + \`ref_def_id\` bigint(20) DEFAULT NULL COMMENT 'Refer to id in config_def table', + PRIMARY KEY (\`job_id\`,\`key\`), + KEY \`config_def_id\` 
(\`ref_def_id\`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + /*Execute dml*/ + source ${workDir}/db/streamis_dml.sql + + /*Data migration*/ + INSERT INTO \`linkis_stream_job_config\`(\`key\`, \`value\`, \`job_id\`, \`job_name\`, \`ref_def_id\`) SELECT ov.config_key, ov.config_value, ov.job_id, ov.job_name, d.id as refer_id from linkis_stream_configuration_config_value ov left join linkis_stream_job_config_def d on ov.config_key = d.key WHERE ov.config_value IS NOT NULL AND ov.job_name IS NOT NULL GROUP BY ov.job_id,ov.config_key; + UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.memory" WHERE \`key\` = "flink.taskmanager.memory"; + UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.cpus" WHERE \`key\` = "flink.taskmanager.cpu.cores"; + UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.cpus" WHERE \`key\` = "wds.linkis.flink.taskManager.cpus"; + UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.numberOfTaskSlots" WHERE \`key\` = "flink.taskmanager.numberOfTaskSlots"; + UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.app.parallelism" WHERE \`key\` = "wds.linkis.engineconn.flink.app.parallelism"; + UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.jobmanager.memory" WHERE \`key\` = "flink.jobmanager.memory"; + UPDATE linkis_stream_job_config c SET \`ref_def_id\` = (SELECT d.id FROM linkis_stream_job_config_def d WHERE d.\`key\` = c.\`key\`) WHERE c.ref_def_id IS NULL; + SELECT @flink_extra_param_id:=id FROM linkis_stream_job_config_def WHERE \`key\` = "wds.linkis.flink.custom"; + UPDATE linkis_stream_job_config SET ref_def_id = @flink_extra_param_id WHERE ref_def_id IS NULL; + + /*Drop tables*/ + /*DROP TABLE \`linkis_stream_configuration_config_key\`*/ + /*DROP TABLE \`linkis_stream_configuration_config_value\`*/ + + /*update tables data*/ + delimiter %% + + create procedure update_project() + BEGIN + -- 声明变量 + 
DECLARE projectname varchar(50); + DECLARE done INT default 0; + + -- 创建游标,并设置游标所指的数据 + DECLARE cur CURSOR for + SELECT distinct j.project_name from linkis_stream_job j; + -- 游标执行完,即遍历结束。设置done的值为1 + DECLARE CONTINUE HANDLER for not FOUND set done = 1; + -- 开启游标 + open cur; + -- 执行循环 + posLoop: + LOOP + -- 从游标中取出projectname + FETCH cur INTO projectname ; + -- 如果done的值为1,即遍历结束,结束循环 + IF done = 1 THEN + LEAVE posLoop; + -- 注意,if语句需要添加END IF结束IF + END IF; + insert into linkis_stream_project(\`name\`,\`create_by\`,\`create_time\`) values (projectname,\'system\',now()); + -- 关闭循环 + END LOOP posLoop; + -- 关闭游标 + CLOSE cur; + -- 关闭分隔标记 + END %% + + create procedure update_project_privilege() + BEGIN + -- 声明变量 + DECLARE projectid bigint(20); + DECLARE create_by varchar(50); + DECLARE done INT default 0; + + -- 创建游标,并设置游标所指的数据 + DECLARE cur CURSOR for + SELECT distinct p.id,j.create_by from linkis_stream_project p,linkis_stream_job j where p.name =j.project_name ; + -- 游标执行完,即遍历结束。设置done的值为1 + DECLARE CONTINUE HANDLER for not FOUND set done = 1; + -- 开启游标 + open cur; + -- 执行循环 + posLoop: + LOOP + -- 从游标中取出id + FETCH cur INTO projectid ,create_by; + -- 如果done的值为1,即遍历结束,结束循环 + IF done = 1 THEN + LEAVE posLoop; + -- 注意,if语句需要添加END IF结束IF + END IF; + + insert into linkis_stream_project_privilege (project_id ,user_name ,privilege) values (projectid,create_by,2); + -- 关闭循环 + END LOOP posLoop; + -- 关闭游标 + CLOSE cur; + -- 关闭分隔标记 + END %% + delimiter ; + + call update_project; + call update_project_privilege; + + drop PROCEDURE update_project; + drop PROCEDURE update_project_privilege; + +EOF + echo "<------ End to upgrade ------>" +fi + + + diff --git a/conf/config.sh b/conf/config.sh new file mode 100644 index 000000000..4760e1f0f --- /dev/null +++ b/conf/config.sh @@ -0,0 +1,45 @@ +### deploy user +deployUser=hadoop + +### ssh port +SSH_PORT=22 + +##The Max Heap size for the JVM +SERVER_HEAP_SIZE="512M" + +##The Port of Streamis +STREAMIS_PORT=9400 + +### The install home path of 
STREAMIS,Must provided +STREAMIS_INSTALL_HOME=/appcom/Install/streamis + +### Linkis EUREKA information. # Microservices Service Registration Discovery Center +EUREKA_INSTALL_IP=127.0.0.1 +EUREKA_PORT=20303 + +### Specifies the user workspace, which is used to store the user's script files and log files. +### Generally local directory +#WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/ +#### Path to store job ResultSet:file or hdfs path +#RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis + +### Linkis Gateway information +GATEWAY_INSTALL_IP=127.0.0.1 +GATEWAY_PORT=9001 + + +################### The install Configuration of all Micro-Services ##################### +# +# NOTICE: +# 1. If you just wanna try, the following micro-service configuration can be set without any settings. +# These services will be installed by default on this machine. +# 2. In order to get the most complete enterprise-level features, we strongly recommend that you install +# the following microservice parameters +# + +STREAMIS_SERVER_INSTALL_IP=127.0.0.1 +STREAMIS_SERVER_INSTALL_PORT=9400 + +STREAMIS_VERSION=0.2.4 + +STREAMIS_FILE_NAME="STREAMIS-$STREAMIS_VERSION" \ No newline at end of file diff --git a/conf/db.sh b/conf/db.sh new file mode 100644 index 000000000..176f14419 --- /dev/null +++ b/conf/db.sh @@ -0,0 +1,8 @@ +### for DSS-Server and Eventchecker APPCONN +MYSQL_HOST= +MYSQL_PORT= +MYSQL_DB= +MYSQL_USER= +MYSQL_PASSWORD= + + diff --git a/db/streamis_ddl.sql b/db/streamis_ddl.sql new file mode 100644 index 000000000..144ecde68 --- /dev/null +++ b/db/streamis_ddl.sql @@ -0,0 +1,257 @@ + +SET NAMES utf8mb4; +SET FOREIGN_KEY_CHECKS = 0; + +-- +-- Table structure for table `linkis_stream_job_config_def` +-- + +DROP TABLE IF EXISTS `linkis_stream_job_config_def`; +CREATE TABLE `linkis_stream_job_config_def` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `key` varchar(100) COLLATE utf8_bin NOT NULL, + `name` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Equals option', + `type` varchar(50) COLLATE 
utf8_bin NOT NULL DEFAULT 'NONE' COMMENT 'def type, NONE: 0, INPUT: 1, SELECT: 2', + `sort` int(10) DEFAULT '0' COMMENT 'In order to sort the configurations that have the same level', + `description` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Description of configuration', + `validate_type` varchar(50) COLLATE utf8_bin DEFAULT NULL COMMENT 'Method the validate the configuration', + `validate_rule` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Value of validation rule', + `style` varchar(200) COLLATE utf8_bin DEFAULT '' COMMENT 'Display style', + `visiable` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: hidden, 1: display', + `level` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: root, 1: leaf', + `unit` varchar(25) COLLATE utf8_bin DEFAULT NULL COMMENT 'Unit symbol', + `default_value` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Default value', + `ref_values` varchar(200) COLLATE utf8_bin DEFAULT '', + `parent_ref` bigint(20) DEFAULT NULL COMMENT 'Parent key of configuration def', + `required` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'If the value of configuration is necessary', + `is_temp` tinyint(1) DEFAULT '0' COMMENT 'Temp configuration', + PRIMARY KEY (`id`), + UNIQUE KEY `config_def_key` (`key`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- +-- Table structure for table `linkis_stream_job_config` +-- + +DROP TABLE IF EXISTS `linkis_stream_job_config`; +CREATE TABLE `linkis_stream_job_config` ( + `job_id` bigint(20) NOT NULL, + `job_name` varchar(200) COLLATE utf8_bin NOT NULL COMMENT 'Just store the job name', + `key` varchar(100) COLLATE utf8_bin NOT NULL, + `value` varchar(500) COLLATE utf8_bin NOT NULL, + `ref_def_id` bigint(20) DEFAULT NULL COMMENT 'Refer to id in config_def table', + PRIMARY KEY (`job_id`,`key`), + KEY `config_def_id` (`ref_def_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + +-- ---------------------------- +-- Table structure for linkis_stream_job_alarm_send_history +-- 
---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_alarm_send_history`; +CREATE TABLE `linkis_stream_job_alarm_send_history` ( + `id` bigint(20) NOT NULL, + `job_id` bigint(20) NULL DEFAULT NULL, + `task_id` bigint(20) NULL DEFAULT NULL, + `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `type` tinyint(1) NULL DEFAULT NULL, + `rule_type` tinyint(1) NULL DEFAULT NULL, + `content` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '报警历史信息' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_alarm_send_history +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_checkpoints +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_checkpoints`; +CREATE TABLE `linkis_stream_job_checkpoints` ( + `id` bigint(20) NOT NULL, + `config_value_id` bigint(20) NULL DEFAULT NULL, + `path` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `size` int(20) NULL DEFAULT NULL, + `status` tinyint(1) NULL DEFAULT NULL, + `trigger_timestamp` datetime NULL DEFAULT NULL, + `latest_ack_timestamp` datetime NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_checkpoints +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_role +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_role`; +CREATE TABLE `linkis_stream_job_role` ( + `id` bigint(20) NOT NULL, + `job_id` bigint(20) NULL DEFAULT NULL, + `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `front_name` varchar(50) CHARACTER SET utf8 COLLATE 
utf8_general_ci NULL DEFAULT NULL, + `update_time` datetime NULL DEFAULT NULL, + `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_role +-- ---------------------------- +INSERT INTO `linkis_stream_job_role` VALUES (1, -1, '管理员', '管理员', '2021-04-07 20:57:09', NULL); + + +-- ---------------------------- +-- Table structure for linkis_stream_job_user_role +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_user_role`; +CREATE TABLE `linkis_stream_job_user_role` ( + `id` bigint(20) NOT NULL, + `job_id` bigint(20) DEFAULT NULL, + `user_id` bigint(20) DEFAULT NULL, + `role_id` bigint(20) DEFAULT NULL, + `username` varchar(100) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE + ) ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='作业角色关系'; + +-- ---------------------------- +-- Records of linkis_stream_job_user_role +-- ---------------------------- + +/*Table structure for table `linkis_stream_job` */ + +DROP TABLE IF EXISTS `linkis_stream_job`; + +CREATE TABLE `linkis_stream_job` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `project_name` varchar(100) DEFAULT NULL, + `name` varchar(200) DEFAULT NULL, + `status` tinyint(1) DEFAULT '0' COMMENT '1:已完成 ,2:等待重启 ,3:告警 ,4:慢任务 ,5:运行中 ,6:失败任务', + `create_by` varchar(50) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `label` varchar(200) DEFAULT NULL, + `description` varchar(200) DEFAULT NULL, + `job_type` varchar(30) DEFAULT NULL COMMENT '目前只支持flink.sql、flink.jar', + `submit_user` varchar(100) DEFAULT NULL, + `workspace_name` varchar(50) DEFAULT NULL, + `current_version` varchar(50) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY(`project_name`, `name`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='作业表'; + +/*Table 
structure for table `linkis_stream_job_version` */ + +DROP TABLE IF EXISTS `linkis_stream_job_version`; + +CREATE TABLE `linkis_stream_job_version` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `job_id` varchar(50) DEFAULT NULL, + `version` varchar(20) DEFAULT NULL, + `source` varchar(255) DEFAULT NULL COMMENT '这个版本的来源,比如:用户上传,由某个历史版本回退回来的', + `job_content` text COMMENT '内容为meta.json', + `comment` varchar(255) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `create_by` varchar(32) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE, + UNIQUE KEY(`job_id`, `version`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='作业表'; + +/*Table structure for table `linkis_stream_job_version_files` */ + +DROP TABLE IF EXISTS `linkis_stream_job_version_files`; + +CREATE TABLE `linkis_stream_job_version_files` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `job_id` varchar(50) NOT NULL, + `job_version_id` bigint(20) NOT NULL, + `file_name` varchar(500) DEFAULT NULL, + `version` varchar(30) DEFAULT NULL COMMENT '文件版本号,由用户上传时指定的', + `store_path` varchar(100) DEFAULT NULL COMMENT '如:{"resource":"22edar22", "version": "v0001"}', + `store_type` varchar(20) DEFAULT NULL COMMENT '存储类型,一般就是bml', + `create_time` datetime DEFAULT NULL, + `create_by` varchar(32) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +/*Table structure for table `linkis_stream_project` */ + +DROP TABLE IF EXISTS `linkis_stream_project`; + +CREATE TABLE `linkis_stream_project` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `workspace_id` bigint(20) DEFAULT NULL, + `name` varchar(100) DEFAULT NULL, + `create_by` varchar(50) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表'; + +/*Table structure for table `linkis_stream_project_files` */ + +DROP TABLE IF EXISTS `linkis_stream_project_files`; + +CREATE TABLE `linkis_stream_project_files` ( + 
`id` bigint(20) NOT NULL AUTO_INCREMENT, + `file_name` varchar(500) DEFAULT NULL, + `version` varchar(30) DEFAULT NULL COMMENT '文件版本号,由用户上传时指定的', + `store_path` varchar(100) DEFAULT NULL COMMENT '如:{"resource":"22edar22", "version": "v0001"}', + `store_type` varchar(20) DEFAULT NULL COMMENT '存储类型,一般就是bml', + `project_name` varchar(50) DEFAULT NULL, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_by` varchar(32) DEFAULT NULL, + `comment` varchar(255) DEFAULT NULL COMMENT '说明', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表'; + +/*Table structure for table `linkis_stream_task` */ + +DROP TABLE IF EXISTS `linkis_stream_task`; + +CREATE TABLE `linkis_stream_task` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `job_version_id` bigint(20) NOT NULL, + `job_id` varchar(200) DEFAULT NULL, + `version` varchar(50) DEFAULT NULL, + `status` int(3) DEFAULT NULL, + `start_time` datetime DEFAULT NULL, + `last_update_time` datetime DEFAULT NULL, + `err_desc` varchar(10240) DEFAULT NULL, + `submit_user` varchar(50) DEFAULT NULL, + `linkis_job_id` varchar(200) DEFAULT NULL, + `linkis_job_info` mediumtext, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='任务表'; + +DROP TABLE IF EXISTS `linkis_stream_alert_record`; + +CREATE TABLE `linkis_stream_alert_record` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `alert_level` varchar(20) NOT NULL DEFAULT 'critical' COMMENT '告警级别', + `alert_user` varchar(20) NOT NULL COMMENT '告警用户', + `alert_msg` varchar(200) NOT NULL COMMENT '告警信息', + `job_id` bigint(20) NOT NULL, + `job_version_id` bigint(20) DEFAULT NULL, + `task_id` bigint(20) DEFAULT NULL, + `create_time` datetime DEFAULT NULL, + `status` bigint(2) DEFAULT '1' COMMENT '''1为成功,0为失败''', + `error_msg` varchar(200) DEFAULT NULL COMMENT '告警发送失败后的错误信息', + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8; + +DROP TABLE IF 
EXISTS `linkis_stream_project_privilege`; + +CREATE TABLE `linkis_stream_project_privilege` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `project_id` bigint(20) NOT NULL, + `user_name` varchar(100) NOT NULL, + `privilege` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:发布权限 ,2:编辑权限 ,3:查看权限', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='项目权限表'; + + +ALTER TABLE `linkis_stream_project` ADD create_time datetime DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD last_update_by varchar(50) DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD last_update_time datetime DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD is_deleted tinyint unsigned DEFAULT 0; + +SET FOREIGN_KEY_CHECKS = 1; diff --git a/db/streamis_dml.sql b/db/streamis_dml.sql new file mode 100644 index 000000000..85bd4bc96 --- /dev/null +++ b/db/streamis_dml.sql @@ -0,0 +1,25 @@ +-- ---------------------------- +-- Records of linkis_stream_job_config_def +-- ---------------------------- + +INSERT INTO `linkis_stream_job_config_def` VALUES (1,'wds.linkis.flink.resource','资源配置','NONE',0,'资源配置','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (2,'wds.linkis.flink.app.parallelism','Parallelism并行度','NUMBER',0,'Parallelism并行度','Regex','^([1-9]\\d{0,1}|100)$','',1,1,NULL,'4','',1,1,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (3,'wds.linkis.flink.jobmanager.memory','JobManager Memory (M)','NUMBER',0,'JobManager Memory (M)','Regex','^([1-9]\\d{0,4}|100000)$','',1,1,'M','1024','',1,1,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (4,'wds.linkis.flink.taskmanager.memory','TaskManager Memory (M)','NUMBER',0,'JobManager Memory (M)','Regex','^([1-9]\\d{0,4}|100000)$','',1,1,'M','4096','',1,1,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (5,'wds.linkis.flink.taskmanager.numberOfTaskSlots','TaskManager Slot数量','NUMBER',0,'TaskManager 
Slot数量','Regex','^([1-9]\\d{0,1}|100)$','',1,1,NULL,'2','',1,1,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (6,'wds.linkis.flink.taskmanager.cpus','TaskManager CPUs','NUMBER',0,'TaskManager CPUs','Regex','^([1-9]\\d{0,1}|100)$','',1,1,NULL,'2','',1,1,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (7,'wds.linkis.flink.custom','Flink参数','NONE',0,'Flink自定义参数','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (8,'wds.linkis.flink.produce','生产配置','NONE',0,'生产配置','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (9,'wds.linkis.flink.checkpoint.switch','Checkpoint开关','SELECT',0,'Checkpoint开关',NULL,NULL,'',1,1,'','OFF','ON,OFF',8,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (10,'wds.linkis.flink.savepoint.path','快照(Savepoint)文件位置【仅需恢复任务时指定】','INPUT',4,'快照(Savepoint)文件位置','None',NULL,'',1,1,NULL,NULL,'',8,0,1); +INSERT INTO `linkis_stream_job_config_def` VALUES (11,'wds.linkis.flink.alert','告警设置','NONE',0,'告警设置','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (12,'wds.linkis.flink.alert.rule','告警规则','NONE',0,'告警规则','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (13,'wds.linkis.flink.alert.user','告警用户','NONE',0,'告警用户',NULL,NULL,'',1,1,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (14,'wds.linkis.flink.alert.level','告警级别','NONE',0,'告警级别','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (15,'wds.linkis.flink.alert.failure.level','失败时告警级别','NONE',0,'失败时告警级别','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (16,'wds.linkis.flink.alert.failure.user','失败时告警用户','NONE',0,'失败时告警用户','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES 
(32,'wds.linkis.flink.authority','权限设置','NONE',0,'权限设置','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (33,'wds.linkis.flink.authority.visible','可见人员','INPUT',0,'可见人员','None',NULL,'',1,1,NULL,NULL,'',32,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (34,'wds.linkis.rm.yarnqueue','使用Yarn队列','INPUT',0,'使用Yarn队列','None',NULL,'',1,1,NULL,NULL,'',1,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (35,'wds.linkis.flink.app.fail-restart.switch','作业失败自动拉起开关','SELECT',1,'作业失败自动拉起开关','None',NULL,'',1,1,NULL,'OFF','ON,OFF',8,0,0); +INSERT INTO `linkis_stream_job_config_def` VALUES (36,'wds.linkis.flink.app.start-auto-restore.switch','作业启动状态自恢复','SELECT',2,'作业启动状态自恢复','None',NULL,'',1,1,NULL,'ON','ON,OFF',8,0,0); \ No newline at end of file diff --git a/docs/en_US/0.2.0/StreamisDeployment.md b/docs/en_US/0.2.0/StreamisDeployment.md new file mode 100644 index 000000000..429d7d8f2 --- /dev/null +++ b/docs/en_US/0.2.0/StreamisDeployment.md @@ -0,0 +1,165 @@ +# Streamis installation and deployment documentation + +## 1. Component introduction +Streamis0.2.4 provides the Streamis-JobManager component, the role of the component is
+1. Publish streaming applications
+2. Set streaming application parameters, such as the number of Flink slots, checkpoint related parameters, etc.
+3. Manage streaming applications (e.g. start and stop)
+4. Streaming application monitoring
+ + +## 2. Code compilation +Streamis does not require manual compilation. You can download the installation package directly for deployment. Please [click to download the installation package](https://github.com/WeBankFinTech/Streamis/releases) + +If you have already obtained the installation package, you can skip this step
+ +- The background compilation method is as follows +``` +cd ${STREAMIS_CODE_HOME} +mvn -N install +mvn clean install +``` +After successful compilation, the installation package will be generated in the 'assembly/target' directory of the project + +- The front-end compilation method is as follows + +Pre dependency: nodejs, python 2.0 + +```bash +cd ${STREAMIS_CODE_HOME}/web +npm i +npm run build +``` +After the compilation is successful, the installation package will be generated in the `${STREAMIS_CODE_HOME}/web` directory + +## 3. Installation preparation +### 3.1 Basic environment installation +        The following software must be installed: + +- MySQL (5.5+), [How to install MySQL](https://www.runoob.com/mysql/mysql-install.html) +- JDK (above 1.8.0_141), [How to install JDK](https://www.runoob.com/java/java-environment-setup.html) + +### 3.2 Linkis and DSS environments +- The execution of Streamis depends on Linkis, and it needs to be version 1.1.1 and above, so you need to install Linkis above 1.1.1 and ensure that the Flink engine can be used normally.Some functions need to be supported by linkis-1.1.2. +- Datasphere studio (> =1.1.0), the development and debugging of streaming jobs depend on DSS scriptis, and the streaming production center needs to be embedded in the DSS engineering framework system, so it depends on * * dss-1.1.0 * * and above. + +Before the formal installation of streamis, please install linkis-1.1.1 and dss-1.1.0 or above, and ensure that the linkis Flink engine and DSS can be used normally. For the installation of DSS and linkis, please refer to the [dss & linkis one click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md). + +How to verify that DSS and linkis are basically available? You can create a flinksql script on DSS scriptis and execute it. 
If flinksql can execute correctly and return the result set, it means that the DSS and linkis environments are available. + + +## 4. Installation and startup + +### Background installation + +1.installation package preparation + +Upload the installation package to the installation directory of the Linux server (currently only supports linux environment deployment), such as /appcom/install/streams, and then extract it: + +```bash +cd /appcom/Install/streamis +tar -xvf wedatasphere-streamis-${streamis-version}-dist.tar.gz +``` + +2.Modify the database configuration +```bash +vi conf/db.sh +#Configure basic database information +``` + +3.Modify the basic configuration file + +```bash +vi conf/config.sh +#Configure service port information +#Configure Linkis service information +``` +4.Installation +```bash +sh bin/install.sh +``` + +- The install.sh script will ask you if you need to initialize the database and import metadata. + +     Because the user is worried that the user repeatedly executes the install.sh script to clear the user data in the database, when the install.sh is executed, the user will be asked if they need to initialize the database and import metadata. + +     **Yes must be selected for the first installation**. + +5.start up +```bash +sh bin/start.sh +``` + +- Start verification +Verification method, because Streamis and Linkis use a set of Eureka, you need to check whether the Eureka page of Linkis already contains Streamis services, as shown in the figure, +![components](../../images/zh_CN/eureka_streamis.png) + + + +### Front-end deployment + +1.Install nginx + +```bash +sudo yum install -y nginx +``` +2.Deploy the front-end package +``` +mkdir ${STREAMIS_FRONT_PATH} +cd ${STREAMIS_FRONT_PATH} +#Place the front-end package +unzip streamis-{streamis-version}.zip +``` +3.Modify the nginx configuration file
 + +```bash +cd /etc/nginx/conf.d +vi streamis.conf +# Copy the following template and modify it according to the actual situation +``` +``` +server { + listen 9088;# access port + server_name localhost; + location / { + root ${STREAMIS_FRONT_PATH}; # Please modify it to the appropriate static file directory of Streamis + index index.html index.htm; + } + location /api { + proxy_pass http://${LINKIS_GATEWAY_IP}:${LINKIS_GATEWAY_PORT}; #Back-end Linkis address, please modify it to the ip and port of the Linkis gateway + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header x_real_ip $remote_addr; + proxy_set_header remote_addr $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_http_version 1.1; + proxy_connect_timeout 4s; + proxy_read_timeout 600s; + proxy_send_timeout 12s; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection upgrade; + } + + #error_page 404 /404.html; + # redirect server error pages to the static page /50x.html + # + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } +} +``` +4.Load nginx configuration +```bash +sudo nginx -s reload +``` + +## 5. Access to DSS + +If you want to use the streamis0.2.4 front end normally, you also need to install the DSS StreamisAppConn plug-in. Please refer to: [StreamisAppConn plug-in installation document](development/StreamisAppConnInstallationDocument.md) + +## 6. Linkis Flink engine compilation and installation +If you want to run streamis0.2.4 normally, you also need to install the linkis Flink engine. Please refer to: [linkis Flink engine installation document](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/) + +## 7. 
Streamis component upgrade document / script +If you want to upgrade from a lower version of streamis to streamis0.2.4, please refer to: [streamis upgrade document](development/StreamisUpgradeDocumentation.md) diff --git a/docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md b/docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md new file mode 100644 index 000000000..8a25eaee8 --- /dev/null +++ b/docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md @@ -0,0 +1,178 @@ +# Streamis access AppConn + +## Overall flow chart +![Streamis access DSS](../../../images/streamis_appconn_en.png) + +## DSS project APPCONN plug-in streamis-appconn + +### The configuration table +Configure the following four tables:dss_workspace_dictionary、dss_appconn、dss_workspace_menu_appconn、dss_appconn_instance,appconn_name for realTimeJobCenter is appconn accessed by the graphical interface,appconn_name for streamis is appconn accessed by the API,The StreamisAppConn object is instantiated based on the configuration information in the table when DSS is started.Appconn in the following SQL_ INSTALL_ IP and appconn_ INSTALL_ When executing DSS installation script for automatic installation, port will enter through interactive commands. 
+```roomsql +delete from `dss_workspace_dictionary` WHERE `appconn_name` = 'streamis'; + +INSERT INTO `dss_workspace_dictionary` ( `workspace_id`, `parent_key`, `dic_name`, `dic_name_en`, `dic_key`, `dic_value`, `dic_value_en`, `title`, `title_en`, `url`, `url_type`,`icon`, `order_num`, `remark`, `create_user`, `create_time`, `update_user`, `update_time`, appconn_name) +VALUES ('0','p_develop_process','流式生产中心','Streamis Product Center','pdp_streamis_product_center','streamis_prod',NULL,NULL,NULL, +'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realtimeJobCenter?projectName=${projectName}&workspaceName=${workspaceName}','0','kaifa-icon','1','工程开发流程-流式生产中心','SYSTEM','2020-12-28 17:32:35',NULL,'2022-06-30 17:49:02','streamis'); + +select @old_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis'; + +delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_dss_appconn_id; +delete from `dss_appconn_instance` where `appconn_id` = @old_dss_appconn_id; +delete from `dss_appconn` where `appconn_name`='streamis'; + +select @old_jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter'; + +delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_jobcenter_dss_appconn_id; +delete from `dss_appconn_instance` where `appconn_id` = @old_jobcenter_dss_appconn_id; +delete from `dss_appconn` where `appconn_name`='realTimeJobCenter'; + +INSERT INTO dss_appconn +(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource) +VALUES('streamis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.streamis.dss.appconn.StreamisAppConn', NULL, NULL); +INSERT INTO dss_appconn +(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource) +VALUES('realTimeJobCenter', 0, 1, 1, 1, 'sso', '', NULL, NULL); + +select @dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis'; +select 
@jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter'; + +INSERT INTO dss_workspace_menu_appconn +(appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image) +VALUES(@jobcenter_dss_appconn_id, 1, 'StreamSQL development', 'StreamSQL开发', 'Real-time application development is a streaming solution jointly built by WeDataSphere, Boss big data team and China Telecom ctcloud Big data team.', '实时应用开发是微众银行微数域(WeDataSphere)、Boss直聘大数据团队 和 中国电信天翼云大数据团队 社区联合共建的流式解决方案,以 Linkis 做为内核,基于 Flink Engine 构建的批流统一的 Flink SQL,助力实时化转型。', +'streaming, realtime', '流式,实时', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'http://127.0.0.1:8088/wiki/scriptis/manual/workspace_cn.html', 'shujukaifa-logo', NULL, NULL, NULL, NULL, NULL, 'shujukaifa-icon'); + +INSERT INTO dss_appconn_instance +(appconn_id, label, url, enhance_json, homepage_uri) +VALUES(@dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter'); + +INSERT INTO dss_appconn_instance +(appconn_id, label, url, enhance_json, homepage_uri) +VALUES(@jobcenter_dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter', NULL, NULL); +``` + +### Concrete implementation description +StreamisAppConn extends AbstractOnlySSOAppConn implements SecondlyAppConn,Override method to create StreamisStructureIntegrationStandard.StreamisAppConn currently only implements section-free login and organizational structure specification capabilities。 + +To create a StreamisProjectService by rewriting the methods in StreamisStructureIntegrationStandard, four methods need to be rewritten internally, and four operation classes will be created: StreamisProjectSearchOperation, 
StreamisProjectCreationOperation, StreamisProjectUpdateOperation, and StreamisPrejectDeleteOperation. The operation class calls the streamis application through HTTP to query, create, modify and delete project respectively, and synchronize the DSS project information to Streamis. + +- When DSS creates a project, it will first call the query operation to query whether the same project name already exists in streamis. If it does, a prompt will pop up. If it does not exist, it will continue to call the create operation to create a new project in streamis; +- When DSS modifies a project, it will call the modify operation to update the project information in streamis; +- When DSS deletes an item, it will call the delete operation to delete the item information in streamis and change the deletion mark. + +## API +1 API name: query project +- API path:GET /streamis/project/searchProject +- Request parameters + +|Parameter name |Whether it is necessary |Example |remarks | +|-------------|---------|-------|--------| +|projectName |yes | | | + +- Return data + +|name |type |Whether it is necessary |Default |remarks | +|-------------|--------|---------|---------|--------| +|method |string |no | +|status |number |yes | +|message |string |no | +|data |object |yes | +|- projectId |number |yes | + +2 API name:create project +- API path:POST /streamis/project/createProject +- Request parameters + +|Parameter name |Whether it is necessary |Example |remarks | +|-------------|---------|-------|--------| +|projectName |yes | | | +|workspaceId |no | | | +|releaseUsers |no | | | +|editUsers |no | | | +|accessUsers |no | | | + +- Return data + +|name |type |Whether it is necessary |Default |remarks | +|----------|--------|---------|---------|--------| +|method |string |no | +|status |number |yes | +|message |string |no | +|data |object |yes | +|- projectId |number |yes | +|- projectName |string |no | + +3 API name:update project +- API path:PUT /streamis/project/updateProject +- Request 
parameters + +|Parameter name |Whether it is necessary |Example |remarks | +|-------------|---------|-------|--------| +|projectId |yes | | | +|projectName |yes | | | +|workspaceId |no | | | +|releaseUsers |no | | | +|editUsers |no | | | +|accessUsers |no | | | + +- Return data + +|name |type |Whether it is necessary |Default |remarks | +|----------|--------|---------|---------|--------| +|method |string |no | +|status |number |yes | +|message |string |no | +|data |object |no | + +4 API name:delete project +- API path:DELETE /streamis/project/deleteProject +- Request parameters + +|Parameter name |Whether it is necessary |Example |remarks | +|-------------|---------|-------|--------| +|projectId |yes | | | +|projectName |no | | | + +- Return data + +|name |type |Whether it is necessary |Default |remarks | +|----------|--------|---------|---------|--------| +|method |string |no | +|status |number |yes | +|message |string |no | +|data |object |no | + +## Streamis project streamis-project-server + +### Related table operation +There are 2 tables involved in streamis, including linkis_ stream_ Project and linkis_ stream_ project_ privilege. 
+```roomsql +--table already exists +CREATE TABLE `linkis_stream_project` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `workspace_id` bigint(20) DEFAULT NULL, + `name` varchar(100) DEFAULT NULL, + `create_by` varchar(50) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='project table'; + +--newly added table +CREATE TABLE `linkis_stream_project_privilege` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `project_id` bigint(20) NOT NULL, + `user_name` varchar(100) NOT NULL, + `privilege` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:RELEASE ,2:EDIT ,3:ACCESS', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='project privilege table'; + +--newly added field +ALTER TABLE `linkis_stream_project` ADD create_time datetime DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD last_update_by varchar(50) DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD last_update_time datetime DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD is_deleted tinyint unsigned DEFAULT 0; +``` + +### Concrete implementation description +- The query operation will obtain the request parameter projectName in the table links_ stream_project Query the corresponding ID. If the query operation is successful, the returned status is 0, and the projectid is the queried ID. 
if the query result is empty, the projectid is null; +- The creation operation will insert the project information (projectName、workspaceId) in the request parameters into the project table linkis_stream_project and auto increment the generated ID, associate the user in the permission information data (releaseUsers、editUsers、accessUsers) in the request parameters with the generated ID of the project table, and insert the table linkis_stream_project_privilege, the status value returned from the successful creation operation is 0, and the project table generation ID will be returned as the value of projectId; +- The modification operation will update the request information data to the table linkis_stream_project and linkis_stream_project_privilege, the status value returned successfully is 0; +- In the delete operation, the is_deleted field of the table linkis_stream_project will be marked as 1 according to the projectId. The relevant data in the table linkis_stream_project_privilege will be deleted, the status value returned successfully is 0. diff --git a/docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md b/docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md new file mode 100644 index 000000000..6013b3b4a --- /dev/null +++ b/docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md @@ -0,0 +1,43 @@ +# Authentication + +## Authentication flow chart +In Streamis, the module that needs authentication does not rely on the Streamis project server module. The rest interface is called to handle authentication. + +![Streamis project authentication operation](../../../images/streamis_project_privilege_en.png) + +## Specific implementation instructions +Get the set of all permissions according to the current user name and item id/ name. 
If the permission set contains RELEASE permission, you have the permission to publish / edit / view; if the permission set contains EDIT permission, you have the permission to edit / view; if the permission set contains ACCESS permission, you have the permission to view; +Permission inclusion relationship: RELEASE permission includes EDIT permission and ACCESS permission; edit permission includes ACCESS permission. + +### edit privilege API: + +|RequestMethod |API path |name | +|------|----------------------------------------------------------|-----------------| +|POST |/streamis/streamProjectManager/project/files/upload |Project resource file - Import | +|GET |/streamis/streamProjectManager/project/files/delete |Delete all versions of the file under the project | +|GET |/streamis/streamProjectManager/project/files/version/delete |Delete version file | +|GET |/streamis/streamProjectManager/project/files/download |Task details - Download | +|POST |streamis/streamJobManager/job/createOrUpdate |create or Update streamis-job| +|POST |/streamis/streamJobManager/job/upload |Upload file | +|POST |/streamis/streamJobManager/job/execute |start-up | +|GET |/streamis/streamJobManager/job/stop |stop | +|PUT |/streamis/streamJobManager/job//snapshot/{jobId:\w+} |Snapshot generation | +|GET |/streamis/streamJobManager/config/json/{jobId:\w+} |Configuration - save | +|POST |/streamis/streamJobManager/job/bulk/execution |Batch start | +|POST |/streamis/streamJobManager/job/bulk/pause |Batch stop | + + +### access privilege API: + +|RequestMethod |API path |name | +|------|----------------------------------------------------------|-------------| +|GET |streamis/streamJobManager/job/list |Query the jobs that the current user can view | +|GET |/streamis/streamProjectManager/project/files/list |prokect resource document | +|GET |/streamis/streamProjectManager/project/files/version/list |Obtain all versions of the file under the project | +|GET |/streamis/streamJobManager/job/version 
|Query job version | +|GET |/streamis/streamJobManager/job/execute/history |Job execution history | +|GET |/streamis/streamJobManager/job/progress |Get the latest task status of the current version of the job | +|GET |/streamis/streamJobManager/job/jobContent |Task details | +|GET |/streamis/streamJobManager/job/logs |Get log | +|POST |/streamis/streamJobManager/config/json/{jobId:\w+} |Get task configuration | +|GET |/streamis/streamJobManager/config/view |Query the current job configuration information | \ No newline at end of file diff --git a/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md b/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md new file mode 100644 index 000000000..6891d5e92 --- /dev/null +++ b/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md @@ -0,0 +1,73 @@ +Streamisappconn installation document this article mainly introduces the deployment, configuration and installation of streamisappconn in DSS (datasphere studio) 1.1.0 + +# 1. Preparation for deploying streamisappconn +Before deploying streamisappconn, please complete the installation of streamis0.2.4 and other related components, and ensure that the basic functions of the project are available. + +# 2. Download and compilation of streamisappconn plug-in +1) Download binary package + +We provide the material package of streamisappconn, which you can download directly. [Click to jump to the release interface](https://github.com/WeBankFinTech/Streamis/releases) + +2) Compile package + +If you want to develop and compile streamisappconn yourself, the specific compilation steps are as follows: 1 Code for clone streams; 2 Find the streamis appconn module and compile streamis appconn separately +```shell script +cd {STREAMIS_CODE_HOME}/streamis-appconn +mvn clean install +``` +Streamis will be found under this path Zip installation package +```shell script +{STREAMIS_CODE_HOME}\streamis-appconn\target\streamis.zip +``` + +# 3. 
Overall steps for deployment and configuration of streamisappconn plug-in +1. get the packed streamis Zip material package + +2. place it in the following directory and unzip it + +Note: after extracting streamis appconn for the first time, make sure that there is no index in the current folder_ v0000XX. Index file, which will be generated later +```shell script +cd {DSS_Install_HOME}/dss/dss-appconns +unzip streamis.zip +``` +The extracted directory structure is: +```shell script +conf +db +lib +``` +3. execute scripts for automatic installation + ```shell script +cd {DSS_INSTALL_HOME}/dss/bin +sh ./appconn-install.sh +# Script is an interactive installation scheme. You need to enter the string streamis and the IP and port of streamis service to complete the installation +# The streamis port here refers to the front-end port, which is configured in nginx. Instead of the back-end service port +``` + +## 4. After the installation of streamis appconn, you need to restart the DSS service to finally complete the plug-in update +###4.1) make the deployed appconn effective +Use the DSS start / stop script to make appconn effective. Enter the directory {DSS_INSTALL_HOME}/dss/sbin where the script is located, and execute the script using the following commands in sequence: +```shell script +sh ./dss-stop-all.sh +sh ./dss-start-all.sh +``` +###4.2) verify whether streamis appconn is effective +After installing and deploying streamis appconn, you can preliminarily verify whether the streamis appconn is successfully installed through the following steps. + +Create a new project in the DSS workspace +![DSS_workspace_Streamis_project](../../../images/zh_CN/dss_streamis_project.png) + +Check whether the project is created synchronously in the streamis database. Query the records indicating that appconn is successfully installed +```roomsql +SELECT * FROM linkis_stream_project WHERE name = ' input project_name '; +``` + +# 5. 
Installation principle of streamis appconn +The relevant configuration information of streamis will be inserted into the following table. The use configuration of streamis can be completed by configuring the following table. (Note: if you only need to quickly install appconn, you don't need to pay too much attention to the following fields. Most of the init.sql provided are configured by default. Focus on the above operations.) + +|table name |table function |remarks | +|-------------------|-----------------------------------------|------| +|dss_workspace_dictionary |Configuring a streaming production center |must| +|dss_appconn |Basic information of appconn, used to load appconn |must| +|dss_workspace_menu_appconn |Appconn menu, front-end connection to streamis |must| +|dss_appconn_instance |The instance information of appconn, including its own URL information |must| diff --git a/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md b/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md new file mode 100644 index 000000000..3e4bd2b4a --- /dev/null +++ b/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md @@ -0,0 +1,44 @@ +Streamis upgrade document. This article mainly introduces the upgrade steps of adapting DSS1.1.0 and linkis1.1.1 based on the original installation of Streamis service. The biggest difference between Streamis 0.2.4 and Streamis 0.1.0 is that it accesses DSS appconn and optimizes the start and stop of jobs. + +# 1. Work before upgrading streamis +Before upgrading Streamis, please install linkis1.1.1 and DSS1.1.0 or above, and ensure that the linkis Flink engine and DSS can be used normally. For the installation of DSS and linkis, please refer to [dss & linkis one click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md). + +# 2. 
Streamis upgrade steps + +## Install streamisappconn +1) Delete the old version of StreamisAppconn package + +Enter the following directory, find the appconn folder of streamis and delete it, if any: +```shell script +{DSS_Install_HOME}/dss/dss-appconns +``` + +2) StreamisAppconn installation deployment + +To install the DSS StreamisAppConn plug-in. Please refer to: [StreamisAppConn plug-in installation document](development/StreamisAppConnInstallationDocument.md) + +## Installing the Streamis backend +Update Lib in the obtained installation package to the path 'streamis-server/lib' under the streamis installation directory, and the file contents under 'streamis-server/conf' can be updated as needed. + +Enter the installation directory and execute the update script to complete the update of database table structure and data: +```shell script +cd {Streamis_Install_HOME} +sh bin/upgrade.sh +``` + +Then complete the update and restart of the Streamis server through the following command: +```shell script +cd {Streamis_Install_HOME}/streamis-server +sh bin/stop-streamis-server.sh +sh bin/start-streamis-server.sh +``` + +##Installing the Streamis front end +First delete the front-end directory folder of the old version, and then replace it with the new front-end installation package. +``` +mkdir ${STREAMIS_FRONT_PATH} +cd ${STREAMIS_FRONT_PATH} +#1.Delete front-end directory folder +#2.Place the front-end package +unzip streamis-${streamis-version}.zip +``` \ No newline at end of file diff --git a/docs/en_US/userManual/StreamisUserManual.md b/docs/en_US/userManual/StreamisUserManual.md new file mode 100644 index 000000000..86a65490b --- /dev/null +++ b/docs/en_US/userManual/StreamisUserManual.md @@ -0,0 +1,205 @@ +# Streamis quick start + +## 1. Preface + +         This article is a quick start document for Streamis 0.2.4, which covers the basic usage process of Stremis. More details on operation and usage will be provided in the user documentation. + + +## 2. 
Streamis entrance +         For the convenience of users,**the Streamis system is embedded in the DSS system in the form of DSS components** + +The entry path is **Home-DSS component application-Enter Streamis** + +![Streamis entrance](../../images/create_stream_product_center_en.png) +
Picture 2.1 Streamis entrance&#13;
+ +## 3. Core indicators + +         Entering the homepage, the top half shows the core indicators. + +         The core indicator shows the status summary of the Flink tasks uploaded to the project for execution. There are temporarily 7 states, showing the state name and the number of tasks in that state. The specific content is as shown in the figure below. + +![Core indicators](../../images/home_page_en.png) +
Picture 3.1 Core indicators
+ +# 4. Job management + +         Support the release of Flink Jar and Flink SQL, and provide the development and debugging and production management capabilities of streaming applications, such as: start and stop, status monitoring, checkpoint, etc. + +## 4.1. Upload job + +         Click **"Upload"** and select the zip file to upload. The file is divided into two job types, Flink Jar and Flink SQL, and the format is as follows: + +### 4.1.1. The contents of the Flink Jar ZIP file are as follows: + +![jar zip](../../images/jarZip.png) +
Picture 4.1 Flink Jar ZIP
+ +
+ +#### Flink Jar meta.json: + +```json +{ + "projectName": "flinkJarTest3", + "jobName": "flinkJarTestc", + "jobType": "flink.jar", + "tags": "e,t,y,h,g", + "description": "test of FlinkJar Job3", + "jobContent": { + "main.class.jar": "frauddetection-0.1.jar", + "main.class": "spendreport.FraudDetectionJob", + "args": ["aa","bb"], + "hdfs.jars": [], + "dependency.jars": [], + "resources": [] + } +} +``` + +### 4.1.2. Flink SQL ZIP + +         The SQL type file is just a ZIP compressed package of the meta.json file, and its content is as follows: + +#### Flink SQL meta.json: + +```json +{ + "projectName": "flinkSqlTestD", + "jobName": "flinkSqlTesta", + "jobType": "flink.sql", + "tags": "a,b,c", + "description": "test FlinkSql JobD", + "jobContent": { + "type": "sql", + "sql": "select 1", + "file": "", + "resourceId": "", + "version": "" + } +} + +``` + +
+ +         After the ZIP file is uploaded successfully, the task is displayed in the task list in Figure 3.1, and operations such as start, stop, configuration and checkpoint are provided according to the task status; + +         The task list provides the function of **"query"**, where **job name** provides the function of fuzzy query. + + +## 4.3. Management jobs + +
+ +         Click **"job name"** of a task in the task list to provide the function of managing the task, or click **"three dots"** to the left of the name to call up the specific function configuration entry, as shown below: + +![Configuration job](../../images/job_list_en.png) +
Picture 4.3 Configuration job&#13;
+ +
+ +   The configuration task provides five functions, namely: + +- Parameter configuration +- alarm configuration +- operation history +- operation log +- snapshot[savepoint] + +&#13;
+ +Click batch operation, and multiple job tasks can be restarted. Restart and snapshot will generate a snapshot and then restart. Restart directly will not generate a snapshot. + +![jobbulk_operate](../../images/jobbulk_operate_en.png) + +### 4.3.1. Job summary: + +![Operating condition](../../images/stream_job_detail_en.png) +
Picture 4.4 Job summary
+ +
+ +  The running status summarizes the real-time traffic, total amount of data, and load status of the task. + +
+ +### 4.3.2. Job history: + +![Execution history](../../images/stream_job_history_en.png) +
Picture 4.5 Job history
+ +
+ +  The running history records the record of each run of the task. + +
+ + +### 4.3.3. Job config: + +![Configuration](../../images/stream_job_config_en_1.png) +![Configuration](../../images/stream_job_config_en_2.png) +
Picture 4.6 Job config
+ +
+ +  The configuration page is used to configure various parameters of the running task, and provides the following types of configuration: + +- Resource configuration +- Production configuration +- Flink parameters +- Permission settings + +
+ +### 4.3.4. Job details: + +
+ +  The job details are divided into two display interfaces according to the task type Flink Jar and Flink SQL. + +
+ +**Flink Jar Job details** + +![Job details](../../images/stream_job_flinkjar_jobcontent_en.png) +
Picture 4.7 Flink Jar Job details
+ +
+ +  Flink Jar task details show the contents and parameters of the task Jar package, and provide the function of downloading the Jar package. + +
+ + +**Flink SQL job details** + +![Job details](../../images/stream_job_flinksql_jobcontent_en.png) +
Picture 4.8 Flink SQL job details
+ +
+ +  The Flink SQL job details show the SQL statement of the task. + +
+ + + +## 5. Project resource file + +
+ +          **Page entry:** On the top right of the homepage-core indicators **"Project Resource File"**. + +         The project resource file provides the function of uploading and managing the resource files required by the project, as shown in the figure below: + +
+ +![Engineering Documents Home Page](../../images/project_source_file_list_en.png) +
Picture 5.1 Engineering Documents Home Page
+ +
+ +![Upload project file](../../images/project_source_file_import_en.png) +
Picture 5.2 Upload project file
diff --git a/docs/images/SQLDetail.png b/docs/images/SQLDetail.png new file mode 100644 index 000000000..e847f9f1e Binary files /dev/null and b/docs/images/SQLDetail.png differ diff --git a/docs/images/SQLDetail_en.png b/docs/images/SQLDetail_en.png new file mode 100644 index 000000000..3c0f6bb14 Binary files /dev/null and b/docs/images/SQLDetail_en.png differ diff --git a/docs/images/config.png b/docs/images/config.png new file mode 100644 index 000000000..04f9f406d Binary files /dev/null and b/docs/images/config.png differ diff --git a/docs/images/config_en.png b/docs/images/config_en.png new file mode 100644 index 000000000..6c6f6f46e Binary files /dev/null and b/docs/images/config_en.png differ diff --git a/docs/images/create_script_file.png b/docs/images/create_script_file.png new file mode 100644 index 000000000..ce3030df9 Binary files /dev/null and b/docs/images/create_script_file.png differ diff --git a/docs/images/create_stream_product_center.png b/docs/images/create_stream_product_center.png new file mode 100644 index 000000000..701b4c683 Binary files /dev/null and b/docs/images/create_stream_product_center.png differ diff --git a/docs/images/create_stream_product_center_en.png b/docs/images/create_stream_product_center_en.png new file mode 100644 index 000000000..952d11af9 Binary files /dev/null and b/docs/images/create_stream_product_center_en.png differ diff --git a/docs/images/enter_flinksql.png b/docs/images/enter_flinksql.png new file mode 100644 index 000000000..c360dbe28 Binary files /dev/null and b/docs/images/enter_flinksql.png differ diff --git a/docs/images/entry.png b/docs/images/entry.png new file mode 100644 index 000000000..259fd268f Binary files /dev/null and b/docs/images/entry.png differ diff --git a/docs/images/entry_en.png b/docs/images/entry_en.png new file mode 100644 index 000000000..a4c5d0cc8 Binary files /dev/null and b/docs/images/entry_en.png differ diff --git a/docs/images/fileHome.png b/docs/images/fileHome.png new file mode 
100644 index 000000000..b46c98dd3 Binary files /dev/null and b/docs/images/fileHome.png differ diff --git a/docs/images/fileHome_en.png b/docs/images/fileHome_en.png new file mode 100644 index 000000000..5c26bc03d Binary files /dev/null and b/docs/images/fileHome_en.png differ diff --git a/docs/images/fileUpload.png b/docs/images/fileUpload.png new file mode 100644 index 000000000..198ff6b11 Binary files /dev/null and b/docs/images/fileUpload.png differ diff --git a/docs/images/fileUpload_en.png b/docs/images/fileUpload_en.png new file mode 100644 index 000000000..42e8e8cc4 Binary files /dev/null and b/docs/images/fileUpload_en.png differ diff --git a/docs/images/flinksql_job_use_demo.png b/docs/images/flinksql_job_use_demo.png new file mode 100644 index 000000000..7c8dd715d Binary files /dev/null and b/docs/images/flinksql_job_use_demo.png differ diff --git a/docs/images/flinksql_job_use_demo2.png b/docs/images/flinksql_job_use_demo2.png new file mode 100644 index 000000000..606fe116f Binary files /dev/null and b/docs/images/flinksql_job_use_demo2.png differ diff --git a/docs/images/flinksql_script_file.png b/docs/images/flinksql_script_file.png new file mode 100644 index 000000000..47229ab8c Binary files /dev/null and b/docs/images/flinksql_script_file.png differ diff --git a/docs/images/history.png b/docs/images/history.png new file mode 100644 index 000000000..42004745e Binary files /dev/null and b/docs/images/history.png differ diff --git a/docs/images/history_en.png b/docs/images/history_en.png new file mode 100644 index 000000000..30b891b0f Binary files /dev/null and b/docs/images/history_en.png differ diff --git a/docs/images/homePage.png b/docs/images/homePage.png new file mode 100644 index 000000000..c1ca398e4 Binary files /dev/null and b/docs/images/homePage.png differ diff --git a/docs/images/homePage_en.png b/docs/images/homePage_en.png new file mode 100644 index 000000000..5e8797904 Binary files /dev/null and b/docs/images/homePage_en.png differ diff 
--git a/docs/images/home_page.png b/docs/images/home_page.png new file mode 100644 index 000000000..80c419d53 Binary files /dev/null and b/docs/images/home_page.png differ diff --git a/docs/images/home_page_en.png b/docs/images/home_page_en.png new file mode 100644 index 000000000..28b0399a1 Binary files /dev/null and b/docs/images/home_page_en.png differ diff --git a/docs/images/image-20211230174445688.png b/docs/images/image-20211230174445688.png new file mode 100644 index 000000000..066357b2f Binary files /dev/null and b/docs/images/image-20211230174445688.png differ diff --git a/docs/images/image-20211230174723105.png b/docs/images/image-20211230174723105.png new file mode 100644 index 000000000..484c55797 Binary files /dev/null and b/docs/images/image-20211230174723105.png differ diff --git a/docs/images/image-20211230175424588.png b/docs/images/image-20211230175424588.png new file mode 100644 index 000000000..9fc8934d2 Binary files /dev/null and b/docs/images/image-20211230175424588.png differ diff --git a/docs/images/image-20211231092431429.png b/docs/images/image-20211231092431429.png new file mode 100644 index 000000000..ec7305b09 Binary files /dev/null and b/docs/images/image-20211231092431429.png differ diff --git a/docs/images/image-20211231092520768.png b/docs/images/image-20211231092520768.png new file mode 100644 index 000000000..95822b05e Binary files /dev/null and b/docs/images/image-20211231092520768.png differ diff --git a/docs/images/image-20211231092959561.png b/docs/images/image-20211231092959561.png new file mode 100644 index 000000000..dcfbaf083 Binary files /dev/null and b/docs/images/image-20211231092959561.png differ diff --git a/docs/images/image-20211231093901173.png b/docs/images/image-20211231093901173.png new file mode 100644 index 000000000..d5effb0df Binary files /dev/null and b/docs/images/image-20211231093901173.png differ diff --git a/docs/images/image-20211231094103002.png b/docs/images/image-20211231094103002.png new file mode 
100644 index 000000000..0cd4c6872 Binary files /dev/null and b/docs/images/image-20211231094103002.png differ diff --git a/docs/images/image-20211231101048962.png b/docs/images/image-20211231101048962.png new file mode 100644 index 000000000..f6be574aa Binary files /dev/null and b/docs/images/image-20211231101048962.png differ diff --git a/docs/images/image-20211231102020703.png b/docs/images/image-20211231102020703.png new file mode 100644 index 000000000..91b20ebfa Binary files /dev/null and b/docs/images/image-20211231102020703.png differ diff --git a/docs/images/jarDetail_en.png b/docs/images/jarDetail_en.png new file mode 100644 index 000000000..2143eec38 Binary files /dev/null and b/docs/images/jarDetail_en.png differ diff --git a/docs/images/jarZip.png b/docs/images/jarZip.png new file mode 100644 index 000000000..3f395acbf Binary files /dev/null and b/docs/images/jarZip.png differ diff --git a/docs/images/jobNav.png b/docs/images/jobNav.png new file mode 100644 index 000000000..f106ca1d1 Binary files /dev/null and b/docs/images/jobNav.png differ diff --git a/docs/images/jobNav_en.png b/docs/images/jobNav_en.png new file mode 100644 index 000000000..130d2aabc Binary files /dev/null and b/docs/images/jobNav_en.png differ diff --git a/docs/images/job_list.png b/docs/images/job_list.png new file mode 100644 index 000000000..50fb4e4c9 Binary files /dev/null and b/docs/images/job_list.png differ diff --git a/docs/images/job_list_en.png b/docs/images/job_list_en.png new file mode 100644 index 000000000..487bf502d Binary files /dev/null and b/docs/images/job_list_en.png differ diff --git a/docs/images/jobbulk_operate.png b/docs/images/jobbulk_operate.png new file mode 100644 index 000000000..38aabe699 Binary files /dev/null and b/docs/images/jobbulk_operate.png differ diff --git a/docs/images/jobbulk_operate_en.png b/docs/images/jobbulk_operate_en.png new file mode 100644 index 000000000..6c064cbf6 Binary files /dev/null and b/docs/images/jobbulk_operate_en.png 
differ diff --git a/docs/images/project_source_file_import.png b/docs/images/project_source_file_import.png new file mode 100644 index 000000000..a7b2404b8 Binary files /dev/null and b/docs/images/project_source_file_import.png differ diff --git a/docs/images/project_source_file_import_en.png b/docs/images/project_source_file_import_en.png new file mode 100644 index 000000000..01b3419ca Binary files /dev/null and b/docs/images/project_source_file_import_en.png differ diff --git a/docs/images/project_source_file_list.png b/docs/images/project_source_file_list.png new file mode 100644 index 000000000..ebcf25982 Binary files /dev/null and b/docs/images/project_source_file_list.png differ diff --git a/docs/images/project_source_file_list_en.png b/docs/images/project_source_file_list_en.png new file mode 100644 index 000000000..84590e46e Binary files /dev/null and b/docs/images/project_source_file_list_en.png differ diff --git a/docs/images/statusDetail.png b/docs/images/statusDetail.png new file mode 100644 index 000000000..6236aa578 Binary files /dev/null and b/docs/images/statusDetail.png differ diff --git a/docs/images/statusDetail_en.png b/docs/images/statusDetail_en.png new file mode 100644 index 000000000..120b2c415 Binary files /dev/null and b/docs/images/statusDetail_en.png differ diff --git a/docs/images/stream_job_config_1.png b/docs/images/stream_job_config_1.png new file mode 100644 index 000000000..5e391bf84 Binary files /dev/null and b/docs/images/stream_job_config_1.png differ diff --git a/docs/images/stream_job_config_2.png b/docs/images/stream_job_config_2.png new file mode 100644 index 000000000..a1ea87288 Binary files /dev/null and b/docs/images/stream_job_config_2.png differ diff --git a/docs/images/stream_job_config_en_1.png b/docs/images/stream_job_config_en_1.png new file mode 100644 index 000000000..40d403db5 Binary files /dev/null and b/docs/images/stream_job_config_en_1.png differ diff --git a/docs/images/stream_job_config_en_2.png 
b/docs/images/stream_job_config_en_2.png new file mode 100644 index 000000000..466f73ed2 Binary files /dev/null and b/docs/images/stream_job_config_en_2.png differ diff --git a/docs/images/stream_job_detail.png b/docs/images/stream_job_detail.png new file mode 100644 index 000000000..99dcb0a97 Binary files /dev/null and b/docs/images/stream_job_detail.png differ diff --git a/docs/images/stream_job_detail_en.png b/docs/images/stream_job_detail_en.png new file mode 100644 index 000000000..d7a7e3ba5 Binary files /dev/null and b/docs/images/stream_job_detail_en.png differ diff --git a/docs/images/stream_job_flinkjar_jobcontent.png b/docs/images/stream_job_flinkjar_jobcontent.png new file mode 100644 index 000000000..9b0063eb3 Binary files /dev/null and b/docs/images/stream_job_flinkjar_jobcontent.png differ diff --git a/docs/images/stream_job_flinkjar_jobcontent_en.png b/docs/images/stream_job_flinkjar_jobcontent_en.png new file mode 100644 index 000000000..6e02623ba Binary files /dev/null and b/docs/images/stream_job_flinkjar_jobcontent_en.png differ diff --git a/docs/images/stream_job_flinksql_jobcontent.png b/docs/images/stream_job_flinksql_jobcontent.png new file mode 100644 index 000000000..cdc9eceae Binary files /dev/null and b/docs/images/stream_job_flinksql_jobcontent.png differ diff --git a/docs/images/stream_job_flinksql_jobcontent_en.png b/docs/images/stream_job_flinksql_jobcontent_en.png new file mode 100644 index 000000000..16e5faeea Binary files /dev/null and b/docs/images/stream_job_flinksql_jobcontent_en.png differ diff --git a/docs/images/stream_job_history.png b/docs/images/stream_job_history.png new file mode 100644 index 000000000..c90a486a1 Binary files /dev/null and b/docs/images/stream_job_history.png differ diff --git a/docs/images/stream_job_history_en.png b/docs/images/stream_job_history_en.png new file mode 100644 index 000000000..d6e075ad3 Binary files /dev/null and b/docs/images/stream_job_history_en.png differ diff --git 
a/docs/images/stream_product_center.png b/docs/images/stream_product_center.png new file mode 100644 index 000000000..a039e76c5 Binary files /dev/null and b/docs/images/stream_product_center.png differ diff --git a/docs/images/stream_product_center_en.png b/docs/images/stream_product_center_en.png new file mode 100644 index 000000000..477fea4fd Binary files /dev/null and b/docs/images/stream_product_center_en.png differ diff --git a/docs/images/streamis_appconn_en.png b/docs/images/streamis_appconn_en.png new file mode 100644 index 000000000..8a96039f1 Binary files /dev/null and b/docs/images/streamis_appconn_en.png differ diff --git a/docs/images/streamis_project_privilege_en.png b/docs/images/streamis_project_privilege_en.png new file mode 100644 index 000000000..76f123fe2 Binary files /dev/null and b/docs/images/streamis_project_privilege_en.png differ diff --git a/docs/images/upload_jobtask_error.png b/docs/images/upload_jobtask_error.png new file mode 100644 index 000000000..ffa2033ae Binary files /dev/null and b/docs/images/upload_jobtask_error.png differ diff --git a/docs/images/upload_jobtask_error_solve.png b/docs/images/upload_jobtask_error_solve.png new file mode 100644 index 000000000..72971929e Binary files /dev/null and b/docs/images/upload_jobtask_error_solve.png differ diff --git a/docs/images/versionDetail.png b/docs/images/versionDetail.png new file mode 100644 index 000000000..1264b018b Binary files /dev/null and b/docs/images/versionDetail.png differ diff --git a/docs/images/versionDetail_en.png b/docs/images/versionDetail_en.png new file mode 100644 index 000000000..8ad144b3a Binary files /dev/null and b/docs/images/versionDetail_en.png differ diff --git a/docs/images/versionHistory.png b/docs/images/versionHistory.png new file mode 100644 index 000000000..9ec2505ff Binary files /dev/null and b/docs/images/versionHistory.png differ diff --git a/docs/images/versionHistory_en.png b/docs/images/versionHistory_en.png new file mode 100644 index 
000000000..16e1567c5 Binary files /dev/null and b/docs/images/versionHistory_en.png differ diff --git a/docs/images/zh_CN/DSS_integration_Streamis.png b/docs/images/zh_CN/DSS_integration_Streamis.png new file mode 100644 index 000000000..e568c8655 Binary files /dev/null and b/docs/images/zh_CN/DSS_integration_Streamis.png differ diff --git a/docs/images/zh_CN/dss_streamis_project.png b/docs/images/zh_CN/dss_streamis_project.png new file mode 100644 index 000000000..f8316a9a6 Binary files /dev/null and b/docs/images/zh_CN/dss_streamis_project.png differ diff --git a/docs/images/zh_CN/eureka_streamis.png b/docs/images/zh_CN/eureka_streamis.png new file mode 100644 index 000000000..4ca0d8495 Binary files /dev/null and b/docs/images/zh_CN/eureka_streamis.png differ diff --git a/docs/images/zh_CN/meta_txt_demo.png b/docs/images/zh_CN/meta_txt_demo.png new file mode 100644 index 000000000..7d46f229d Binary files /dev/null and b/docs/images/zh_CN/meta_txt_demo.png differ diff --git a/docs/images/zh_CN/start-app.png b/docs/images/zh_CN/start-app.png new file mode 100644 index 000000000..1aafb49ae Binary files /dev/null and b/docs/images/zh_CN/start-app.png differ diff --git a/docs/images/zh_CN/streamis_appconn.png b/docs/images/zh_CN/streamis_appconn.png new file mode 100644 index 000000000..34949619d Binary files /dev/null and b/docs/images/zh_CN/streamis_appconn.png differ diff --git a/docs/images/zh_CN/streamis_config.png b/docs/images/zh_CN/streamis_config.png new file mode 100644 index 000000000..501bbfd53 Binary files /dev/null and b/docs/images/zh_CN/streamis_config.png differ diff --git a/docs/images/zh_CN/streamis_project_privilege.png b/docs/images/zh_CN/streamis_project_privilege.png new file mode 100644 index 000000000..9aa4c67fe Binary files /dev/null and b/docs/images/zh_CN/streamis_project_privilege.png differ diff --git a/docs/images/zh_CN/upload_zip.png b/docs/images/zh_CN/upload_zip.png new file mode 100644 index 000000000..9c1efbacc Binary files 
/dev/null and b/docs/images/zh_CN/upload_zip.png differ diff --git "a/docs/images/\345\274\200\345\217\221\344\270\255\345\277\203.png" "b/docs/images/\345\274\200\345\217\221\344\270\255\345\277\203.png" new file mode 100644 index 000000000..8a5d6619b Binary files /dev/null and "b/docs/images/\345\274\200\345\217\221\344\270\255\345\277\203.png" differ diff --git "a/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md" "b/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md" new file mode 100644 index 000000000..588eb4c1a --- /dev/null +++ "b/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md" @@ -0,0 +1,90 @@ +# Streamis系统的AppConn插件使用 + +## 1.StreamisAppConn +---------- + +### 1.1介绍 +StreamisAppConn是Streamis用来与DSS集成的一个AppConn,功能包括 + +|实现的规范和Service | 功能 | 作用微服务 | +|---------------------|------------------------------------------------------|---------------------------------------------------------| +| 二级规范 | 与DSS工程打通,支持工程内容同步 | DSS-Framework-Project-Server | +| 三级规范-CRUDService | 支持流式编排创建、获取、更新、删除等操作 | DSS-Framework-Orchestrator-Server | +| 三级规范-ExportService和ImportService | 支持流式编排的导入导出 | DSS-Framework-Orchestrator-Server | + + + +### 1.2部署 + +1. 编译 + +```bash +#整体编译streamis代码 +cd ${STREAMIS_CODE_HOME} +mvn -N install +mvn clean install +#单独编译appconn插件 +cd ${STREAMIS_CODE_HOME}/streamis-plugins/streamis-appconn +mvn clean install +``` + +2. 部署 +1. 从 ${STREAMIS_CODE_HOME}/streamis-plugins/streamis-appconn/target 获取appconn的安装包 +2. 上传到DSS放置appconn的目录 +```bash +cd ${DSS_HOME}/dss/dss-appconns +unzip streamis-appconn.zip +``` +3. 
执行sql +需要进入到 +```roomsql +SET @STREAMIS_INSTALL_IP_PORT='127.0.0.1:9003'; +SET @URL = replace('http://STREAMIS_IP_PORT', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT); +SET @HOMEPAGE_URL = replace('http://STREAMIS_IP_PORT', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT); +SET @PROJECT_URL = replace('http://STREAMIS_IP_PORT', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT); +SET @REDIRECT_URL = replace('http://STREAMIS_IP_PORT/udes/auth', 'STREAMIS_IP_PORT', @STREAMIS_INSTALL_IP_PORT); + +delete from `dss_application` WHERE `name` = 'Streamis'; +INSERT INTO `dss_application`(`name`,`url`,`is_user_need_init`,`level`,`user_init_url`,`exists_project_service`,`project_url`,`enhance_json`,`if_iframe`,`homepage_url`,`redirect_url`) VALUES ('Streamis', @URL, 0, 1, NULL, 0, @PROJECT_URL, '', 1, @HOMEPAGE_URL, @REDIRECT_URL); + +select @dss_streamis_applicationId:=id from `dss_application` WHERE `name` = 'Streamis'; + +delete from `dss_onestop_menu` WHERE `name` = '数据交换'; +select @dss_onestop_menu_id:=id from `dss_onestop_menu` where `name` = '数据交换'; + +delete from `dss_onestop_menu_application` WHERE title_en = 'Streamis'; +``` + + + +### 1.3使用 + +## 2.Streamis DataSource AppConn +---------- +### 2.1介绍 +|实现的规范和Service | 功能 | 作用微服务 | +|---------------------|------------------------------------------------------|---------------------------------------------------------| +| 三级规范的CRUDService | 支持数据源节点的创建、获取、更新、删除等操作 | DSS-Workflow-Server | +| 三级规范的ExportService和ImportService | 支持数据源的导入导出 | DSS-Workflow-Server | +| 三级规范的ExecutionService | 支持数据源的执行 | Linkis-AppConn-Engine | + +1. 实现三级规范的CRUDService,支持数据源节点的创建、获取、更新、删除等操作 +2. 实现三级规范的ExportService和ImportService,支持数据源的导入导出 +3. 
实现三级规范的ExecutionService,支持数据源的执行 +### 2.2部署 + +### 2.3使用 + + +## 3.Streamis JobManager AppConn + +### 3.1介绍 +StreamisJobManager AppConn与SchedulisAppConn的功能是类似的,主要是将DSS的工作流转换成Streamis能够提交执行的流式应用,并把此流式应用发布到StreamisJobManager的 + +|实现的规范和Service | 功能 | 作用微服务 | +|---------------------|------------------------------------------------------|---------------------------------------------------------| +| 工作流转换规范 | 支持将流式工作流转换成Linkis Flink引擎可以执行的流式应用 | DSS-Framework-Orchestrator-Server | +| 工作流发布规范 | 支持将转换之后的流式应用发布到Streamis-JobManager | DSS-Framework-Orchestrator-Server | + +### 3.2部署 + diff --git "a/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md" new file mode 100644 index 000000000..f51c0cc43 --- /dev/null +++ "b/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md" @@ -0,0 +1,206 @@ +# Streamis安装部署文档 + +## 1.组件介绍 + +Streamis0.2.4 提供了 Streamis-JobManager 流式生产中心,其作用主要有: + +1. 上传/更新流式应用 +2. 配置流式应用参数,如 Flink 的 Slot 数量、checkpoint相关参数等 +3. 管理流式应用,如启停、savepoint等 +4. 
流式应用监控告警 + + +## 2.代码编译 + +**Streamis 无需手动编译,可以直接下载安装包进行部署,请 [点我下载安装包](https://github.com/WeBankFinTech/Streamis/releases)。** + +如果您想自己编译 Streamis,可参考如下步骤进行。 + +- 后台编译方式如下: + +```shell script +cd ${STREAMIS_CODE_HOME} +mvn -N install +mvn clean install +``` +编译成功后将会在项目的 `assembly/target` 目录下生成安装包 `wedatasphere-streamis-${streamis-version}-dist.tar.gz` + +- 前端编译方式如下: + +前置依赖:nodejs、python 2.0 + +```shell script +cd ${STREAMIS_CODE_HOME}/web +npm i +npm run build +``` +编译成功后,在 `${STREAMIS_CODE_HOME}/web` 目录下生成 `streamis-${streamis-version}-dist.zip` + +## 3.安装准备 + +#### 3.1 基础环境安装 + +        下面的软件必须安装: + +- MySQL (5.5+),[如何安装MySQL](https://www.runoob.com/mysql/mysql-install.html) +- JDK (1.8.0_141以上),[如何安装JDK](https://www.runoob.com/java/java-environment-setup.html) + +### 3.2 Linkis 和 DSS 环境 + +- Linkis (>=1.1.1),Streamis 的执行依赖于 Linkis 的 Flink 引擎,并且依赖 **Linkis-1.1.1** 及以上版本,部分功能需要Linkis-1.1.2支持。 +- DataSphere Studio (>=1.1.0),Streamis 流式作业的开发和调试,依赖于 DSS-Scriptis,Streamis 流式生产中心则需嵌入到 DSS 工程框架体系之中,所以依赖于 **DSS-1.1.0** 及以上版本。 + +在正式安装 Streamis 之前,请先安装 Linkis1.1.1 和 DSS1.1.0 及以上版本,并且保证 Linkis Flink 引擎 和 DSS 可以正常使用,DSS 和 Linkis 安装,可参照 [DSS & Linkis 一键安装部署文档](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md)。 + +如何验证 DSS 和 Linkis 已基本可用?您可以在 DSS-Scriptis 上新建一个 flinksql 脚本并执行,如果 flinksql 能正确执行并返回结果集,表示 DSS 和 linkis 环境是可用的。 + + +## 4.安装和启动 + +### 后台安装 + +1.安装包准备 + +将安装包上传到 Linux 服务器(目前只支持 Linux 环境部署)的安装目录,如 /appcom/Install/streamis,然后进行解压: + +```shell script +cd /appcom/Install/streamis +tar -xvf wedatasphere-streamis-${streamis-version}-dist.tar.gz +``` + +2.修改数据库配置 + +```shell script +vi conf/db.sh +#配置基础的数据库信息 + +``` + +3.修改基础配置文件 + +```shell script + vi conf/config.sh +``` + +```shell script +### deploy user +deployUser=hadoop + +### ssh port +SSH_PORT=22 + +##The Port of Streamis +STREAMIS_PORT=9400 + +### The install home path of STREAMIS,Must provided 
+STREAMIS_INSTALL_HOME=/appcom/Install/streamis + +### Linkis EUREKA information. # Microservices Service Registration Discovery Center +EUREKA_INSTALL_IP=127.0.0.1 +EUREKA_PORT=20303 + +### Linkis Gateway information +GATEWAY_INSTALL_IP=127.0.0.1 +GATEWAY_PORT=9001 + +``` + +4.执行安装脚本 + +```shell script +sh bin/install.sh +``` + +- install.sh脚本会询问您是否需要初始化数据库并导入元数据。 + +     因为担心用户重复执行install.sh脚本,把数据库中的用户数据清空,所以在install.sh执行时,会询问用户是否需要初始化数据库并导入元数据。 + +     **第一次安装**必须选是。 + + +5.启动 + +```shell script +sh bin/start.sh +``` + +- 启动验证 + +验证方式,因为 Streamis 与 Linkis 同用一套 Eureka,所以您需要检查 Linkis 的 Eureka 页面是否已经包含了 Streamis 的服务,如图: + +![components](../../images/zh_CN/eureka_streamis.png) + + +### 前端部署 + +1.安装nginx + +```bash +sudo yum install -y nginx +``` + +2.部署前端包 + +``` +mkdir ${STREAMIS_FRONT_PATH} +cd ${STREAMIS_FRONT_PATH} +#放置前端包 +unzip streamis-${streamis-version}.zip +``` + +3.修改nginx配置文件
+ +```bash +cd /etc/nginx/conf.d +vi streamis.conf +# 复制下面的模板并根据实际情况进行修改 +``` + +``` +server { + listen 9088;# 访问端口 + server_name localhost; + location / { + root ${STREAMIS_FRONT_PATH}/dist; # 请修改成Streamis前端的静态文件目录 + index index.html index.html; + } + location /api { + proxy_pass http://${Linkis_GATEWAY_IP}:${LINKIS_GATEWY_PORT}; #后端Linkis的地址,请修改成Linkis网关的ip和端口 + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header x_real_ipP $remote_addr; + proxy_set_header remote_addr $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_http_version 1.1; + proxy_connect_timeout 4s; + proxy_read_timeout 600s; + proxy_send_timeout 12s; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection upgrade; + } + + #error_page 404 /404.html; + # redirect server error pages to the static page /50x.html + # + error_page 500 502 503 504 /50x.html; + location = /50x.html { + root /usr/share/nginx/html; + } +} +``` + +4.加载nginx配置 + +```bash +sudo nginx -s reload +``` + +## 5. 
接入DSS + +如您想正常使用 Streamis0.2.4 前端,还需安装 DSS StreamisAppConn 插件,请参考: [StreamisAppConn 插件安装文档](development/StreamisAppConn安装文档.md) + +## 6.Linkis Flink引擎编译安装 +如您想正常执行 Streamis0.2.4,还需安装 Linkis Flink 引擎,请参考: [Linkis Flink 引擎安装文档](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/) + +## 7.Streamis组件升级文档/脚本 +如您想从Streamis较低版本升级到 Streamis0.2.4 ,请参考:[Streamis升级文档](development/Streamis升级文档.md) \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/architecture/README.md b/docs/zh_CN/0.2.0/architecture/README.md new file mode 100644 index 000000000..e69de29bb diff --git a/docs/zh_CN/0.2.0/architecture/SUMMARY.md b/docs/zh_CN/0.2.0/architecture/SUMMARY.md new file mode 100644 index 000000000..9aed8f45f --- /dev/null +++ b/docs/zh_CN/0.2.0/architecture/SUMMARY.md @@ -0,0 +1,9 @@ +* [Streamis架构设计](README.md) + * [StreamDataSource 架构设计]() + * [StreamJobManager 架构设计]() + * [StreamWorkflow 架构设计]() + * [Stream Plugins 架构设计]() + * [DataSourceAppConn 介绍]() + * [StreamisAppConn 介绍]() + * [StreamJobManagerAppConn 介绍]() + * [FlinkAppConn 介绍]() \ No newline at end of file diff --git "a/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md" new file mode 100644 index 000000000..d8bf3122c --- /dev/null +++ "b/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md" @@ -0,0 +1,178 @@ +# Streamis接入AppConn + +## 总体流程图 +![Streamis接入DSS](../../../images/zh_CN/streamis_appconn.png) + +## DSS项目appconn插件streamis-appconn + +### 配置表 +配置下面4张表dss_workspace_dictionary、dss_appconn、dss_workspace_menu_appconn、dss_appconn_instance,appconn_name为realTimeJobCenter是界面访问的appconn,appconn_name为streamis是接口访问的appconn,dss启动时会根据表中配置信息实例化StreamisAppConn对象。下面sql中APPCONN_INSTALL_IP和APPCONN_INSTALL_PORT在执行DSS安装脚本进行自动化安装时会通过交互式命令的方式输入。 +```roomsql +delete 
from `dss_workspace_dictionary` WHERE `appconn_name` = 'streamis'; + +INSERT INTO `dss_workspace_dictionary` ( `workspace_id`, `parent_key`, `dic_name`, `dic_name_en`, `dic_key`, `dic_value`, `dic_value_en`, `title`, `title_en`, `url`, `url_type`,`icon`, `order_num`, `remark`, `create_user`, `create_time`, `update_user`, `update_time`, appconn_name) +VALUES ('0','p_develop_process','流式生产中心','Streamis Product Center','pdp_streamis_product_center','streamis_prod',NULL,NULL,NULL, +'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realtimeJobCenter?projectName=${projectName}&workspaceName=${workspaceName}','0','kaifa-icon','1','工程开发流程-流式生产中心','SYSTEM','2020-12-28 17:32:35',NULL,'2022-06-30 17:49:02','streamis'); + +select @old_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis'; + +delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_dss_appconn_id; +delete from `dss_appconn_instance` where `appconn_id` = @old_dss_appconn_id; +delete from `dss_appconn` where `appconn_name`='streamis'; + +select @old_jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter'; + +delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_jobcenter_dss_appconn_id; +delete from `dss_appconn_instance` where `appconn_id` = @old_jobcenter_dss_appconn_id; +delete from `dss_appconn` where `appconn_name`='realTimeJobCenter'; + +INSERT INTO dss_appconn +(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource) +VALUES('streamis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.streamis.dss.appconn.StreamisAppConn', NULL, NULL); +INSERT INTO dss_appconn +(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource) +VALUES('realTimeJobCenter', 0, 1, 1, 1, 'sso', '', NULL, NULL); + +select @dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis'; +select @jobcenter_dss_appconn_id:=id from 
`dss_appconn` where `appconn_name` = 'realTimeJobCenter'; + +INSERT INTO dss_workspace_menu_appconn +(appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image) +VALUES(@jobcenter_dss_appconn_id, 1, 'StreamSQL development', 'StreamSQL开发', 'Real-time application development is a streaming solution jointly built by WeDataSphere, Boss big data team and China Telecom ctcloud Big data team.', '实时应用开发是微众银行微数域(WeDataSphere)、Boss直聘大数据团队 和 中国电信天翼云大数据团队 社区联合共建的流式解决方案,以 Linkis 做为内核,基于 Flink Engine 构建的批流统一的 Flink SQL,助力实时化转型。', +'streaming, realtime', '流式,实时', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'http://127.0.0.1:8088/wiki/scriptis/manual/workspace_cn.html', 'shujukaifa-logo', NULL, NULL, NULL, NULL, NULL, 'shujukaifa-icon'); + +INSERT INTO dss_appconn_instance +(appconn_id, label, url, enhance_json, homepage_uri) +VALUES(@dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter'); + +INSERT INTO dss_appconn_instance +(appconn_id, label, url, enhance_json, homepage_uri) +VALUES(@jobcenter_dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter', NULL, NULL); +``` + +### 具体实现说明 +StreamisAppConn继承AbstractOnlySSOAppConn实现SecondlyAppConn,重写方法创建一个StreamisStructureIntegrationStandard,目前StreamisAppConn只实现了免密登录和组织结构规范功能。 + +通过重写StreamisStructureIntegrationStandard中方法创建StreamisProjectService,其内部需要重写4个方法,会创建4个操作类StreamisProjectSearchOperation、StreamisProjectCreationOperation、StreamisProjectUpdateOperation、StreamisPrejectDeleteOperation。Operation类通过http调用streamis应用,分别实现查询、创建、修改、删除项目,同步dss项目信息到Streamis。 + +- dss创建项目时,会先调用查询操作,查询streamis中是否已存在相同的项目名称,如果存在则弹出提示信息,不存在则继续调用创建操作去streamis中新建项目, +- dss修改项目时,会调用修改操作去streamis中更新项目信息, +- 
dss删除项目时,会调用删除操作去streamis中删除项目信息,更改删除标记。 + +## 接口 +1 接口名称:查询项目 +- 接口路径:GET /streamis/project/searchProject +- 请求参数 + +|参数名称 |是否必须 |示例 |备注 | +|-------------|---------|-------|--------| +|projectName |是 | | | + +- 返回数据 + +|名称 |类型 |是否必须 |默认值 |备注 | +|-------------|--------|---------|---------|--------| +|method |string |否 | +|status |number |是 | +|message |string |否 | +|data |object |是 | +|- projectId |number |是 | + +2 接口名称:创建项目 +- 接口路径:POST /streamis/project/createProject +- 请求参数 + +|参数名称 |是否必须 |示例 |备注 | +|-------------|---------|-------|--------| +|projectName |是 | | | +|workspaceId |否 | | | +|releaseUsers |否 | | | +|editUsers |否 | | | +|accessUsers |否 | | | + +- 返回数据 + +|名称 |类型 |是否必须 |默认值 |备注 | +|----------|--------|---------|---------|--------| +|method |string |否 | +|status |number |是 | +|message |string |否 | +|data |object |是 | +|- projectId |number |是 | +|- projectName |string |否 | + +3 接口名称:修改项目 +- 接口路径:PUT /streamis/project/updateProject +- 请求参数 + +|参数名称 |是否必须 |示例 |备注 | +|-------------|---------|-------|--------| +|projectId |是 | | | +|projectName |是 | | | +|workspaceId |否 | | | +|releaseUsers |否 | | | +|editUsers |否 | | | +|accessUsers |否 | | | + +- 返回数据 + +|名称 |类型 |是否必须 |默认值 |备注 | +|----------|--------|---------|---------|--------| +|method |string |否 | +|status |number |是 | +|message |string |否 | +|data |object |否 | + +4 接口名称:删除项目 +- 接口路径:DELETE /streamis/project/deleteProject +- 请求参数 + +|参数名称 |是否必须 |示例 |备注 | +|-------------|---------|-------|--------| +|projectId |是 | | | +|projectName |否 | | | + +- 返回数据 + +|名称 |类型 |是否必须 |默认值 |备注 | +|----------|--------|---------|---------|--------| +|method |string |否 | +|status |number |是 | +|message |string |否 | +|data |object |否 | + +## Streamis项目streamis-project-server + +### 相关表操作 +Streamis涉及到的表共2张,项目表linkis_stream_project和项目权限表linkis_stream_project_privilege。 +```roomsql +--已存在表 +CREATE TABLE `linkis_stream_project` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `workspace_id` bigint(20) DEFAULT NULL, + `name` 
varchar(100) DEFAULT NULL, + `create_by` varchar(50) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表'; + +--新增加表 +CREATE TABLE `linkis_stream_project_privilege` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `project_id` bigint(20) NOT NULL, + `user_name` varchar(100) NOT NULL, + `privilege` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:发布权限 ,2:编辑权限 ,3:查看权限', + PRIMARY KEY (`id`) USING BTREE +) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='项目权限表'; + +--新增加字段 +ALTER TABLE `linkis_stream_project` ADD create_time datetime DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD last_update_by varchar(50) DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD last_update_time datetime DEFAULT NULL; +ALTER TABLE `linkis_stream_project` ADD is_deleted tinyint unsigned DEFAULT 0; +``` + +### 具体实现说明 +- 查询操作,会获取请求参数projectName,在表linkis_stream_project中查询对应id,查询操作成功返回status为0,projectId为查询出来的id,如果查询结果为空则projectId为null; +- 创建操作,会将请求参数中项目信息(projectName、workspaceId)插入项目表linkis_stream_project并自增生成id,将请求参数中权限信息数据(releaseUsers、editUsers、accessUsers)中user和项目表生成id关联,插入表linkis_stream_project_privilege,创建操作成功返回status值为0,会将项目表生成id作为projectId的值返回; +- 修改操作,会根据请求参数projectId将请求信息数据更新到表linkis_stream_project和 linkis_stream_project_privilege,成功直接返回status值为0; +- 删除操作,会根据projectId将表linkis_stream_project的is_deleted字段标记为1,linkis_stream_project_privilege中相关数据删除,成功直接返回status值为0。 diff --git "a/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md" new file mode 100644 index 000000000..02514a3e8 --- /dev/null +++ "b/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md" @@ -0,0 +1,43 @@ +# 鉴权 + +## 鉴权流程图 +streamis存在需要鉴权的模块没有依赖streamis-project-server模块的情况,使用rest接口调用来处理鉴权。 
+![Streamis项目鉴权操作](../../../images/zh_CN/streamis_project_privilege.png) + +## 具体实现说明 +根据当前用户名和项目ID/名称获取到所有权限的集合,如果权限集合中包含RELEASE权限,则拥有发布/编辑/查看的权限,如果权限集合中包含EDIT权限,则拥有编辑/查看的权限,如果权限集合中包含ACCESS权限,则拥有查看的权限; +权限包含关系:RELEASE权限包含EDIT权限、ACCESS权限;EDIT权限包含ACCESS权限。 + +## 接口 +### edit权限接口: + +|RequestMethod |接口路径 |名称 | +|------|----------------------------------------------------------|-----------------| +|POST |/streamis/streamProjectManager/project/files/upload |工程资源文件-导入 | +|GET |/streamis/streamProjectManager/project/files/delete |删除项目下该文件所有版本 | +|GET |/streamis/streamProjectManager/project/files/version/delete |删除版本文件 | +|GET |/streamis/streamProjectManager/project/files/download |任务详情-下载 | +|POST |streamis/streamJobManager/job/createOrUpdate |创建或更新streamis-job| +|POST |/streamis/streamJobManager/job/upload |上传文件 | +|POST |/streamis/streamJobManager/job/execute |启动 | +|GET |/streamis/streamJobManager/job/stop |停止 | +|PUT |/streamis/streamJobManager/job//snapshot/{jobId:\w+} |快照生成 | +|GET |/streamis/streamJobManager/config/json/{jobId:\w+} |配置-保存 | +|POST |/streamis/streamJobManager/job/bulk/execution |批量启动 | +|POST |/streamis/streamJobManager/job/bulk/pause |批量停止 | + + +### access权限接口: + +|RequestMethod |接口路径 |名称 | +|------|----------------------------------------------------------|-------------| +|GET |streamis/streamJobManager/job/list |查询当前用户可查看job | +|GET |/streamis/streamProjectManager/project/files/list |工程资源文件 | +|GET |/streamis/streamProjectManager/project/files/version/list |获取工程下该文件所有版本 | +|GET |/streamis/streamJobManager/job/version |查询job版本 | +|GET |/streamis/streamJobManager/job/execute/history |job执行历史 | +|GET |/streamis/streamJobManager/job/progress |获取job当前版本最新task状态 | +|GET |/streamis/streamJobManager/job/jobContent |任务详情 | +|GET |/streamis/streamJobManager/job/logs |获取日志 | +|POST |/streamis/streamJobManager/config/json/{jobId:\w+} |获得任务配置 | +|GET |/streamis/streamJobManager/config/view |查询当前job配置信息 | \ No newline at end of file diff --git 
a/docs/zh_CN/0.2.0/development/Interface_documentation/README.md b/docs/zh_CN/0.2.0/development/Interface_documentation/README.md new file mode 100644 index 000000000..05897a731 --- /dev/null +++ b/docs/zh_CN/0.2.0/development/Interface_documentation/README.md @@ -0,0 +1,61 @@ +## 1. 目录 + +* [StreamDataSource 接口文档]() +* [StreamJobManager 接口文档]() +* [StreamWorkflow 接口文档]() + +## 2. URL规范 + +``` +/api/rest_j/v1/streamis/{moduleName}/.+ +``` + +**约定**: + + - rest_j表示接口符合Jersey规范 + - v1为服务的版本号,**版本号会随着 Linkis 版本进行升级** + - streamis为微服务名 + - {moduleName}为模块名,其中: + * StreamDataSource 模块名命名为 streamDataSource; + * StreamJobManager 模块名命名为 streamJobManager; + * StreamWorkflow 模块名命名为 streamWorkflow; + +## 3. 接口请求格式 + +```json +{ + "method": "/api/rest_j/v1/streamis/.+", + "data": {} +} +``` + +**约定**: + + - method:请求的Restful API URL。 + - data:请求的具体数据。 + +## 4. 接口返回格式 + +```json +{ + "method": "/api/rest_j/v1/streamis/.+", + "status": 0, + "message": "创建成功!", + "data": {} +} +``` + +**约定**: + + - method:返回请求的Restful API URL,主要是websocket模式需要使用。 + - status:返回状态信息,其中:-1表示没有登录,0表示成功,1表示错误,2表示验证失败,3表示没该接口的访问权限。 + - data:返回具体的数据。 + - message:返回请求的提示信息。如果status非0时,message返回的是错误信息,其中data有可能存在stack字段,返回具体的堆栈信息。 + +另:根据status的不同,HTTP请求的状态码也不一样,一般情况下: + + - 当status为0时,HTTP的状态码为200 + - 当status为-1时,HTTP的状态码为401 + - 当status为1时,HTTP的状态码为400 + - 当status为2时,HTTP的状态码为412 + - 当status为3时,HTTP的状态码为403 \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/development/Requirements_documentation/README.md b/docs/zh_CN/0.2.0/development/Requirements_documentation/README.md new file mode 100644 index 000000000..9991b7b0c --- /dev/null +++ b/docs/zh_CN/0.2.0/development/Requirements_documentation/README.md @@ -0,0 +1,4 @@ +## 1. 
目录 + +* [StreamDataSource 需求文档]() +* [StreamJobManager 需求文档]() \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/development/SUMMARY.md b/docs/zh_CN/0.2.0/development/SUMMARY.md new file mode 100644 index 000000000..7b9a53b5b --- /dev/null +++ b/docs/zh_CN/0.2.0/development/SUMMARY.md @@ -0,0 +1,13 @@ +* [开发规范](Specification_documentation/README.md) +* [接口文档](Interface_documentation/README.md) + * [StreamDataSource 接口文档]() + * [StreamJobManager 接口文档]() + * [StreamWorkflow 接口文档]() +* [表结构设计文档](Table_Structure_documentation/README.md) + * [StreamDataSource 表结构设计文档]() + * [StreamJobManager 表结构设计文档]() + * [StreamWorkflow 表结构设计文档]() +* [需求文档](Requirements_documentation/README.md) + * [StreamDataSource 需求文档]() + * [StreamJobManager 需求文档]() +* [UI交互稿](UI_draft/Streamis交互稿V4.zip) \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md b/docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md new file mode 100644 index 000000000..8598b026b --- /dev/null +++ b/docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md @@ -0,0 +1,31 @@ +## 如何定义新异常? + +- 自定义的异常都必须继承自WarnException、ErrorException或FatalException之一 + +- 自定义的异常必须包含错误码和错误描述,如有必要,也可将发生异常的ip地址和进程端口封装到异常当中 + +- 慎用WarnException!WarnException抛出来的异常,如果在Restful和RPC的Receiver端被捕获,不会给前端或sender端抛出执行失败,而是只返回一条警告信息! 
+ +- WARNException的异常级别为1,ErrorException的异常级别为2,FatalException的异常级别为3 + + +|异常类| 所在服务| 错误码| 错误描述| +|:---- |:--- |:--- |:--- | +|DWCException| common| 无| 顶级父类,继承自Exception,不允许直接继承| +|DWCRuntimeException| common| 无| 顶级父类,继承自RuntimeException,不允许直接继承| +|WarnException| common| 无| 次级父类,继承自DWCRuntimeException。提示级的异常,必须直接或间接继承该类| +|ErrorException| common| 无| 次级父类,继承自DWCException。错误级的异常,必须直接或间接继承该类| +|FatalException| common| 无| 次级父类,继承自DWCException。致命级的异常,必须直接或间接继承该类| + + +## 模块异常规范 + +Streamis 架构错误码范围为:30000~30999,其中: + +- StreamDataSource 错误码范围为:30000~30099 + +- StreamJobManager 错误码范围为:30100~30199 + +- StreamWorkflow 错误码范围为:30200~30299 + +- Stream Plugins 错误码范围为:30300~30499 \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md b/docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md new file mode 100644 index 000000000..693ab7c93 --- /dev/null +++ b/docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md @@ -0,0 +1,23 @@ +## 日志规范 + +1. 【**约定**】Streamis 选择引用 Linkis Commons 通用模块,其中已包含了日志框架,主要以 **slf4j** 和 **Log4j2** 作为日志打印框架,去除了Spring-Cloud包中自带的logback。 +由于Slf4j会随机选择一个日志框架进行绑定,所以以后在引入新maven包的时候,需要将诸如slf4j-log4j等桥接包exclude掉,不然日志打印会出现问题。但是如果新引入的maven包依赖log4j等包,不要进行exclude,不然代码运行可能会报错。 + +2. 【**配置**】log4j2的配置文件默认为 log4j2.xml ,需要放置在 classpath 中。如果需要和 springcloud 结合,可以在 application.yml 中加上 logging:config:classpath:log4j2-spring.xml (配置文件的位置)。 + +3. 【**强制**】类中不可直接使用日志系统(log4j2、Log4j、Logback)中的API。 + + * 如果是Scala代码,强制继承Logging trait + * java采用 LoggerFactory.getLogger(getClass)。 + +4. 【**强制**】严格区分日志级别。其中: + + * Fatal级别的日志,在初始化的时候,就应该抛出来,并使用System.exit(-1)退出。 + * ERROR级别的异常为开发人员必须关注和处理的异常,不要随便用ERROR级别的。 + * Warn级别是用户操作异常日志和一些方便日后排除BUG的日志。 + * INFO为关键的流程日志。 + * DEBUG为调试日志,非必要尽量少写。 + +5. 【**强制**】要求:INFO级别的日志,每个小模块都必须有,关键的流程、跨模块级的调用,都至少有INFO级别的日志。守护线程清理资源等必须有WARN级别的日志。 + +6. 
【**强制**】异常信息应该包括两类信息:案发现场信息和异常堆栈信息。如果不处理,那么通过关键字throws往上抛出。 正例:logger.error(各类参数或者对象toString + "_" + e.getMessage(), e); \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/README.md b/docs/zh_CN/0.2.0/development/Specification_documentation/README.md new file mode 100644 index 000000000..d2e847b1c --- /dev/null +++ b/docs/zh_CN/0.2.0/development/Specification_documentation/README.md @@ -0,0 +1,22 @@ +## 1. 说明 + +为了规范 Streamis 社区的联合共建开发环境,提高 Streamis 版本开发迭代的产出质量,规范 Streamis 的整个开发设计流程,强烈建议各位Contributor遵守以下开发规范: + +说明:Streamis 初始版本的开发规范较为精简,后续会随着 Streamis 的版本迭代不断补充和完善,欢迎各位 Contributor 提出自己的见解和意见。 + +## 2. 代码提交规范 + +但是在正式发布Release Notes时,为了保证Release Notes的完整性,请各位模块负责人按照需求文档,先提好各个 issue,并将 issue 加入到 [Project-0.2.4](https://github.com/WeBankFinTech/Streamis/projects/2)。 + +请注意:Streamis-0.2.4 使用 [Project-0.2.4](https://github.com/WeBankFinTech/Streamis/projects/2) 作为 DPMS 工具,来全程追踪和管理版本的进度。 + +## 3. 后台开发规范 + +* [接口开发规范](../Interface_documentation/README.md) +* [Dao层开发规范](../Table_Structure_documentation/README.md) +* [异常规范](Exception_Throws.md) +* [日志规范](Log_out.md) + +## 4. 
前端开发规范 + +* [前端开发规范](../../../../../web/README.md) \ No newline at end of file diff --git "a/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md" new file mode 100644 index 000000000..9bc8dfe00 --- /dev/null +++ "b/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md" @@ -0,0 +1,71 @@ +StreamisAppConn安装文档 本文主要介绍在DSS(DataSphere Studio)1.1.0中StreamisAppConn的部署、配置以及安装 + +# 1.部署StreamisAppConn的准备工作 +您在部署StreamisAppConn之前,请安装完成Streamis0.2.4及其他相关组件的安装,并确保工程基本功能可用。 + +# 2.StreamisAppConn插件的下载和编译 +1)下载二进制包 +我们提供StreamisAppconn的物料包,您可直接下载使用。[点击跳转 Release 界面](https://github.com/WeBankFinTech/Streamis/releases) + +2) 编译打包 +如果您想自己开发和编译StreamisAppConn,具体编译步骤如下: 1.clone Streamis的代码 2.找到streamis-appconn模块,单独编译streamis-appconn +```shell script +cd {STREAMIS_CODE_HOME}/streamis-appconn +mvn clean install +``` +会在该路径下找到streamis.zip安装包 +```shell script +{STREAMIS_CODE_HOME}\streamis-appconn\target\streamis.zip +``` + +# 3.StreamisAppConn插件的部署和配置总体步骤 + 1.拿到打包出来的streamis.zip物料包 + + 2.放置到如下目录并进行解压 + +注意:第一次解压streamis appconn后,确保当前文件夹下没有index_v0000XX.index文件,该文件在后面才会生成 +```shell script +cd {DSS_Install_HOME}/dss/dss-appconns +unzip streamis.zip +``` +解压出来的目录结构为: +```shell script +conf +db +lib +``` + 3.执行脚本进行自动化安装 + ```shell script +cd {DSS_INSTALL_HOME}/dss/bin +sh ./appconn-install.sh +# 脚本是交互式的安装方案,您需要输入字符串streamis以及streamis服务的ip和端口,即可以完成安装 +# 这里的streamis端口是指前端端口,在nginx进行配置。而不是后端的服务端口 +``` + +## 4.完成streamis-appconn的安装后,需要重启dss服务,才能最终完成插件的更新 +### 4.1)使部署好的APPCONN生效 +使用DSS启停脚本使APPCONN生效,进入到脚本所在目录{DSS_INSTALL_HOME}/dss/sbin中,依次使用如下命令执行脚本: +```shell script +sh ./dss-stop-all.sh +sh ./dss-start-all.sh +``` +### 4.2)验证streamis-appconn是否生效 +在安装部署完成streamis-appconn之后,可通过以下步骤初步验证streamis-appconn是否安装成功。 + +在DSS工作空间创建一个新的项目 +![DSS工作空间Streamis项目](../../../images/zh_CN/dss_streamis_project.png) + 
+在streamis数据库查看是否同步创建项目,查询有记录说明appconn安装成功 +```roomsql +SELECT * FROM linkis_stream_project WHERE name = '项目名称'; +``` + +# 5.Streamis AppConn安装原理 +Streamis 的相关配置信息会插入到以下表中,通过配置下表,可以完成 Streamis 的使用配置。(注:如果仅仅需要快速安装APPCONN,无需过分关注以下字段,提供的init.sql中大多以进行默认配置。重点关注以上操作即可) + +|表名 |表作用 |备注 | +|-------------------|-----------------------------------------|------| +|dss_workspace_dictionary |配置流式生产中心 |必须| +|dss_appconn |AppConn的基本信息,用于加载AppConn |必须| +|dss_workspace_menu_appconn |AppConn菜单,前端连接Streamis |必须| +|dss_appconn_instance |AppConn的实例的信息,包括自身的url信息 |必须| diff --git "a/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md" new file mode 100644 index 000000000..9935164fe --- /dev/null +++ "b/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md" @@ -0,0 +1,45 @@ +Streamis 升级文档,本文主要介绍在原有安装Streamis服务的基础上适配DSS1.1.0和Linkis1.1.1的升级步骤,Streamis0.2.4相对与Streamis0.1.0版本最大的区别在于接入了DSS AppConn,对job的启停做了优化。 + +# 1.升级Streamis前的工作 +您在升级Streamis之前,请先安装 Linkis1.1.1 和 DSS1.1.0 及以上版本,并且保证 Linkis Flink 引擎 和 DSS 可以正常使用,DSS 和 Linkis 安装,可参照 [DSS & Linkis 一键安装部署文档](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md)。 + +# 2.Streamis升级步骤 + +## 安装StreamisAppConn + +1)删除旧版本StreamisAppConn包 + +进入下列目录,找到streamis的appconn文件夹并删除,如果存在的话: +```shell script +{DSS_Install_HOME}/dss/dss-appconns +``` + +2)StreamisAppConn安装部署 + +安装 DSS StreamisAppConn 插件,请参考: [StreamisAppConn 插件安装文档](development/StreamisAppConn安装文档.md) + +## 安装Streamis后端 +将获取到的安装包中lib更新到Streamis安装目录下的路径 `streamis-server/lib` 中,`streamis-server/conf`下的文件内容可根据需要进行更新。 + +进入安装目录下,执行更新脚本,完成对数据库表结构和数据的更新: +```shell script +cd {Streamis_Install_HOME} +sh bin/upgrade.sh +``` + +再通过以下命令完成 Streamis Server 的更新重启: +```shell script +cd {Streamis_Install_HOME}/streamis-server +sh 
bin/stop-streamis-server.sh +sh bin/start-streamis-server.sh +``` + +## 安装Streamis前端 +先删除旧版本前端目录文件夹,再替换为新的前端安装包 +``` +mkdir ${STREAMIS_FRONT_PATH} +cd ${STREAMIS_FRONT_PATH} +#1.删除前端目录文件夹 +#2.放置前端包 +unzip streamis-${streamis-version}.zip +``` \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md new file mode 100644 index 000000000..ccf939700 --- /dev/null +++ b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md @@ -0,0 +1,9 @@ +## 1. 目录 + +* [StreamDataSource 表结构设计文档]() +* [StreamJobManager 表结构设计文档]() +* [StreamWorkflow 表结构设计文档]() + +## 2. Dao层开发规范 + +统一引入 Linkis 的 linkis-commons/linkis-mybatis 模块,使用标准的 Dao 层接口 + Dao 层xml 形式 \ No newline at end of file diff --git a/docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql new file mode 100644 index 000000000..f5a64f2c4 --- /dev/null +++ b/docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql @@ -0,0 +1,342 @@ + +SET NAMES utf8mb4; +SET FOREIGN_KEY_CHECKS = 0; + +-- ---------------------------- +-- Table structure for linkis_stream_bml +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_bml`; +CREATE TABLE `linkis_stream_bml` ( + `id` bigint(20) NOT NULL, + `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `bml_type` tinyint(1) NULL DEFAULT NULL, + `org_identification` bigint(20) NULL DEFAULT NULL, + ` latest_version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_bml +-- ---------------------------- + +-- ---------------------------- +-- Table structure for 
linkis_stream_bml_version +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_bml_version`; +CREATE TABLE `linkis_stream_bml_version` ( + `id` bigint(20) NOT NULL, + `bml_id` bigint(20) NULL DEFAULT NULL, + `version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `storage_path` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + ` attribute` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '物料版本' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_bml_version +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_cluster +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_cluster`; +CREATE TABLE `linkis_stream_cluster` ( + `id` int(11) NOT NULL, + `yarn_conf_dir` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `hdfs_conf_dir` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `resource_manager_url` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `savepoint_dir` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = 'flink 集群信息' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_cluster +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_configuration_config_key +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_configuration_config_key`; +CREATE TABLE `linkis_stream_configuration_config_key` ( + `id` bigint(20) NOT NULL, + `key` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `name` varchar(50) CHARACTER SET utf8 
COLLATE utf8_general_ci NULL DEFAULT NULL, + `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `default_value` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `validate_type` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `validate_range` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `is_hidden` tinyint(1) NULL DEFAULT NULL, + `is_advanced` tinyint(1) NULL DEFAULT NULL, + `level` tinyint(1) NULL DEFAULT NULL, + `treename` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `type` int(10) NULL DEFAULT NULL, + `sort` int(10) NULL DEFAULT NULL, + `status` tinyint(10) NULL DEFAULT NULL COMMENT '1 custom , 2 selected ', + PRIMARY KEY (`id`) USING BTREE, + UNIQUE INDEX `key_index`(`key`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '配置信息' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_configuration_config_key +-- ---------------------------- +INSERT INTO `linkis_stream_configuration_config_key` VALUES (1, 'wds.linkis.flink.resource', '资源配置', '资源配置', NULL, 'None', NULL, 0, 0, 1, '资源配置', 1, 0, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (2, 'wds.linkis.flink.taskmanager.num', 'Task Managers数量', 'Task Managers数量', '4', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', 0, 0, 2, '资源配置', 1, 1, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (3, 'wds.linkis.flink.jobmanager.memory', 'JobManager Memory', 'JobManager Memory', '1.5', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', 0, 0, 2, '资源配置', 1, 2, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (4, 'wds.linkis.flink.taskmanager.memory', 'TaskManager Memory', 'TaskManager Memory', '1.5', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', 0, 0, 2, '资源配置', 1, 3, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (5, 'wds.linkis.flink.jobmanager.cpus', 
'JobManager CPUs', 'JobManager CPUs', '1', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', 0, 0, 2, '资源配置', 1, 4, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (6, 'wds.linkis.flink.taskManager.cpus', 'TaskManager CPUs', 'TaskManager CPUs', '1', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', 0, 0, 2, '资源配置', 1, 5, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (7, 'wds.linkis.flink.custom', '自定义参数', '自定义参数', NULL, 'None', NULL, 0, 0, 1, '自定义参数', 2, 0, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (8, 'wds.linkis.flink.produce', '生产配置', '生产配置', NULL, 'None', NULL, 0, 0, 1, '生产配置', 3, 0, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (9, 'wds.linkis.flink.checkpoint.interval', 'Checkpoint间隔', 'Checkpoint间隔', NULL, NULL, NULL, 0, 0, 2, '生产配置', 3, 1, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (10, 'wds.linkis.flink.reboot.strategy', '重启策略', '重启策略', '不重启,基于Checkpoint自动重启,无Checkpoint不重启', 'None', NULL, 0, 0, 2, '重启策略', 3, 2, 2); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (11, 'wds.linkis.flink.alert', '告警设置', '告警设置', NULL, 'None', NULL, 0, 0, 1, '告警设置', 4, 0, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (12, 'wds.linkis.flink.alert.rule', '告警规则', '告警规则', '任务日志中出现ERROR/EXCEPTION,任务核心指标出现异常', 'None', NULL, 0, 0, 2, '告警规则', 4, 1, 2); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (13, 'wds.linkis.flink.alert.user', '告警用户', '告警用户', NULL, NULL, NULL, 0, 0, 2, '告警用户', 4, 3, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (14, 'wds.linkis.flink.alert.leve', '告警级别', '告警级别', 'CLEARED,INDETERMINATE,WARNING,MINOR,MAJOR,CRITICAL', 'None', NULL, 0, 0, 2, '告警级别', 4, 2, 2); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (15, 'wds.linkis.flink.alert.failure.level', '失败时告警级别', '失败时告警级别', 'CLEARED,INDETERMINATE,WARNING,MINOR,MAJOR,CRITICAL', 'None', NULL, 0, 0, 2, '失败时告警级别', 4, 4, 2); +INSERT INTO 
`linkis_stream_configuration_config_key` VALUES (16, 'wds.linkis.flink.alert.failure.user', '失败时告警用户', '失败时告警用户', NULL, 'None', NULL, 0, 0, 2, '失败时告警用户', 4, 5, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (17, 'wds.linkis.flink.authority', '权限设置', '权限设置', NULL, 'None', NULL, 0, 0, 1, '权限设置', 5, 0, 1); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (18, 'wds.linkis.flink.authority.author', '授权模式', '授权模式', '私密,指定全员可见,指定人员可见', 'None', NULL, 0, 0, 2, '授权模式', 5, 1, 2); +INSERT INTO `linkis_stream_configuration_config_key` VALUES (19, 'wds.linkis.flink.authority.visible', '可见人员', '可见人员', NULL, 'None', NULL, 0, 0, 2, '可见人员', 5, 2, 1); + +-- ---------------------------- +-- Table structure for linkis_stream_configuration_config_value +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_configuration_config_value`; +CREATE TABLE `linkis_stream_configuration_config_value` ( + `id` bigint(20) NOT NULL, + `configkey_id` bigint(20) NULL DEFAULT NULL, + `config_value` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `type` int(10) NULL DEFAULT NULL, + `job_id` bigint(20) NULL DEFAULT NULL, + `job_name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `config_key` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE, + INDEX `key`(`config_key`) USING BTREE, + INDEX `keyid`(`configkey_id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '配置信息' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_configuration_config_value +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_frame_version +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_frame_version`; +CREATE TABLE `linkis_stream_frame_version` ( + `id` bigint(20) NOT NULL, + `frame` varchar(20) CHARACTER SET utf8 COLLATE 
utf8_general_ci NULL DEFAULT NULL, + `version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `java_version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '框架信息' ROW_FORMAT = COMPACT; + +-- ---------------------------- +-- Records of linkis_stream_frame_version +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job`; +CREATE TABLE `linkis_stream_job` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `project_id` bigint(20) NULL DEFAULT NULL, + `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `type` tinyint(1) NULL DEFAULT NULL, + `current_task_id` bigint(20) NULL DEFAULT NULL, + `current_version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `current_release_time` datetime NULL DEFAULT NULL, + `status` tinyint(1) NULL DEFAULT NULL COMMENT '1:已完成 ,2:等待重启 ,3:告警 ,4:慢任务 ,5:运行中 ,6:失败任务', + `org_identification` bigint(20) NULL DEFAULT NULL, + `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `label` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `current_released` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '作业表' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_alarm_send_history +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_alarm_send_history`; +CREATE 
TABLE `linkis_stream_job_alarm_send_history` ( + `id` bigint(20) NOT NULL, + `job_id` bigint(20) NULL DEFAULT NULL, + `task_id` bigint(20) NULL DEFAULT NULL, + `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `type` tinyint(1) NULL DEFAULT NULL, + `rule_type` tinyint(1) NULL DEFAULT NULL, + `content` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '报警历史信息' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_alarm_send_history +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_checkpoints +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_checkpoints`; +CREATE TABLE `linkis_stream_job_checkpoints` ( + `id` bigint(20) NOT NULL, + `config_value_id` bigint(20) NULL DEFAULT NULL, + `path` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `size` int(20) NULL DEFAULT NULL, + `status` tinyint(1) NULL DEFAULT NULL, + `trigger_timestamp` datetime NULL DEFAULT NULL, + `latest_ack_timestamp` datetime NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_checkpoints +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_code_resource +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_code_resource`; +CREATE TABLE `linkis_stream_job_code_resource` ( + `id` bigint(20) NOT NULL, + `job_version_id` bigint(20) NULL DEFAULT NULL, + `bml_version_id` bigint(20) NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '其他代码' ROW_FORMAT = Compact; + +-- 
---------------------------- +-- Records of linkis_stream_job_code_resource +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_role +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_role`; +CREATE TABLE `linkis_stream_job_role` ( + `id` bigint(20) NOT NULL, + `job_id` bigint(20) NULL DEFAULT NULL, + `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `front_name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `update_time` datetime NULL DEFAULT NULL, + `description` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_role +-- ---------------------------- +INSERT INTO `linkis_stream_job_role` VALUES (1, -1, '管理员', '管理员', '2021-04-07 20:57:09', NULL); + +-- ---------------------------- +-- Table structure for linkis_stream_job_sql_resource +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_sql_resource`; +CREATE TABLE `linkis_stream_job_sql_resource` ( + `id` bigint(20) NOT NULL, + `job_version_id` bigint(20) NULL DEFAULT NULL, + `execute_sql` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_sql_resource +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_user_role +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_user_role`; +CREATE TABLE `linkis_stream_job_user_role` ( + `id` bigint(20) NOT NULL, + `job_id` bigint(20) NULL DEFAULT NULL, + `user_id` bigint(20) NULL DEFAULT NULL, + `role_id` bigint(20) 
NULL DEFAULT NULL, + `type` tinyint(1) NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '作业角色关系' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_user_role +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_job_version +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_job_version`; +CREATE TABLE `linkis_stream_job_version` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `job_id` bigint(20) NULL DEFAULT NULL, + `version` varchar(20) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `program_arguments` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `bml_version` varchar(200) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `bml_id` bigint(20) DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_job_version +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_project +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_project`; +CREATE TABLE `linkis_stream_project` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `workspace_id` bigint(20) NULL DEFAULT NULL, + `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `create_by` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '项目表' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_project +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_task +-- ---------------------------- +DROP TABLE IF EXISTS 
`linkis_stream_task`; +CREATE TABLE `linkis_stream_task` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `job_version_id` bigint(20) NOT NULL, + `job_id` varchar(50) DEFAULT NULL, + `version` varchar(50) DEFAULT NULL, + `status` int(3) DEFAULT NULL, + `start_time` datetime DEFAULT NULL, + `last_update_time` datetime DEFAULT NULL, + `end_time` datetime DEFAULT NULL, + `err_desc` varchar(10240) DEFAULT NULL, + `submit_user` varchar(50) DEFAULT NULL, + `linkis_job_id` varchar(50) DEFAULT NULL, + `linkis_job_info` mediumtext, + PRIMARY KEY (`id`) USING BTREE + ) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='任务表'; + +-- ---------------------------- +-- Records of linkis_stream_task +-- ---------------------------- + +-- ---------------------------- +-- Table structure for linkis_stream_user +-- ---------------------------- +DROP TABLE IF EXISTS `linkis_stream_user`; +CREATE TABLE `linkis_stream_user` ( + `id` bigint(20) NOT NULL, + `username` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + `name` varchar(50) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL, + PRIMARY KEY (`id`) USING BTREE +) ENGINE = InnoDB CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '用户表' ROW_FORMAT = Compact; + +-- ---------------------------- +-- Records of linkis_stream_user +-- ---------------------------- +INSERT INTO `linkis_stream_user` VALUES (1, 'hdfs', 'hdfs'); + +SET FOREIGN_KEY_CHECKS = 1; diff --git "a/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md" "b/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md" new file mode 100644 index 000000000..6281caee2 --- /dev/null +++ "b/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md" @@ -0,0 +1,209 @@ +# Streamis快速入门 + +## 1. 
前言 + +         本文是Streamis0.2.4的快速入门文档,涵盖了Stremis的基本使用流程,更多的操作使用细节,将会在用户使用文档中提供。 + + +## 2. Streamis整合至DSS + +         为了方便用户使用,**Streamis系统以DSS组件的形式嵌入DSS系统中** + +##### 2.1 **如何接入?** + +按照 [StreamisAppConn安装文档](../development/StreamisAppConn安装文档.md) 安装部署StreamisAppConn成功后,Streamis系统会自动嵌入DSS系统中。 + +##### 2.2 如何验证 DSS 已经成功集成了 Streamis? + +请进入 DSS 的工程首页,创建一个工程 + +![image-20211230173334826](../../../images/create_stream_product_center.png) + +进入到工程里面,点击左上角按钮切换到”流式生产中心“,如果出现streamis的首页,则表示 DSS 已经成功集成了 Streamis。如下图: + +![image-20211230173839138](../../../images/stream_product_center.png) + + +## 3. 核心指标 + +进入到streamis首页,上半部显示的是核心指标。 + +核心指标显示当前用户可查看到的上传到该项目执行的Flink任务的状态汇总,状态暂时有9种,显示状态名称和处于该状态的任务数量,具体内容如下图。 + +![核心指标](../../../images/home_page.png) + +
图 3.1 首页核心指标
+ +# 4. 任务示例 + +       主要演示案例从Script FlinkSQL开发,调试到Streamis发布的整个流程。 + +## 4.1. Script开发SQL + +       顶部Scriptis菜单创建一个脚本文件,脚本类型选择Flink,如下图所示: + +![进入FlinkSQL](../../../images/enter_flinksql.png) + +![create_script_file.png](../../../images/create_script_file.png) + +编写FlinkSQL,source,sink,transform等。 + +![flinksql_script_file](../../../images/flinksql_script_file.png) + +点击运行后,即可调试该脚本 + +## 4.2. 发布至Streamis + +### 4.2.1 打包Streamis Job任务 + +​ 流式应用物料包是指的按照Streamis打包规范,将元数据信息(流式应用描述信息),流式应用代码,流式应用使用到的物料等内容打包成zip包。zip具体格式如下: + + xxx.zip + ├── meta.json + ├── test.sql + ├── test.jar + ├── file3 + +其中,meta.json是StreamisJob的元数据信息,其格式为: + +``` +{ + "projectName": "", # 项目名 + "jobName": "", # 作业名 + "jobType": "flink.sql", # 目前只支持flink.sql、flink.jar + "tags": "", # 应用标签 + "description": "", # 作业描述 + "jobContent": { + # 不同的jobType,其内容各不相同,具体请往下看 + } +} +``` + +!!!!!**特别需要注意的是:** + +​ **此处的projectName需要和您dss工程中创建的工程名一致,不然在streamis页面导入ZIP包时,刷新列表后不会显示,因为两者的projectName不一致** + +如果jobType为"flink.sql",则jobContent为: + +``` +{ + "type": "", # file, bml or sql + "sql": "select 1", + "file": "test.sql", + "resourceId": "", + "version": "" +} +其中,如果type为"file",则只识别file字段;如果type为"sql",则只识别sql字段;如果type为"bml",则只识别resourceId和version字段。 +``` + +如果jobType为"flink.jar",则jobContent为: + +``` +{ + "main.class.jar": "", # string。main class的jar,如:test.jar + "main.class": "", # main class,如 com.webank.Test + "args": "", # main class 的入参,即main函数的args,请以空格为分隔符 + "hdfs.jars": [], # 依赖的HDFS jars,如:hdfs:///user/hadoop/test1.jar + "dependency.jars": [], # 依赖的jars,如:test2.jar + "resources": [] # 依赖的资源文件,如:test.properties +} +``` + +### 4.2.2 示例 + +​ streamisjobtest为flinksql文件,meta.json是该任务的元数据信息。 + +![flinksql_job_use_demo](../../../images/flinksql_job_use_demo.png) + +
+ +![flinksql_job_use_demo2](../../../images/flinksql_job_use_demo2.png) + +将SQL文件和meta.json文件打包成一个zip文件,注意:只能打包成zip文件,其他格式如rar、7z等格式无法识别。 + +如果上传zip文件出现下面错误,请调整下nginx的配置`vi /etc/nginx/conf.d/streamis.conf`,添加属性`client_max_body_size`,如下图所示。 +![upload_jobtask_error](../../../images/upload_jobtask_error.png) +![upload_jobtask_error_solve](../../../images/upload_jobtask_error_solve.png) +----- + +在streamis中将该zip包导入,导入任务后,任务的运行状态变成"未启动",版本会+1(导入新的job任务版本从1开始),最新发布时间会更新至最新时间。 + +点击相应的作业名称、配置或左边3个竖点中(参数配置/告警配置/运行历史/运行日志)可进入job任务详情,点击 启动 可执行作业。 + +点击左边3个竖点中 快照【savepoint】 可保存快照。 + +![job_list](../../../images/job_list.png) + +点击批量操作,可选中多个作业任务重启,快照重启会先生成快照再重新启动,直接重启不会生成快照 + +![jobbulk_operate](../../../images/jobbulk_operate.png) + +#### + + +# 5、Streamis任务介绍 + +点击”作业名称“,可查看任务的详情,包括,运行情况、执行历史、配置、任务详情、告警等。 + +## 5.1 运行情况 + +![stream_job_detail](../../../images/stream_job_detail.png) + +## 5.2 执行历史 + +打开执行历史可以查看该任务的历史运行情况, + +历史日志:只有正在运行的任务才能查看历史日志。 + +历史日志中可以查看当前任务启动的flink引擎的日志,可以根据关键字等查看关键日志,点击查看最新日志,可以查看当前引擎的最新日志。 + +![stream_job_history](../../../images/stream_job_history.png) + +## 5.3 配置 + +给Streamis任务配置一些flink资源参数以及checkpoint的参数 + +![image-20211231101503678](../../../images/stream_job_config_1.png) +![image-20211231101503678](../../../images/stream_job_config_2.png) + + + +## 5.4任务详情 + +
+ +  任务详情根据任务类型Flink Jar 和 Flink SQL分为两种显示界面。 + +
+ +- **Flink Jar任务详情** + +![任务详情](../../../images/stream_job_flinkjar_jobcontent.png) + +  Flink Jar任务详情展示了任务Jar包的内容和参数, 同时提供下载该Jar包的功能。 + +
+ +- **Flink SQL任务详情** + +![任务详情](../../../images/stream_job_flinksql_jobcontent.png) + +  Flink SQL任务详情展示了该任务的SQL语句。 + +
+ +## 5.5 进入Yarn页面 + +正在运行的Streamis任务可以通过该按钮进入到yarn管理界面上的查看flink任务运行情况。 + +![image-20211231102020703](../../../images/image-20211231102020703.png) + +## 6 工程资源文件 +Streamis首页-核心指标右上角-工程资源文件。 +工程资源文件提供了上传和管理项目所需资源文件的功能,如下图所示: + +![project_source_file_list](../../../images/project_source_file_list.png) + +上传项目文件 + +![project_source_file_import](../../../images/project_source_file_import.png) diff --git a/images/en_US/readme/architecture.png b/images/en_US/readme/architecture.png new file mode 100644 index 000000000..36a4a1f66 Binary files /dev/null and b/images/en_US/readme/architecture.png differ diff --git a/images/zh_CN/readme/architecture.png b/images/zh_CN/readme/architecture.png new file mode 100644 index 000000000..872891e43 Binary files /dev/null and b/images/zh_CN/readme/architecture.png differ diff --git a/images/zh_CN/readme/communication.png b/images/zh_CN/readme/communication.png new file mode 100644 index 000000000..12e86727d Binary files /dev/null and b/images/zh_CN/readme/communication.png differ diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 000000000..48e341a09 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,3 @@ +{ + "lockfileVersion": 1 +} diff --git a/pom.xml b/pom.xml new file mode 100644 index 000000000..d35eae47f --- /dev/null +++ b/pom.xml @@ -0,0 +1,323 @@ + + + + + 4.0.0 + + com.webank.wedatasphere.streamis + streamis + 0.2.4 + pom + + Streamis Project Parent POM + https://github.com/WeBankFinTech/Streamis + + + + Apache 2.0 License + http://www.apache.org/licenses/LICENSE-2.0.html + repo + + + + + + streamis-jobmanager + streamis-project + streamis-server + assembly + streamis-appconn + + + + 1.1.3 + 4.12 + 1.1.0 + 0.2.4 + 2.11.12 + 1.8 + 3.3.3 + 2.8.5 + 2.13.2 + 3.1.1 + 4.5.4 + 4.5.4 + 1.9.4 + UTF-8 + 5.2.12.RELEASE + 2.1.2 + 2.3.7.RELEASE + 2.2.6.RELEASE + 3.1.1 + 3.8.1 + 2.6 + 0.9.10 + 2.21 + 1.9.5 + 1.4.19 + 0.2.4 + 5.1.47 + 2.0.1.Final + + + + + + + + org.scala-lang + scala-library + ${scala.version} + + 
+ org.scala-lang + scala-compiler + ${scala.version} + + + org.scala-lang + scala-reflect + ${scala.version} + + + org.scala-lang + scalap + ${scala.version} + + + commons-lang + commons-lang + ${commons.lang.version} + + + org.apache.linkis + linkis-mybatis + ${linkis.version} + + + + org.apache.linkis + linkis-scheduler + ${linkis.version} + + + org.apache.linkis + linkis-module + + + org.springframework.boot + spring-boot-starter-tomcat + + + hibernate-validator + org.hibernate.validator + + + ${linkis.version} + + + org.apache.linkis + linkis-common + ${linkis.version} + + + + org.apache.linkis + linkis-protocol + ${linkis.version} + + + + com.google.code.gson + gson + ${gson.version} + + + com.fasterxml.jackson.core + jackson-databind + ${fasterxml.jackson.version} + + + org.apache.commons + commons-math3 + ${commons.math.version} + + + xstream + com.thoughtworks.xstream + ${xstream.version} + + + javax.validation + validation-api + ${validation.api.version} + + + + + + + release + + + + org.apache.maven.plugins + maven-source-plugin + 3.1.0 + + true + + + + create-source-jar + + jar-no-fork + test-jar-no-fork + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 3.0.0-M1 + + + org.apache.maven.plugins + maven-gpg-plugin + 1.5 + + + sign-artifacts + verify + + sign + + + + + + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-enforcer-plugin + 1.4.1 + + + enforce-versions + + enforce + + + + + ${maven.version} + + + ${java.version} + + + + org.jboss.netty + + true + + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 2.5.1 + + ${jdk.compile.version} + ${jdk.compile.version} + + + + org.apache.maven.plugins + maven-site-plugin + 3.3 + + + net.alchim31.maven + scala-maven-plugin + 3.2.2 + + + eclipse-add-source + + add-source + + + + scala-compile-first + process-resources + + compile + + + + scala-test-compile-first + process-test-resources + + testCompile + + + + attach-scaladocs 
+ verify + + doc-jar + + + + + ${scala.version} + incremental + true + + + + org.apache.maven.plugins + maven-jar-plugin + 2.6 + + + + + + + + + + \ No newline at end of file diff --git a/streamis-appconn/pom.xml b/streamis-appconn/pom.xml new file mode 100644 index 000000000..bf07ee0dc --- /dev/null +++ b/streamis-appconn/pom.xml @@ -0,0 +1,135 @@ + + + + streamis + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-appconn + + + 8 + 8 + + + + + com.webank.wedatasphere.dss + dss-appconn-core + ${dss.version} + + + linkis-common + org.apache.linkis + + + json4s-jackson_2.11 + org.json4s + + + scala-compiler + org.scala-lang + + + scala-library + org.scala-lang + + + scala-reflect + org.scala-lang + + + scalap + org.scala-lang + + + dss-common + com.webank.wedatasphere.dss + + + + + org.apache.linkis + linkis-common + ${linkis.version} + provided + + + org.apache.linkis + linkis-module + ${linkis.version} + provided + + + com.webank.wedatasphere.dss + dss-common + ${dss.version} + provided + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + streamis + false + false + + src/main/assembly/distribution.xml + + + + + + + src/main/java + + **/*.xml + + + + src/main/resources + + **/application.yml + **/bootstrap.yml + **/log4j2.xml + + + + + \ No newline at end of file diff --git a/streamis-appconn/src/main/assembly/distribution.xml b/streamis-appconn/src/main/assembly/distribution.xml new file mode 100644 index 000000000..b82589846 --- /dev/null +++ b/streamis-appconn/src/main/assembly/distribution.xml @@ -0,0 +1,62 @@ + + + + dss-streamis-appconn + + zip + + true + streamis + + + + lib + true + true + false + true + true + + + + + + + ${basedir}/src/main/resources + + 
appconn.properties + + 0777 + conf + unix + + + + ${basedir}/src/main/resources + + init.sql + + 0777 + db + + + \ No newline at end of file diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/StreamisAppConn.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/StreamisAppConn.java new file mode 100644 index 000000000..c28dc6d7c --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/StreamisAppConn.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.streamis.dss.appconn; + +import com.webank.wedatasphere.dss.appconn.core.ext.SecondlyAppConn; +import com.webank.wedatasphere.dss.appconn.core.impl.AbstractOnlySSOAppConn; +import com.webank.wedatasphere.dss.standard.app.structure.StructureIntegrationStandard; +import com.webank.wedatasphere.streamis.dss.appconn.structure.StreamisStructureIntegrationStandard; + +public class StreamisAppConn extends AbstractOnlySSOAppConn implements SecondlyAppConn { + + private StreamisStructureIntegrationStandard structureIntegrationStandard; + + @Override + public StructureIntegrationStandard getOrCreateStructureStandard() { + return structureIntegrationStandard; + } + + @Override + protected void initialize() { + structureIntegrationStandard = new StreamisStructureIntegrationStandard(); + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java new file mode 100644 index 000000000..46507753f --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java @@ -0,0 +1,18 @@ +package com.webank.wedatasphere.streamis.dss.appconn.constraints; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * use Constraints class to manage the constant value + */ +public class Constraints { + + // AppConn 
name + public static final String STREAMIS_APPCONN_NAME = CommonVars.apply("wds.dss.appconn.streamis.name", "Streamis").getValue(); + + public static final String STREAMIS_SERVER_VERSION = CommonVars.apply("wds.dss.appconn.streamis.server.version", "v1").getValue(); + + public static final String API_REQUEST_PREFIX = CommonVars.apply("wds.dss.appconn.streamis.api.request-prefix", "/api/rest_j/"+STREAMIS_SERVER_VERSION+"/streamis/project").getValue(); + + +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/exception/StreamisAppConnErrorException.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/exception/StreamisAppConnErrorException.java new file mode 100644 index 000000000..6765b3d89 --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/exception/StreamisAppConnErrorException.java @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.streamis.dss.appconn.exception; + +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; + +public class StreamisAppConnErrorException extends ExternalOperationFailedException { + + public StreamisAppConnErrorException(int errorCode, String message) { + super(errorCode, message); + } + + public StreamisAppConnErrorException(int errorCode, String message, Throwable cause) { + super(errorCode, message, cause); + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/StreamisStructureIntegrationStandard.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/StreamisStructureIntegrationStandard.java new file mode 100644 index 000000000..43e44c8b4 --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/StreamisStructureIntegrationStandard.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure; + +import 
com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureIntegrationStandard; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectService; +import com.webank.wedatasphere.streamis.dss.appconn.structure.project.StreamisProjectService; + +/** + * Structure integration standard + */ +public class StreamisStructureIntegrationStandard extends AbstractStructureIntegrationStandard { + + /** + * Singleton project service + * @return + */ + @Override + protected ProjectService createProjectService() { + return new StreamisProjectService(); + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisPrejectDeleteOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisPrejectDeleteOperation.java new file mode 100644 index 000000000..bfbb8a36a --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisPrejectDeleteOperation.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.project; + +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSDeleteAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectDeletionOperation; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectContentReqRef; +import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil; + +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME; + +public 
class StreamisPrejectDeleteOperation extends AbstractStructureOperation + implements ProjectDeletionOperation { + + private String projectUrl; + + @Override + protected String getAppConnName() { + return STREAMIS_APPCONN_NAME; + } + + @Override + public ResponseRef deleteProject(StreamisProjectContentReqRef refProjectContentRequestRef) throws ExternalOperationFailedException { + DSSDeleteAction deleteAction = new DSSDeleteAction(); + deleteAction.setUser(refProjectContentRequestRef.getUserName()); + deleteAction.setParameter("projectId", refProjectContentRequestRef.getRefProjectId()); + return StreamisCommonUtil.getInternalResponseRef(refProjectContentRequestRef, ssoRequestOperation, projectUrl, deleteAction); + } + + @Override + public void init() { + super.init(); + projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "deleteProject")); + } + +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java new file mode 100644 index 000000000..e8d000c2b --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java @@ -0,0 +1,57 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.project; + +import com.webank.wedatasphere.dss.common.entity.project.DSSProject; +import com.webank.wedatasphere.dss.common.utils.DSSCommonUtils; +import com.webank.wedatasphere.dss.standard.app.sso.Workspace; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPostAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectCreationOperation; +import 
com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectContentRequestRef; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectPrivilege; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.streamis.dss.appconn.exception.StreamisAppConnErrorException; +import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil; + +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME; + +public class StreamisProjectCreationOperation extends AbstractStructureOperation + implements ProjectCreationOperation { + + private String projectUrl; + + @Override + protected String getAppConnName() { + return STREAMIS_APPCONN_NAME; + } + + @Override + public ProjectResponseRef createProject(DSSProjectContentRequestRef.DSSProjectContentRequestRefImpl dssProjectContentRequestRef) throws ExternalOperationFailedException { + DSSPostAction streamisPostAction = new DSSPostAction(); + streamisPostAction.setUser(dssProjectContentRequestRef.getUserName()); + DSSProject dssProject = dssProjectContentRequestRef.getDSSProject(); + Workspace workspace = dssProjectContentRequestRef.getWorkspace(); + DSSProjectPrivilege dssProjectPrivilege = dssProjectContentRequestRef.getDSSProjectPrivilege(); + if(dssProject == null || dssProjectPrivilege == null){ + //TODO error code need to amend + throw new StreamisAppConnErrorException(-1, "the dssProject or dssProjectPrivilege is null"); + } + streamisPostAction.addRequestPayload("projectName",dssProject.getName()); + streamisPostAction.addRequestPayload("workspaceId", 
workspace==null?null:workspace.getWorkspaceId()); + streamisPostAction.addRequestPayload("releaseUsers",dssProjectPrivilege.getReleaseUsers()); + streamisPostAction.addRequestPayload("editUsers",dssProjectPrivilege.getEditUsers()); + streamisPostAction.addRequestPayload("accessUsers",dssProjectPrivilege.getAccessUsers()); + InternalResponseRef responseRef = StreamisCommonUtil.getInternalResponseRef(dssProjectContentRequestRef, ssoRequestOperation, projectUrl, streamisPostAction); + Long projectId = DSSCommonUtils.parseToLong(responseRef.getData().get("projectId")); + return ProjectResponseRef.newExternalBuilder() + .setRefProjectId(projectId).success(); + } + + @Override + public void init() { + super.init(); + projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "createProject")); + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectSearchOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectSearchOperation.java new file mode 100644 index 000000000..097bd484b --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectSearchOperation.java @@ -0,0 +1,45 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.project; + +import com.webank.wedatasphere.dss.common.utils.DSSCommonUtils; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSGetAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectSearchOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import 
com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectContentReqRef; +import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil; + +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME; + +public class StreamisProjectSearchOperation extends AbstractStructureOperation + implements ProjectSearchOperation { + + private String projectUrl; + + @Override + protected String getAppConnName() { + return STREAMIS_APPCONN_NAME; + } + + @Override + public ProjectResponseRef searchProject(StreamisProjectContentReqRef streamisProjectContentReqRef) throws ExternalOperationFailedException { + DSSGetAction getAction = new DSSGetAction(); + getAction.setUser(streamisProjectContentReqRef.getUserName()); + getAction.setParameter("projectName",streamisProjectContentReqRef.getProjectName()); + InternalResponseRef responseRef = StreamisCommonUtil.getInternalResponseRef(streamisProjectContentReqRef, ssoRequestOperation, projectUrl, getAction); + if(responseRef.getData().get("projectId")==null){ + return ProjectResponseRef.newExternalBuilder().success(); + } + Long projectId = DSSCommonUtils.parseToLong(responseRef.getData().get("projectId")); + return ProjectResponseRef.newExternalBuilder() + .setRefProjectId(projectId).success(); + } + + @Override + public void init() { + super.init(); + projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "searchProject")); + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectService.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectService.java new file mode 100644 index 000000000..ce5a7fa7d --- /dev/null +++ 
b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectService.java @@ -0,0 +1,32 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.project; + +import com.webank.wedatasphere.dss.standard.app.structure.project.*; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectContentRequestRef; +import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectContentReqRef; +import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectUpdateReqRef; + +/** + * Streamis project service + */ +public class StreamisProjectService extends ProjectService { + + @Override + protected ProjectCreationOperation createProjectCreationOperation() { + return new StreamisProjectCreationOperation(); + } + + @Override + protected ProjectUpdateOperation createProjectUpdateOperation() { + return new StreamisProjectUpdateOperation(); + } + + @Override + protected ProjectDeletionOperation createProjectDeletionOperation() { + return new StreamisPrejectDeleteOperation(); + } + + @Override + protected ProjectSearchOperation createProjectSearchOperation() { + return new StreamisProjectSearchOperation(); + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectUpdateOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectUpdateOperation.java new file mode 100644 index 000000000..fe4ca033d --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectUpdateOperation.java @@ -0,0 +1,56 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.project; + +import com.webank.wedatasphere.dss.common.entity.project.DSSProject; +import com.webank.wedatasphere.dss.standard.app.sso.Workspace; +import 
com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSPutAction; +import com.webank.wedatasphere.dss.standard.app.structure.AbstractStructureOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ProjectUpdateOperation; +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.DSSProjectPrivilege; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.streamis.dss.appconn.exception.StreamisAppConnErrorException; +import com.webank.wedatasphere.streamis.dss.appconn.structure.ref.StreamisProjectUpdateReqRef; +import com.webank.wedatasphere.streamis.dss.appconn.utils.StreamisCommonUtil; + +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.API_REQUEST_PREFIX; +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME; + +public class StreamisProjectUpdateOperation extends AbstractStructureOperation + implements ProjectUpdateOperation { + + private String projectUrl; + + @Override + protected String getAppConnName() { + return STREAMIS_APPCONN_NAME; + } + + @Override + public ResponseRef updateProject(StreamisProjectUpdateReqRef projectUpdateRequestRef) throws ExternalOperationFailedException { + DSSPutAction updateAction = new DSSPutAction(); + updateAction.setUser(projectUpdateRequestRef.getUserName()); + DSSProject dssProject = projectUpdateRequestRef.getDSSProject(); + DSSProjectPrivilege dssProjectPrivilege = projectUpdateRequestRef.getDSSProjectPrivilege(); + Workspace workspace = projectUpdateRequestRef.getWorkspace(); + if(dssProject == null || dssProjectPrivilege == null){ + throw new StreamisAppConnErrorException(600500, "the dssProject or dssProjectPrivilege is null"); + } + updateAction.addRequestPayload("projectId",projectUpdateRequestRef.getRefProjectId()); + 
updateAction.addRequestPayload("projectName",dssProject.getName()); + updateAction.addRequestPayload("workspaceId", workspace==null?null:workspace.getWorkspaceId()); + updateAction.addRequestPayload("releaseUsers",dssProjectPrivilege.getReleaseUsers()); + updateAction.addRequestPayload("editUsers",dssProjectPrivilege.getEditUsers()); + updateAction.addRequestPayload("accessUsers",dssProjectPrivilege.getAccessUsers()); + return StreamisCommonUtil.getInternalResponseRef(projectUpdateRequestRef, ssoRequestOperation, projectUrl, updateAction); + } + + + @Override + public void init() { + super.init(); + projectUrl = mergeBaseUrl(mergeUrl(API_REQUEST_PREFIX, "updateProject")); + } + +} + + diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java new file mode 100644 index 000000000..3f3421948 --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java @@ -0,0 +1,13 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.ref; + +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.RefProjectContentRequestRef; + +/** + * Streamis project content ref + */ +public class StreamisProjectContentReqRef extends StreamisStructureReqRef + implements RefProjectContentRequestRef { + public StreamisProjectContentReqRef(){ + + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java new file mode 100644 index 000000000..3bd209dc2 --- /dev/null +++ 
b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.ref; + +import com.webank.wedatasphere.dss.standard.app.structure.project.ref.ProjectUpdateRequestRef; + +/** + * Streamis project update request ref + */ +public class StreamisProjectUpdateReqRef extends StreamisStructureReqRef implements ProjectUpdateRequestRef { + public StreamisProjectUpdateReqRef(){ + + } +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisStructureReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisStructureReqRef.java new file mode 100644 index 000000000..b5ab1f330 --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisStructureReqRef.java @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.streamis.dss.appconn.structure.ref; + +import com.webank.wedatasphere.dss.standard.app.structure.StructureRequestRefImpl; + +public abstract class StreamisStructureReqRef> extends StructureRequestRefImpl{ + // Extend the structure request ref +} diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java new file mode 100644 index 000000000..db22cf871 --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java @@ -0,0 +1,11 @@ +package com.webank.wedatasphere.streamis.dss.appconn.utils; + +public class NumberUtils { + + public static Integer getInt(Object original) { + if (original instanceof Double) { + return ((Double) original).intValue(); + } + return (Integer) original; + } +} diff --git 
a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java new file mode 100644 index 000000000..2db1d396e --- /dev/null +++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java @@ -0,0 +1,61 @@ +package com.webank.wedatasphere.streamis.dss.appconn.utils; + +import com.webank.wedatasphere.dss.standard.app.sso.builder.SSOUrlBuilderOperation; +import com.webank.wedatasphere.dss.standard.app.sso.origin.request.action.DSSHttpAction; +import com.webank.wedatasphere.dss.standard.app.sso.ref.WorkspaceRequestRef; +import com.webank.wedatasphere.dss.standard.app.sso.request.SSORequestOperation; +import com.webank.wedatasphere.dss.standard.common.entity.ref.InternalResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRef; +import com.webank.wedatasphere.dss.standard.common.entity.ref.ResponseRefBuilder; +import com.webank.wedatasphere.dss.standard.common.exception.operation.ExternalOperationFailedException; +import com.webank.wedatasphere.dss.standard.sso.utils.SSOHelper; +import org.apache.linkis.httpclient.request.HttpAction; +import org.apache.linkis.httpclient.response.HttpResult; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static com.webank.wedatasphere.streamis.dss.appconn.constraints.Constraints.STREAMIS_APPCONN_NAME; + +public class StreamisCommonUtil { + + private final static Logger logger = LoggerFactory.getLogger(StreamisCommonUtil.class); + + public static SSOUrlBuilderOperation getSSOUrlBuilderOperation(WorkspaceRequestRef requestRef, String url) { + SSOUrlBuilderOperation ssoUrlBuilderOperation = SSOHelper.createSSOUrlBuilderOperation(requestRef.getWorkspace()); + ssoUrlBuilderOperation.setAppName(STREAMIS_APPCONN_NAME); + ssoUrlBuilderOperation.setReqUrl(url); + return ssoUrlBuilderOperation; + } 
+ + public static HttpResult getHttpResult(WorkspaceRequestRef requestRef, + SSORequestOperation ssoRequestOperation, + String url, + DSSHttpAction streamisHttpAction) throws ExternalOperationFailedException { + + try { + SSOUrlBuilderOperation ssoUrlBuilderOperation = getSSOUrlBuilderOperation(requestRef, url); + streamisHttpAction.setUrl(ssoUrlBuilderOperation.getBuiltUrl()); + return ssoRequestOperation.requestWithSSO(ssoUrlBuilderOperation, streamisHttpAction); + } catch (Exception e) { + throw new ExternalOperationFailedException(90177, "Create streamis node Exception", e); + } + } + + public static InternalResponseRef getInternalResponseRef(WorkspaceRequestRef requestRef, + SSORequestOperation ssoRequestOperation, + String url, + DSSHttpAction streamisHttpAction) throws ExternalOperationFailedException { + HttpResult httpResult = getHttpResult(requestRef, ssoRequestOperation, url, streamisHttpAction); + logger.info("responseBody from streamis is {}",httpResult.getResponseBody()); + InternalResponseRef responseRef = new ResponseRefBuilder.InternalResponseRefBuilder().setResponseBody(httpResult.getResponseBody()).build(); + checkResponseRef(responseRef); + return responseRef; + } + + public static void checkResponseRef(ResponseRef responseRef) throws ExternalOperationFailedException { + if (responseRef.getStatus() != 0 ) { + logger.error(responseRef.getResponseBody()); + throw new ExternalOperationFailedException(90177, responseRef.getErrorMsg(), null); + } + } +} diff --git a/streamis-appconn/src/main/resources/appconn.properties b/streamis-appconn/src/main/resources/appconn.properties new file mode 100644 index 000000000..a9a9181b9 --- /dev/null +++ b/streamis-appconn/src/main/resources/appconn.properties @@ -0,0 +1,14 @@ +#/* +#* Copyright 2021 WeBank +#* Licensed under the Apache License, Version 2.0 (the "License"); +#* you may not use this file except in compliance with the License. 
+#* You may obtain a copy of the License at +#* +#* http://www.apache.org/licenses/LICENSE-2.0 +#* +#* Unless required by applicable law or agreed to in writing, software +#* distributed under the License is distributed on an "AS IS" BASIS, +#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +#* See the License for the specific language governing permissions and +#* limitations under the License. +#*/ \ No newline at end of file diff --git a/streamis-appconn/src/main/resources/init.sql b/streamis-appconn/src/main/resources/init.sql new file mode 100644 index 000000000..6d9224ba6 --- /dev/null +++ b/streamis-appconn/src/main/resources/init.sql @@ -0,0 +1,40 @@ +delete from `dss_workspace_dictionary` WHERE `workspace_id` = '0' and `dic_key` = 'pdp_streamis_product_center'; + +INSERT INTO `dss_workspace_dictionary` ( `workspace_id`, `parent_key`, `dic_name`, `dic_name_en`, `dic_key`, `dic_value`, `dic_value_en`, `title`, `title_en`, `url`, `url_type`,`icon`, `order_num`, `remark`, `create_user`, `create_time`, `update_user`, `update_time`) +VALUES ('0','p_develop_process','流式生产中心','Streamis Product Center','pdp_streamis_product_center','streamis_prod',NULL,NULL,NULL, +'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realtimeJobCenter?projectName=${projectName}&workspaceName=${workspaceName}','0','kaifa-icon','1','工程开发流程-流式生产中心','SYSTEM','2020-12-28 17:32:35',NULL,'2022-06-30 17:49:02'); + +select @old_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis'; + +delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_dss_appconn_id; +delete from `dss_appconn_instance` where `appconn_id` = @old_dss_appconn_id; +delete from `dss_appconn` where `appconn_name`='streamis'; + +select @old_jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter'; + +delete from `dss_workspace_menu_appconn` WHERE `appconn_id` = @old_jobcenter_dss_appconn_id; +delete from `dss_appconn_instance` where 
`appconn_id` = @old_jobcenter_dss_appconn_id; +delete from `dss_appconn` where `appconn_name`='realTimeJobCenter'; + +INSERT INTO dss_appconn +(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource) +VALUES('streamis', 0, 1, 1, 1, NULL, 'com.webank.wedatasphere.streamis.dss.appconn.StreamisAppConn', NULL, NULL); +INSERT INTO dss_appconn +(appconn_name, is_user_need_init, `level`, if_iframe, is_external, reference, class_name, appconn_class_path, resource) +VALUES('realTimeJobCenter', 0, 1, 1, 1, 'sso', '', NULL, NULL); + +select @dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'streamis'; +select @jobcenter_dss_appconn_id:=id from `dss_appconn` where `appconn_name` = 'realTimeJobCenter'; + +INSERT INTO dss_workspace_menu_appconn +(appconn_id, menu_id, title_en, title_cn, desc_en, desc_cn, labels_en, labels_cn, is_active, access_button_en, access_button_cn, manual_button_en, manual_button_cn, manual_button_url, icon, `order`, create_by, create_time, last_update_time, last_update_user, image) +VALUES(@jobcenter_dss_appconn_id, 1, 'StreamSQL development', 'StreamSQL开发', 'Real-time application development is a streaming solution jointly built by WeDataSphere, Boss big data team and China Telecom ctcloud Big data team.', '实时应用开发是微众银行微数域(WeDataSphere)、Boss直聘大数据团队 和 中国电信天翼云大数据团队 社区联合共建的流式解决方案,以 Linkis 做为内核,基于 Flink Engine 构建的批流统一的 Flink SQL,助力实时化转型。', +'streaming, realtime', '流式,实时', 0, 'under union construction', '联合共建中', 'related information', '相关资讯', 'http://127.0.0.1:8088/wiki/scriptis/manual/workspace_cn.html', 'shujukaifa-logo', NULL, NULL, NULL, NULL, NULL, 'shujukaifa-icon'); + +INSERT INTO dss_appconn_instance +(appconn_id, label, url, enhance_json, homepage_uri) +VALUES(@dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/', '', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter'); + +INSERT INTO dss_appconn_instance +(appconn_id, label, url, 
enhance_json, homepage_uri) +VALUES(@jobcenter_dss_appconn_id, 'DEV', 'http://APPCONN_INSTALL_IP:APPCONN_INSTALL_PORT/#/realTimeJobCenter', NULL, NULL); \ No newline at end of file diff --git a/streamis-jobmanager/pom.xml b/streamis-jobmanager/pom.xml new file mode 100644 index 000000000..72529fb40 --- /dev/null +++ b/streamis-jobmanager/pom.xml @@ -0,0 +1,39 @@ + + + + + + streamis + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-jobmanager + pom + + streamis-jobmanager-common + streamis-job-launcher + streamis-job-manager + streamis-jobmanager-server + streamis-projectmanager-server + streamis-job-log + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-launcher/pom.xml b/streamis-jobmanager/streamis-job-launcher/pom.xml new file mode 100755 index 000000000..b26fa26b7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/pom.xml @@ -0,0 +1,35 @@ + + + + + + streamis-jobmanager + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-job-launcher + pom + + + streamis-job-launcher-base + streamis-job-launcher-service + streamis-job-launcher-linkis + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml new file mode 100755 index 000000000..9d95cfbd0 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml @@ -0,0 +1,61 @@ + + + + + + streamis-jobmanager + com.webank.wedatasphere.streamis + 0.2.4 + ../../pom.xml + + 4.0.0 + + streamis-job-launcher-base + + + + 8 + 8 + + + + + org.apache.linkis + linkis-common + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git 
a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java new file mode 100644 index 000000000..3f6e2ce4e --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java @@ -0,0 +1,251 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.entity; + +/** + * Job configuration definition + */ +public class JobConfDefinition { + /** + * Id + */ + private Long id; + + /** + * keyword + */ + private String key; + + /** + * Display name equals 'option' + */ + private String name; + + /** + * Type: NONE: 0, INPUT: 1, SELECT: 2, NUMBER: 3 + */ + private String type; + + /** + * Sort + */ + private Integer sort; + + /** + * Description + */ + private String description; + + /** + * Validate type + */ + private String validateType; + + /** + * Validate rule + */ + private String validateRule; + + /** + * Style (Json/html/css) + */ + private String style; + + /** + * Visiable + */ + private int visiable = 1; + + /** + * Level + */ + private int level = 1; + + /** + * Unit symbol + */ + private String unit; + + /** + * Default value + */ + private String defaultValue; + + /** + * Refer values + */ + private String refValues; + + /** + * Parent ref + */ + private Long parentRef; + + /** + * Is required + */ + private boolean required; + + private boolean mark; + + public JobConfDefinition(){ + + } + + public JobConfDefinition(Long id, String key, + String type, Long parentRef, Integer level){ + this.id = id; + this.key = key; + this.type = type; + this.parentRef = parentRef; + this.level = level; + } + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Integer getSort() { + return sort; + } + + public void setSort(Integer sort) { + this.sort = sort; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + 
this.description = description; + } + + public String getValidateType() { + return validateType; + } + + public void setValidateType(String validateType) { + this.validateType = validateType; + } + + public String getValidateRule() { + return validateRule; + } + + public void setValidateRule(String validateRule) { + this.validateRule = validateRule; + } + + public String getStyle() { + return style; + } + + public void setStyle(String style) { + this.style = style; + } + + public int getVisiable() { + return visiable; + } + + public void setVisiable(int visiable) { + this.visiable = visiable; + } + + public int getLevel() { + return level; + } + + public void setLevel(int level) { + this.level = level; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public String getRefValues() { + return refValues; + } + + public void setRefValues(String refValues) { + this.refValues = refValues; + } + + public Long getParentRef() { + return parentRef; + } + + public void setParentRef(Long parentRef) { + this.parentRef = parentRef; + } + + public String getUnit() { + return unit; + } + + public void setUnit(String unit) { + this.unit = unit; + } + + public boolean isRequired() { + return required; + } + + public void setRequired(boolean required) { + this.required = required; + } + + public boolean isMark() { + return mark; + } + + public void setMark(boolean mark) { + this.mark = mark; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfValue.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfValue.java new file mode 100644 index 000000000..208ddf35a --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfValue.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+/**
+ * Job conf value.
+ * A single configuration entry attached to a job; {@code key} and
+ * {@code referDefId} both point back to the corresponding
+ * 'JobConfDefinition' record.
+ */
+public class JobConfValue {
+
+    /**
+     * Job id
+     */
+    private Long jobId;
+
+    /**
+     * Job name
+     */
+    private String jobName;
+
+    /**
+     * Keyword refer to 'JobConfDefinition'
+     */
+    private String key;
+
+    /**
+     * Actual value
+     */
+    private String value;
+
+    /**
+     * Id refer to 'JobConfDefinition'
+     */
+    private Long referDefId;
+
+    /** No-arg constructor (for ORM/JSON frameworks). */
+    public JobConfValue(){
+
+    }
+
+    /**
+     * @param key keyword referring to 'JobConfDefinition'
+     * @param value actual value
+     * @param referDefId id of the referred 'JobConfDefinition'
+     */
+    public JobConfValue(String key, String value, Long referDefId){
+        this.key = key;
+        this.value = value;
+        this.referDefId = referDefId;
+    }
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(Long jobId) {
+        this.jobId = jobId;
+    }
+
+    public String getJobName() {
+        return jobName;
+    }
+
+    public void setJobName(String jobName) {
+        this.jobName = jobName;
+    }
+
+    public String getKey() {
+        return key;
+    }
+
+    public void setKey(String key) {
+        this.key = key;
+    }
+
+    public String getValue() {
+        return value;
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public Long getReferDefId() {
+        return referDefId;
+    }
+
+    public void setReferDefId(Long referDefId) {
+        this.referDefId = referDefId;
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobRole.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobRole.java
new file mode 100644
index 000000000..f88be08f4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobRole.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+import java.util.Date;
+
+/**
+ * Association entity linking a job to a role, with a last-update timestamp
+ * and a free-text description.
+ */
+public class JobRole {
+    // Record id
+    private Long id;
+    // Id of the associated job
+    private Long jobId;
+    // Id of the associated role
+    private Long roleId;
+    // Last update time of this association
+    private Date updateTime;
+    // Free-text description
+    private String description;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(Long jobId) {
+        this.jobId = jobId;
+    }
+
+    public Long getRoleId() {
+        return roleId;
+    }
+
+    public void setRoleId(Long roleId) {
+        this.roleId = roleId;
+    }
+
+    public Date getUpdateTime() {
+        return updateTime;
+    }
+
+    public void setUpdateTime(Date updateTime) {
+        this.updateTime = updateTime;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java
new file mode 100644
index 000000000..acb9cfafa
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+/**
+ * Association entity linking a job to a user (by user id and username).
+ */
+public class JobUser {
+    // Record id
+    private Long id;
+    // Id of the associated job
+    private Long jobId;
+    // Id of the associated user
+    private Long userId;
+    // Username of the associated user
+    private String username;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(Long jobId) {
+        this.jobId = jobId;
+    }
+
+    public Long getUserId() {
+        return userId;
+    }
+
+    public void setUserId(Long userId) {
+        this.userId = userId;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java
new file mode 100755
index 000000000..616bdd04b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity;
+
+
+/**
+ * Three-way association entity binding a job, a user and a role.
+ */
+public class JobUserRole {
+    // Record id
+    private Long id;
+    // Id of the associated job
+    private Long jobId;
+    // Id of the associated user
+    private Long userId;
+    // Id of the associated role
+    private Long roleId;
+    // Username of the associated user
+    private String username;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(Long jobId) {
+        this.jobId = jobId;
+    }
+
+    public Long getUserId() {
+        return userId;
+    }
+
+    public void setUserId(Long userId) {
+        this.userId = userId;
+    }
+
+    public Long getRoleId() {
+        return roleId;
+    }
+
+    public void setRoleId(Long roleId) {
+        this.roleId = roleId;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public void setUsername(String username) {
+        this.username = username;
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/User.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/User.java
new file mode 100755
index 000000000..b4f8dc2ec
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/User.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.entity; + + +public class User { + private Long id; + private String username; + private String name; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java new file mode 100644 index 000000000..1529a326f --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java @@ -0,0 +1,238 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition; +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Optional; + +/** + * According to JobConfDefinition + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +public class JobConfDefinitionVo { + + /** + * keyword + */ + private String key; + + /** + * Display name equals 'option' + */ + private String name; + + /** + * Type: NONE: 0, INPUT: 1, SELECT: 2, NUMBER: 3 + */ + private String type; + + /** + * Sort + */ + private Integer sort; + + /** + * Description + */ + private String description; + + /** 
+ * Validate type + */ + @JsonProperty("validate_type") + private String validateType; + + /** + * Validate rule + */ + @JsonProperty("validate_rule") + private String validateRule; + + /** + * Style (Json/html/css) + */ + private String style; + + /** + * Visiable + */ + private int visiable = 1; + + /** + * Level + */ + private int level = 1; + + /** + * Unit symbol + */ + private String unit; + + /** + * Default value + */ + @JsonProperty("default_value") + private String defaultValue; + + /** + * Refer values + */ + @JsonProperty("ref_values") + private List refValues = new ArrayList<>(); + + /** + * Children definition + */ + @JsonProperty("child_def") + private List childDef; + + private boolean required; + + public JobConfDefinitionVo(){ + + } + + public JobConfDefinitionVo(JobConfDefinition definition){ + this.key = definition.getKey(); + this.name = definition.getName(); + this.type = definition.getType(); + this.sort = definition.getSort(); + this.description = definition.getDescription(); + this.validateType = definition.getValidateType(); + this.validateRule = definition.getValidateRule(); + this.style = definition.getStyle(); + this.visiable = definition.getVisiable(); + this.level = definition.getLevel(); + this.defaultValue = definition.getDefaultValue(); + if (StringUtils.isNotBlank(definition.getRefValues())){ + this.refValues = Arrays.asList(definition.getRefValues().split(",")); + } + this.required = definition.isRequired(); + this.unit = definition.getUnit(); + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public Integer getSort() { + return sort; + } + + public void setSort(Integer sort) { + this.sort = sort; + } + + public String getDescription() { + return 
description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getValidateType() { + return validateType; + } + + public void setValidateType(String validateType) { + this.validateType = validateType; + } + + public String getValidateRule() { + return validateRule; + } + + public void setValidateRule(String validateRule) { + this.validateRule = validateRule; + } + + public String getStyle() { + return style; + } + + public void setStyle(String style) { + this.style = style; + } + + public int getVisiable() { + return visiable; + } + + public void setVisiable(int visiable) { + this.visiable = visiable; + } + + public int getLevel() { + return level; + } + + public void setLevel(int level) { + this.level = level; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public List getRefValues() { + return refValues; + } + + public void setRefValues(List refValues) { + this.refValues = refValues; + } + + public List getChildDef() { + return childDef; + } + + public void setChildDef(List childDef) { + this.childDef = childDef; + } + + public String getUnit() { + return unit; + } + + public void setUnit(String unit) { + this.unit = unit; + } + + public boolean isRequired() { + return required; + } + + public void setRequired(boolean required) { + this.required = required; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueSet.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueSet.java new file mode 100755 index 000000000..67c59ee7d --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueSet.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo;
+
+import java.util.List;
+
+/**
+ * Config value set.
+ * Groups a job's configuration values by category (resource, produce,
+ * parameter, alarm, permission).
+ * NOTE(review): the List fields are raw types; the element type is
+ * presumably JobConfValueVo — confirm against callers and add generics.
+ */
+public class JobConfValueSet {
+
+    /**
+     * Job id
+     */
+    private Long jobId;
+
+    /**
+     * Resource config
+     */
+    private List resourceConfig;
+
+    /**
+     * Produce config
+     */
+    private List produceConfig;
+
+    /**
+     * Parameter config
+     */
+    private List parameterConfig;
+
+    /**
+     * Alarm config
+     */
+    private List alarmConfig;
+
+    /**
+     * Permission config
+     */
+    private List permissionConfig;
+
+
+    public Long getJobId() {
+        return jobId;
+    }
+
+    public void setJobId(Long jobId) {
+        this.jobId = jobId;
+    }
+
+    public List getResourceConfig() {
+        return resourceConfig;
+    }
+
+    public void setResourceConfig(List resourceConfig) {
+        this.resourceConfig = resourceConfig;
+    }
+
+    public List getProduceConfig() {
+        return produceConfig;
+    }
+
+    public void setProduceConfig(List produceConfig) {
+        this.produceConfig = produceConfig;
+    }
+
+    public List getParameterConfig() {
+        return parameterConfig;
+    }
+
+    public void setParameterConfig(List parameterConfig) {
+        this.parameterConfig = parameterConfig;
+    }
+
+    public List getAlarmConfig() {
+        return alarmConfig;
+    }
+
+    public void setAlarmConfig(List alarmConfig) {
+        this.alarmConfig = alarmConfig;
+    }
+
+    public List getPermissionConfig() {
+        return permissionConfig;
+    }
+
+    public void setPermissionConfig(List permissionConfig) {
+        this.permissionConfig = permissionConfig;
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueVo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueVo.java
new file mode 100755
index 000000000..7b42b07fb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfValueVo.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo; + +import java.util.List; + +/** + * View object + */ +public class JobConfValueVo { + /** + * Config key id + */ + private Long configkeyId; + /** + * Key + */ + private String key; + /** + * Name + */ + private String name; + /** + * Value + */ + private String value; + /** + * Value list + */ + private List valueLists; + + public JobConfValueVo(){ + + } + + public JobConfValueVo(String key, String value){ + this.key = key; + this.value = value; + } + + public static class ValueList { + private String value; + private Boolean selected; + + public ValueList() { + } + + public ValueList(String value, Boolean selected) { + this.value = value; + this.selected = selected; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public Boolean getSelected() { + return selected; + } + + public void setSelected(Boolean selected) { + this.selected = selected; + } + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public List getValueLists() { + return valueLists; + } + + public void setValueLists(List valueLists) { + this.valueLists = valueLists; + } + + public Long getConfigkeyId() { + return configkeyId; + } + + public void setConfigkeyId(Long configkeyId) { + this.configkeyId = configkeyId; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala new file mode 
100644
index 000000000..bcb1dfbbc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala
@@ -0,0 +1,33 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
+
+/**
+ * Job client
+ * Handle to a launched job: exposes its info and stop operations.
+ * NOTE(review): this file lacks the Apache license header present on the
+ * sibling files — consider adding it.
+ *
+ * @tparam T job info type
+ */
+trait JobClient[T <: JobInfo] {
+
+  /** Job info (without forcing a refresh). */
+  def getJobInfo: T
+
+  /**
+   * Refresh job info and return
+   * @param refresh refresh
+   * @return
+   */
+  def getJobInfo(refresh: Boolean): T
+  /**
+   * Stop the job connected remote
+   * @param snapshot if do snapshot to save the job state
+   * @return return the jobState info (if use snapshot) else return null
+   */
+  def stop(snapshot: Boolean): JobStateInfo
+
+  /**
+   * Stop directly
+   */
+  def stop(): Unit
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala
new file mode 100644
index 000000000..6cde5ba12
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo}
+
+/**
+ * Basic job information
+ */
+trait JobInfo {
+
+  /**
+   * Job name
+   * @return name
+   */
+  def getName: String
+  /**
+   * Job Id
+   * @return
+   */
+  def getId: String
+
+  /**
+   * Creator
+   * @return
+   */
+  def getUser: String
+
+  /**
+   * Job status
+   * @return
+   */
+  def getStatus: String
+
+  /** Update the job status string. */
+  def setStatus(status: String): Unit
+
+  /**
+   * Job log path
+   * @return
+   */
+  def getLogPath: String
+
+  /** Resources associated with the job, keyed by name. */
+  def getResources: java.util.Map[String, Object]
+
+  /** Completion message (e.g. final/terminal message of the job). */
+  def getCompletedMsg: String
+
+  /**
+   * Contains the check point and save points
+   * @return
+   */
+  def getJobStates: Array[JobStateInfo]
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala
new file mode 100644
index 000000000..b18d56091
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job
+
+import java.util
+
+
+/**
+ * Immutable description of a job submission: identity, labels, content,
+ * params, source and launch configuration. Build instances via
+ * LaunchJob.builder().
+ */
+trait LaunchJob {
+
+  /**
+   * Job name
+   * @return
+   */
+  def getJobName: String
+
+  /** User on whose behalf the job is submitted. */
+  def getSubmitUser: String
+
+  /** Labels attached to the submission. */
+  def getLabels: util.Map[String, Any]
+
+  /** The job content payload. */
+  def getJobContent: util.Map[String, Any]
+
+  /** Job parameters. */
+  def getParams: util.Map[String, Any]
+
+  /** Source information of the job. */
+  def getSource: util.Map[String, Any]
+
+  /** Launch-time configuration (see the LAUNCH_CONFIG_* keys below). */
+  def getLaunchConfigs: util.Map[String, Any]
+
+}
+
+object LaunchJob {
+
+  // Well-known keys for the launchConfigs map
+  val LAUNCH_CONFIG_CREATE_SERVICE = "createService"
+  val LAUNCH_CONFIG_DESCRIPTION = "description"
+  val LAUNCH_CONFIG_MAX_SUBMIT_TIME = "maxSubmitTime"
+
+  /** Entry point: a fresh Builder. */
+  def builder(): Builder = new Builder
+
+  /**
+   * Mutable builder producing an anonymous immutable LaunchJob snapshot.
+   */
+  class Builder {
+    private var submitUser: String = _
+    private var jobName: String = _
+    private var labels: util.Map[String, Any] = _
+    private var jobContent: util.Map[String, Any] = _
+    private var params: util.Map[String, Any] = _
+    private var source: util.Map[String, Any] = _
+    private var launchConfigs: util.Map[String, Any] = _
+
+    def setJobName(jobName: String): this.type = {
+      this.jobName = jobName
+      this
+    }
+
+    def setSubmitUser(submitUser: String): this.type = {
+      this.submitUser = submitUser
+      this
+    }
+
+    def setLabels(labels: util.Map[String, Any]): this.type = {
+      this.labels = labels
+      this
+    }
+
+    def setJobContent(jobContent: util.Map[String, Any]): this.type = {
+      this.jobContent = jobContent
+      this
+    }
+
+    def setParams(param: util.Map[String, Any]): this.type = {
+      this.params = param
+      this
+    }
+
+    def setSource(source: util.Map[String, Any]): this.type = {
+      this.source = source
+      this
+    }
+
+    def setLaunchConfigs(launchConfigs: util.Map[String, Any]): this.type = {
+      this.launchConfigs = launchConfigs
+      this
+    }
+
+    /** Copy every attribute of an existing LaunchJob into this builder. */
+    def setLaunchJob(launchJob: LaunchJob): this.type = {
+      setSubmitUser(launchJob.getSubmitUser).setLabels(launchJob.getLabels)
+        .setJobContent(launchJob.getJobContent).setParams(launchJob.getParams)
+        .setSource(launchJob.getSource).setLaunchConfigs(launchJob.getLaunchConfigs).setJobName(launchJob.getJobName)
+    }
+
+    // Note: the snapshot captures the builder fields by reference; toString
+    // intentionally omits launchConfigs.
+    def build(): LaunchJob = new LaunchJob {
+      override def getSubmitUser: String = submitUser
+
+      override def getLabels: util.Map[String, Any] = labels
+
+      override def getJobContent: util.Map[String, Any] = jobContent
+
+      override def getParams: util.Map[String, Any] = params
+
+      override def getSource: util.Map[String, Any] = source
+
+      override def getLaunchConfigs: util.Map[String, Any] = launchConfigs
+
+      override def toString: String = s"LaunchJob(submitUser: $submitUser, labels: $labels, jobContent: $jobContent, params: $params, source: $source)"
+
+      /**
+       * Job name
+       *
+       * @return
+       */
+      override def getJobName: String = jobName
+    }
+  }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala
new file mode 100644
index 000000000..1a1365dfc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala
@@ -0,0 +1,69 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo, LaunchJob}
+
+import java.util.concurrent.ConcurrentHashMap
+
+/**
+ * Basic job manager interface for launching job
+ */
+trait JobLaunchManager[T <: JobInfo] {
+
+  /**
+   * Init method
+   */
+  def init(): Unit
+
+  /**
+   * Destroy method
+   */
+  def destroy(): Unit
+
+  /**
+   * Manager name
+   * @return
+   */
+  def getName: String
+
+  /** Launch a new job without a prior state. */
+  def launch(job: LaunchJob): JobClient[T]
+
+  /**
+   * This method is used to launch a new job.
+   * @param job a StreamisJob wanted to be launched.
+   * @param jobState job state used to launch
+   * @return the job id.
+   */
+  def launch(job: LaunchJob, jobState: JobState): JobClient[T]
+  /**
+   * Connect the job which already launched in another process,
+   * if the job has been stored in process, just return the job info
+   * @param id id
+   * @param jobInfo job info
+   * @return
+   */
+  def connect(id: String, jobInfo: String): JobClient[T]
+
+  /** Variant of connect taking an already-parsed job info object. */
+  def connect(id: String, jobInfo: T): JobClient[T]
+  /**
+   * Job state manager(store the state information, example: Checkpoint/Savepoint)
+   * @return state manager instance
+   */
+  def getJobStateManager: JobStateManager
+
+}
+object JobLaunchManager{
+
+  /**
+   * Store the job launch managers
+   */
+  private val launchManagers = new ConcurrentHashMap[String, JobLaunchManager[_ <: JobInfo]]()
+
+  // Registers (or silently replaces) the manager under the given name.
+  def registerJobManager(name: String, jobLaunchManager: JobLaunchManager[_ <: JobInfo]): Unit = {
+    launchManagers.put(name, jobLaunchManager)
+  }
+
+  // Returns the registered manager, or null when no manager of that name exists.
+  def getJobManager(name: String): JobLaunchManager[_ <: JobInfo] = {
+    launchManagers.get(name)
+  }
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobStateManager.scala
new file mode 100644
index 000000000..4d0e7d051
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobStateManager.scala
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateFetcher} + +import java.net.URI + +/** + * Job state manager + */ +trait JobStateManager { + + /** + * Init method + */ + def init(): Unit + + /** + * Destroy method + */ + def destroy(): Unit + + /** + * Register job state fetcher + * @param clazz clazz + * @param builder job state fetcher + * @tparam T + */ + def registerJobStateFetcher(clazz: Class[_], builder: () => JobStateFetcher[_ <: JobState]): Unit + /** + * Job state fetcher + * @param clazz clazz + * @tparam T name + * @return + */ + def getOrCreateJobStateFetcher[T <: JobState](clazz: Class[_]): JobStateFetcher[T] + + /** + * Get job state + * @param jobInfo job info + * @tparam T name + * @return + */ + def getJobState[T <: JobState](clazz: Class[_], jobInfo: JobInfo): T + + + def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, relativePath: String): URI + + def getJobStateDir[T <: JobState](clazz: Class[_], relativePath: String): URI + /** + * Get job state directory uri + * @param clazz clazz + * @param scheme scheme + * @param authority authority + * @param relativePath relative path + * @tparam T + * @return + */ + def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, authority: String, relativePath: String): URI +} + + diff --git 
a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala new file mode 100644 index 000000000..b05df81c2 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state + +import java.net.URI + +/** + * Job state + */ +trait JobState { + + /** + * Job state id + * @return + */ + def getId: String + + /** + * location + * @return + */ + def getLocation: URI + + /** + * Metadata info + * @return + */ + def getMetadataInfo: Any + + /** + * Timestamp to save the state + * @return + */ + def getTimestamp: Long +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateFetcher.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateFetcher.scala new file mode 100644 index 000000000..41f47fd75 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateFetcher.scala @@ -0,0 +1,38 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo + +trait JobStateFetcher[T <: JobState] { + + /** + * Init method + */ + def init(): Unit + + /** + * Get state information + * @param jobInfo JobInfo + * @return + */ + def getState(jobInfo: JobInfo): T + + /** + * Destroy method + */ + def destroy(): Unit +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala new file mode 100644 index 000000000..84e29bf73 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state + +/** + * Basic info + */ +class JobStateInfo { + /** + * Location + */ + private var location: String = _ + + /** + * Timestamp + */ + private var timestamp: Long = -1 + + def setLocation(location: String): Unit = { + this.location = location + } + + def getLocation: String = { + this.location + } + + def setTimestamp(timestamp: Long): Unit = { + this.timestamp = timestamp + } + def getTimestamp: Long = { + timestamp + } +} diff --git 
a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml new file mode 100644 index 000000000..1e266bedb --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml @@ -0,0 +1,65 @@ + + + + + + streamis-job-launcher + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-job-launcher-linkis + + + 8 + 8 + + + + + com.webank.wedatasphere.streamis + streamis-job-launcher-base + ${jobmanager.version} + + + org.apache.linkis + linkis-computation-client + ${linkis.version} + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/StreamisJobLaunchException.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/StreamisJobLaunchException.java new file mode 100644 index 000000000..aee2c5570 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/StreamisJobLaunchException.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception; + +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +public class StreamisJobLaunchException extends FlinkJobLaunchErrorException{ + + public StreamisJobLaunchException(int errorCode, String errorMsg, Throwable t) { + super(errorCode, errorMsg, t); + } + + public static class Runtime extends LinkisRuntimeException 
{ + + public Runtime(int errCode, String desc,Throwable t) { + super(errCode, desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java new file mode 100644 index 000000000..4a423326a --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java @@ -0,0 +1,236 @@ + +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state; + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateFetcher; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.StreamisJobLaunchException; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client.StateFileTree; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client.LinkisJobStateGetAction; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client.LinkisJobStateResult; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.computation.client.LinkisJobBuilder$; +import org.apache.linkis.httpclient.Client; +import org.apache.linkis.httpclient.dws.DWSHttpClient; +import org.apache.linkis.httpclient.dws.response.DWSResult; +import org.apache.linkis.httpclient.response.Result; +import org.apache.linkis.ujes.client.response.ResultSetListResult; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.URI; +import java.util.HashMap; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; + + +/** + * Linkis Job state fetcher + * 1) Init to build http client + * 2) Invoke the getState method to fetch from /api/rest_j/v1/filesystem/getDirFileTrees, the new JobState info + * (Note: linkis doesn't support to fetch the file tree 
recursively, so should invoke several times) + * 3) Destroy to close the http client when the system is closed + * @param + */ + +public abstract class AbstractLinkisJobStateFetcher implements JobStateFetcher { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractLinkisJobStateFetcher.class); + + /** + * Modify time properties name + */ + private static final String PROPS_MODIFY_TIME = "modifytime"; + + /** + * Size properties name + */ + private static final String PROPS_SIZE = "size"; + /** + * Http Client + */ + Client client; + + private final Class stateClass; + + private final JobStateManager jobStateManager; + + public AbstractLinkisJobStateFetcher(Class stateClass, JobStateManager jobStateManager){ + this.stateClass = stateClass; + this.jobStateManager = jobStateManager; + } + + /** + * Init method + */ + @Override + public void init() { + String fetcherName = this.getClass().getSimpleName(); + LOG.info("Initialize httpClient in JobStateFetcher for [{}] start", fetcherName); + client = new DWSHttpClient(LinkisJobBuilder$.MODULE$.getDefaultClientConfig(), fetcherName + "-Client"); + LOG.info("Initialize httpClient in JobStateFetcher for [{}] finished", fetcherName); + } + + /** + * Main entrance + * @param jobInfo job info + * @return + */ + @Override + public T getState(JobInfo jobInfo) { + String treeDir = this.jobStateManager.getJobStateDir(stateClass, jobInfo.getName()).toString(); + StateFileTree stateFileTree = traverseFileTreeToFind(jobInfo, getDirFileTree(jobInfo, treeDir), this::isMatch, false); + if (Objects.nonNull(stateFileTree) && StringUtils.isNotBlank(stateFileTree.getPath())){ + JobStateFileInfo stateFileInfo = new JobStateFileInfo(stateFileTree.getName(), + stateFileTree.getPath(), stateFileTree.getParentPath(), + Long.parseLong(stateFileTree.getProperties().getOrDefault(PROPS_SIZE, "0")), + Long.parseLong(stateFileTree.getProperties().getOrDefault(PROPS_MODIFY_TIME, "0"))); + return getState(stateFileInfo); + } + return 
null; + } + + + @Override + public void destroy() { + try { + client.close(); + } catch (IOException e) { + throw new StreamisJobLaunchException.Runtime(-1, + "Fail to destroy httpClient in JobStateFetcher[" + this.getClass().getSimpleName() + "]",e); + } + } + + /** + * Traverse the file tree to find the suitable state file + * @param jobInfo job info + * @param stateFileTree state file tree + * @param matcher matcher + * @param resolved resolved + * @return + */ + private StateFileTree traverseFileTreeToFind(JobInfo jobInfo, StateFileTree stateFileTree, Function matcher, + boolean resolved){ + AtomicReference latestFileTree = new AtomicReference<>(new StateFileTree()); + if (Objects.nonNull(stateFileTree)){ + if (!resolved && stateFileTree.getIsLeaf()){ + if (matcher.apply(stateFileTree.getPath()) && compareTime(stateFileTree, latestFileTree.get()) > 0){ + latestFileTree.set(stateFileTree); + } + } else if (!stateFileTree.getIsLeaf()){ + Optional.ofNullable(stateFileTree.getChildren()).ifPresent(children -> children.forEach(childStateFileTree -> { + StateFileTree candidateFileTree = childStateFileTree.getIsLeaf() ? childStateFileTree : + traverseFileTreeToFind(jobInfo, + Objects.nonNull(childStateFileTree.getChildren())? 
childStateFileTree : getDirFileTree(jobInfo, childStateFileTree.getPath()), + matcher, + true); + if (compareTime(candidateFileTree, latestFileTree.get()) > 0 && matcher.apply(candidateFileTree.getPath())){ + latestFileTree.set(candidateFileTree); + } + })); + } + } + return latestFileTree.get(); + } + + /** + * Fetch the File tree from directory + * @param jobInfo job info + * @param dirPath directory path + * @return state file tree + */ + private StateFileTree getDirFileTree(JobInfo jobInfo, String dirPath){ + try { + LinkisJobStateGetAction getAction = new LinkisJobStateGetAction(jobInfo.getUser(), dirPath); + Result result = client.execute(getAction); + String responseBody = Optional.ofNullable(result.getResponseBody()).orElse(""); + LOG.trace("JobState FileTree => [responseBody: {}]", + responseBody.length() > 100? responseBody.substring(0, 100) + "..." : responseBody); + StateFileTree stateFileTree; + if (result instanceof ResultSetListResult){ + ResultSetListResult setListResult = (ResultSetListResult)result; + checkFetchStateResult(setListResult); + stateFileTree = DWSHttpClient.jacksonJson().convertValue(setListResult.getDirFileTrees(), StateFileTree.class); + } else if(result instanceof LinkisJobStateResult){ + LinkisJobStateResult stateResult = (LinkisJobStateResult) result; + checkFetchStateResult(stateResult); + stateFileTree = stateResult.getStateFileTree(); + }else { + throw new FlinkJobStateFetchException(-1, "JobState FileTree result is not a unrecognized type: " + + "[" + result.getClass().getCanonicalName() + "]",null); + } + if(stateFileTree == null){ + LOG.warn("'StateFileTree' for path [{}] is null/empty, just return the null FileTree", dirPath); + return null; + } + LOG.trace(stateFileTree.getChildren() + ""); + return stateFileTree; + } catch (FlinkJobStateFetchException e) { + throw new StreamisJobLaunchException.Runtime(e.getErrCode(),e.getMessage(),e); + } catch (Exception e) { + throw new 
StreamisJobLaunchException.Runtime(-1,"Unexpected exception in fetching JobState FileTree",e); + } + } + + private void checkFetchStateResult(DWSResult result) throws FlinkJobStateFetchException { + if(result.getStatus()!= 0) { + String errMsg = result.getMessage(); + throw new FlinkJobStateFetchException(-1, "Fail to fetch JobState FileTree, message: " + errMsg, null); + } + } + /** + * Compare timestamp value in file trees + * @param leftTree left + * @param rightTree right + * @return size + */ + private long compareTime(StateFileTree leftTree, StateFileTree rightTree){ + long leftTime = 0L,rightTime = 0L; + try { + leftTime = Long.parseLong(Optional.ofNullable(leftTree.getProperties()).orElse(new HashMap<>()).getOrDefault(PROPS_MODIFY_TIME, "0")); + } catch (NumberFormatException e){ + LOG.warn("Illegal format value for property '{}' in FilePath [{}]", PROPS_MODIFY_TIME, leftTree.getPath(), e); + } + try { + rightTime = Long.parseLong(Optional.ofNullable(rightTree.getProperties()).orElse(new HashMap<>()).getOrDefault(PROPS_MODIFY_TIME, "0")); + } catch (NumberFormatException e){ + LOG.warn("Illegal format value for property '{}' in FilePath [{}]", PROPS_MODIFY_TIME, rightTree.getPath(), e); + } + return leftTime - rightTime; + } + /** + * Whether the path matches the expected state-file pattern + * @param path path + * @return boolean + */ + protected abstract boolean isMatch(String path); + + /** + * Get the concrete JobState entity from FileInfo + * @param fileInfo file info + * @return JobState + */ + protected abstract T getState(JobStateFileInfo fileInfo); + +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java new file mode 100644 index 000000000..377c525c3 --- 
/dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java @@ -0,0 +1,69 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state; + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.StreamisJobLaunchException; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import static com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.JobStateConf.CHECKPOINT_PATH_PATTERN; + +/** + * Checkpoint JobState Fetcher + */ +public class CheckpointJobStateFetcher extends AbstractLinkisJobStateFetcher { + + private static final Logger LOG = LoggerFactory.getLogger(CheckpointJobStateFetcher.class); + + private static final Pattern PATH_PATTERN = Pattern.compile(CHECKPOINT_PATH_PATTERN.getValue()); + + public CheckpointJobStateFetcher(Class stateClass, JobStateManager jobStateManager) { + super(stateClass, jobStateManager); 
+ } + + @Override + protected boolean isMatch(String path) { + return PATH_PATTERN.matcher(path).matches(); + } + + @Override + public Checkpoint getState(JobStateFileInfo fileInfo) { + // TODO from linkis will lost the authority info + URI location = URI.create(fileInfo.getPath()); + if (StringUtils.isBlank(location.getAuthority()) && + StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue())){ + try { + location = new URI(location.getScheme(), JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue(), + location.getPath(), null, null); + } catch (URISyntaxException e) { + throw new StreamisJobLaunchException.Runtime(-1, "Fail to resolve checkpoint location, message: " + e.getMessage(), e); + } + } + Checkpoint checkpoint = new Checkpoint(location.toString()); + checkpoint.setMetadataInfo(fileInfo); + checkpoint.setTimestamp(fileInfo.getModifytime()); + LOG.info("Checkpoint info is [path: {}, timestamp: {}]" ,checkpoint.getLocation(), checkpoint.getTimestamp()); + return checkpoint; + } + +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java new file mode 100644 index 000000000..be4f27ee0 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state; + +import org.apache.linkis.common.conf.CommonVars; + +/** + * JobState configuration + */ +public class JobStateConf { + + public static final CommonVars CHECKPOINT_PATH_PATTERN = CommonVars.apply("wds.streamis.job.state.checkpoint.path-pattern", "^[\\s\\S]+?/\\w+?/chk-\\d+/_metadata$"); + + public static final CommonVars SAVEPOINT_PATH_PATTERN = CommonVars.apply("wds.streamis.job.state.savepoint.path-pattern", "^[\\s\\S]+?/savepoint-[\\w-]+/_metadata$"); +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateFileInfo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateFileInfo.java new file mode 100644 index 000000000..d0b235217 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateFileInfo.java @@ -0,0 +1,74 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state; + +/** + * JobState File info + */ +public class JobStateFileInfo { + private String name; + private String path; + private String parentPath; + private long size; + private long modifytime; + + public JobStateFileInfo(String name, String path, String parentPath, long size, long modifytime) { + this.name = name; + this.path = path; + this.parentPath = parentPath; + this.size = size; + this.modifytime = modifytime; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public String getParentPath() { + return parentPath; + } + + public void setParentPath(String parentPath) { + this.parentPath = parentPath; + } + + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + public long getModifytime() { + return modifytime; + } + + public void setModifytime(long modifytime) { + this.modifytime = modifytime; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java new file mode 100644 index 000000000..69dbb51d0 --- 
/dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java @@ -0,0 +1,67 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state; + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.StreamisJobLaunchException; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.net.URI; +import java.net.URISyntaxException; +import java.util.regex.Pattern; + +import static com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.JobStateConf.SAVEPOINT_PATH_PATTERN; + +/** + * Savepoint JobState Fetcher + */ +public class SavepointJobStateFetcher extends AbstractLinkisJobStateFetcher{ + + private static final Logger LOG = LoggerFactory.getLogger(CheckpointJobStateFetcher.class); + + private static final Pattern PATH_PATTERN = Pattern.compile(SAVEPOINT_PATH_PATTERN.getValue()); + + public SavepointJobStateFetcher(Class stateClass, JobStateManager jobStateManager) { + super(stateClass, jobStateManager); + } + + @Override + protected boolean 
isMatch(String path) { + return PATH_PATTERN.matcher(path).matches(); + } + + @Override + protected Savepoint getState(JobStateFileInfo fileInfo) { + // TODO from linkis will lost the authority info + URI location = URI.create(fileInfo.getPath()); + if (StringUtils.isBlank(location.getAuthority()) && + StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue())){ + try { + location = new URI(location.getScheme(), JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue(), + location.getPath(), null, null); + } catch (URISyntaxException e) { + throw new StreamisJobLaunchException.Runtime(-1, "Fail to resolve checkpoint location, message: " + e.getMessage(), e); + } + } + Savepoint savepoint = new Savepoint(location.toString()); + savepoint.setMetadataInfo(fileInfo); + savepoint.setTimestamp(fileInfo.getModifytime()); + LOG.info("Savepoint info is [path: {}, timestamp: {}]", savepoint.getLocation(), savepoint.getTimestamp()); + return savepoint; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java new file mode 100644 index 000000000..48c6d7a6a --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java @@ -0,0 +1,47 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client; + +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.AbstractJobStateResult; +import org.apache.linkis.httpclient.dws.DWSHttpClient; +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult; + +import java.util.HashMap; +import java.util.Map; + +/** + * JobState result + */ +@DWSHttpMessageResult("/api/rest_j/v\\d+/filesystem/getDirFileTrees") +public class LinkisJobStateResult extends AbstractJobStateResult { + + private Map dirFileTrees = new HashMap<>(); + + /** + * Convert the result data to state file tree + * @return state file tree + */ + public StateFileTree getStateFileTree() throws FlinkJobStateFetchException { + try { + return DWSHttpClient.jacksonJson().convertValue(dirFileTrees, StateFileTree.class); + }catch(Exception e){ + throw new FlinkJobStateFetchException(-1, "Fail to parse JobState result data, message: " + e.getMessage(), e); + } + } + +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java new file mode 100644 index 000000000..3844970c4 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java @@ -0,0 +1,62 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client; + +import java.util.HashMap; +import java.util.List; + +public class StateFileTree { + + private String name; + private String path; + private HashMap properties; + private List children; + private Boolean isLeaf = false; + private String parentPath; + + public Boolean getIsLeaf() { + return isLeaf; + } + + public void setIsLeaf(Boolean isLeaf) { + this.isLeaf = isLeaf; + } + + public String getParentPath() { + return parentPath; + } + + public void setParentPath(String parentPath) { + this.parentPath = parentPath; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } + + public HashMap getProperties() { + return properties; + } + + public void setProperties(HashMap properties) { + this.properties = properties; + } + + public List getChildren() { + return children; + } + + public void setChildren(List children) { + this.children = children; + } +} \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandler.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandler.java new file mode 100644 index 000000000..8394b9d72 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandler.java @@ -0,0 +1,31 @@ +/* + * Copyright 
2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url; + +import java.io.IOException; +import java.net.URL; +import java.net.URLConnection; +import java.net.URLStreamHandler; + +/** + * URL stream handler for linkis client (cannot open connection) + */ +public class LinkisURLStreamHandler extends URLStreamHandler { + @Override + protected URLConnection openConnection(URL url) throws IOException { + throw new IllegalArgumentException("Cannot open connection for url [" + url.getPath() + "]"); + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java new file mode 100644 index 000000000..1352c8b28 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java @@ -0,0 +1,52 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url; + + +import java.net.URLStreamHandler; +import java.net.URLStreamHandlerFactory; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +/** + * Default linkis stream handler factory (support specific schemas) + */ +public class LinkisURLStreamHandlerFactory implements URLStreamHandlerFactory { + + /** + * Support schemas + */ + private final List supportSchemas = new ArrayList<>(); + + /** + * Stream handler + */ + private final URLStreamHandler defaultStreamHandler; + + public LinkisURLStreamHandlerFactory(String... 
schemas){ + supportSchemas.addAll(Arrays.asList(schemas)); + this.defaultStreamHandler = new LinkisURLStreamHandler(); + } + + @Override + public URLStreamHandler createURLStreamHandler(String protocol) { + if (supportSchemas.stream().anyMatch( schema -> schema.equals(protocol))){ + return this.defaultStreamHandler; + } + return null; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala new file mode 100644 index 000000000..812475016 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala @@ -0,0 +1,52 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf + +import org.apache.linkis.common.conf.CommonVars + +/** + * Job Launcher configuration + */ +object JobLauncherConfiguration { + + + val FLINK_FETCH_APPLICATION_INFO_MAX_TIMES: CommonVars[Int] = CommonVars("wds.streamis.application.info.fetch.max", 6) + + val FLINK_STATE_DEFAULT_SCHEME: CommonVars[String] = CommonVars("wds.streamis.launch.flink.state.default.scheme", "hdfs") + /** + * Support schema protocols to store flink job states + */ + val FLINK_STATE_SUPPORT_SCHEMES: CommonVars[String] = CommonVars("wds.streamis.launch.flink.state.support.schemas", "hdfs,file,viewfs,s3") + + /** + * Authority(host) value to store flink job states + */ + val FLINK_STATE_DEFAULT_AUTHORITY: CommonVars[String] = CommonVars("wds.streamis.launch.flink.state.authority", "") + /** + * Savepoint mode + */ + val FLINK_TRIGGER_SAVEPOINT_MODE: CommonVars[String] = CommonVars("wds.streamis.launch.flink.savepoint.mode", "trigger") + + /** + * 
Savepoint dir + */ + val FLINK_SAVEPOINT_PATH: CommonVars[String] = CommonVars("wds.streamis.launch.flink.savepoint.dir", "/flink/flink-savepoints") + + /** + * Checkpoint dir + */ + val FLINK_CHECKPOINT_PATH: CommonVars[String] = CommonVars("wds.streamis.launch.flink.checkpoint.dir", "/flink/flink-checkpoints") + + /** + * Linkis release version + */ + val FLINK_LINKIS_RELEASE_VERSION: CommonVars[String] = CommonVars("wds.streamis.launch.flink.linkis.release.version", "") + /** + * Variable: savepoint path + */ + val VAR_FLINK_SAVEPOINT_PATH: CommonVars[String] = CommonVars("wds.streamis.launch.variable.flink.savepoint.path", "flink.app.savePointPath") + + /** + * Variable: flink app + */ + val VAR_FLINK_APP_NAME: CommonVars[String] = CommonVars("wds.streamis.launch.variable.flink.app.name", "flink.app.name") + +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala new file mode 100644 index 000000000..d6fb5c661 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala @@ -0,0 +1,78 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core + +import java.io.Closeable +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload +import org.apache.linkis.common.utils.Utils +import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator + +/** + * + * @date 2021-11-10 + * @author enjoyyin + * @since 0.5.0 + */ +trait FlinkLogIterator extends Iterator[String] with Closeable { + val requestPayload: LogRequestPayload + val engineConnLogOperator: 
EngineConnLogOperator + def init(): Unit + def getLogPath: String + def getLogDirSuffix: String + def getLogs: util.ArrayList[String] + def getEndLine: Long +} + +class SimpleFlinkJobLogIterator(override val requestPayload: LogRequestPayload, + override val engineConnLogOperator: EngineConnLogOperator) extends FlinkLogIterator { + + private var logs: util.ArrayList[String] = _ + private var index = 0 + private var logPath: String = _ + private var logDirSuffix: String = _ + private var isClosed = true + private var endLine = 0 + + override def init(): Unit = { + engineConnLogOperator.setPageSize(requestPayload.getPageSize) + engineConnLogOperator.setFromLine(requestPayload.getFromLine) + engineConnLogOperator.setIgnoreKeywords(requestPayload.getIgnoreKeywords) + engineConnLogOperator.setOnlyKeywords(requestPayload.getOnlyKeywords) + engineConnLogOperator.setLastRows(requestPayload.getLastRows) + val engineConnLog = engineConnLogOperator() + logs = engineConnLog.logs + logPath = engineConnLog.logPath + endLine = engineConnLog.endLine + } + + override def close(): Unit = isClosed = true + + override def hasNext: Boolean = { + if(isClosed) return false + else if(index < logs.size()) return true + logs = engineConnLogOperator().logs + while (logs == null || logs.isEmpty) { + logs = engineConnLogOperator().logs + if(isClosed) return false + Utils.sleepQuietly(2000) + } + index = 0 + true + } + + override def next(): String = { + val log = logs.get(index) + index += 1 + log + } + + override def getLogPath: String = logPath + + override def getLogs: util.ArrayList[String] = logs + + override def getEndLine: Long = endLine + + def setLogDirSuffix(logDirSuffix: String) : Unit = this.logDirSuffix = logDirSuffix + + override def getLogDirSuffix: String = logDirSuffix +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala new file mode 100644 index 000000000..20cb4d081 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala @@ -0,0 +1,40 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity + +/** + * + * @date 2021-11-10 + * @author enjoyyin + * @since 0.5.0 + */ +class LogRequestPayload { + + private var pageSize = 100 + private var fromLine = 1 + private var ignoreKeywords: String = _ + private var onlyKeywords: String = _ + private var lastRows = 0 + private var logType: String = _ + private var logHistory: Boolean = false + def getPageSize: Int = pageSize + def setPageSize(pageSize: Int): Unit = this.pageSize = pageSize + + def getFromLine: Int = fromLine + def setFromLine(fromLine: Int): Unit = this.fromLine = fromLine + + def getIgnoreKeywords: String = ignoreKeywords + def setIgnoreKeywords(ignoreKeywords: String): Unit = this.ignoreKeywords = ignoreKeywords + + def getOnlyKeywords: String = onlyKeywords + def setOnlyKeywords(onlyKeywords: String): Unit = this.onlyKeywords = onlyKeywords + + def getLastRows: Int = lastRows + def setLastRows(lastRows: Int): Unit = this.lastRows = lastRows + + def getLogType: String = logType + + def setLogType(logType: String): Unit = this.logType = logType + + def isLogHistory: Boolean = logHistory + + def setLogHistory(logHistory: Boolean): Unit = this.logHistory = logHistory +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala new file mode 100644 index 000000000..d3dddbc4d --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala @@ -0,0 +1,43 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception + +import org.apache.linkis.common.exception.ErrorException + +/** + * Basic job launch exception + * @param errorCode error code + * @param errorMsg error message + */ +class FlinkJobLaunchErrorException(errorCode: Int, errorMsg: String, t: Throwable) extends ErrorException(errorCode, errorMsg){ + this.initCause(t) +} + +/** + * Exception in triggering savepoint + */ +class FlinkSavePointException(errorCode: Int, errorMsg: String, t: Throwable) + extends FlinkJobLaunchErrorException(errorCode, errorMsg, t) + +/** + * Exception in fetching job state + */ +class FlinkJobStateFetchException(errorCode: Int, errorMsg: String, t: Throwable) + extends FlinkJobLaunchErrorException(errorCode, errorMsg, t) + +class FlinkJobLogFetchException(errorCode: Int, errorMsg: String, t: Throwable) + extends FlinkJobLaunchErrorException(errorCode, errorMsg, t) + diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala new file mode 100644 index 000000000..b5c64a523 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala @@ -0,0 +1,220 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobClient +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core.{FlinkLogIterator, SimpleFlinkJobLogIterator} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.{FlinkJobLaunchErrorException, FlinkJobStateFetchException, FlinkSavePointException} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.FlinkJobLaunchManager +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator.{FlinkClientLogOperator, FlinkTriggerSavepointOperator, FlinkYarnLogOperator} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, Savepoint} +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.computation.client.once.action.ECResourceInfoAction +import org.apache.linkis.computation.client.once.result.ECResourceInfoResult +import org.apache.linkis.computation.client.once.{LinkisManagerClient, LinkisManagerClientImpl, OnceJob} +import 
org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SimpleOnceJobBuilder} +import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator +import org.apache.linkis.httpclient.dws.DWSHttpClient +import java.util +import java.net.URI + +class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager: JobStateManager) + extends JobClient[FlinkJobInfo] with Logging{ + + /** + * Log operator + */ + private var logOperatorMap = Map( + "client" -> FlinkClientLogOperator.OPERATOR_NAME, + "yarn" -> FlinkYarnLogOperator.OPERATOR_NAME + ) + /** + * The linkis client in onceJob + */ + private var linkisClient: DWSHttpClient = _ + + override def getJobInfo: FlinkJobInfo = { + getJobInfo(false) + } + + /** + * Refresh job info and return + * + * @param refresh refresh + * @return + */ + override def getJobInfo(refresh: Boolean): FlinkJobInfo = { + onceJob match { + case simpleOnceJob: SimpleOnceJob => + simpleOnceJob.getStatus + jobInfo.setStatus(if (refresh) onceJob.getNodeInfo + .getOrDefault("nodeStatus", simpleOnceJob.getStatus).asInstanceOf[String] else simpleOnceJob.getStatus) + } + jobInfo + } + + /** + * Stop the job connected remote + * + * @param snapshot if do snapshot to save the job state + */ + override def stop(snapshot: Boolean): JobStateInfo = { + var stateInfo: JobStateInfo = null + if (snapshot){ + // Begin to call the savepoint operator + info(s"Trigger Savepoint operator for job [${jobInfo.getId}] before pausing job.") + Option(triggerSavepoint()) match { + case Some(savepoint) => + stateInfo = new JobStateInfo + stateInfo.setLocation(savepoint.getLocation.toString) + stateInfo.setTimestamp(savepoint.getTimestamp) + case _ => + } + } + onceJob.kill() + stateInfo + } + + /** + * Stop directly + */ + override def stop(): Unit = stop(false) +/** + * Fetch logs + * @param requestPayload request payload + * @return + */ + def fetchLogs(requestPayload: LogRequestPayload): FlinkLogIterator = { + 
logOperatorMap.get(requestPayload.getLogType) match { + case Some(operator) => + onceJob.getOperator(operator) match { + case engineConnLogOperator: EngineConnLogOperator => + val logIterator = new SimpleFlinkJobLogIterator(requestPayload, engineConnLogOperator) + engineConnLogOperator match { + case clientLogOperator: FlinkClientLogOperator => + var logDirSuffix = this.jobInfo.getLogDirSuffix + if (StringUtils.isBlank(logDirSuffix) && requestPayload.isLogHistory){ + // If want to fetch the history log, must get the log directory suffix first + getLinkisClient match { + case client: DWSHttpClient => + Option(Utils.tryCatch{ + client.execute(ECResourceInfoAction.newBuilder().setUser(jobInfo.getUser) + .setTicketid(clientLogOperator.getTicketId).build()).asInstanceOf[ECResourceInfoResult] + }{ + case e: Exception => + warn("Fail to query the engine conn resource info from linkis", e) + null + }) match { + case Some(result) => logDirSuffix = Utils.tryAndWarn{result.getData.getOrDefault("ecResourceInfoRecord", new util.HashMap[String, Any]).asInstanceOf[util.Map[String, Any]] + .getOrDefault("logDirSuffix", "").asInstanceOf[String]} + case _ => + } + } + } + clientLogOperator.setLogDirSuffix(logDirSuffix) + logIterator.setLogDirSuffix(logDirSuffix) + case _ => + } + engineConnLogOperator match { + case yarnLogOperator: FlinkYarnLogOperator => yarnLogOperator.setApplicationId(jobInfo.getApplicationId) + case _ => + } + engineConnLogOperator.setECMServiceInstance(jobInfo.getECMInstance) + engineConnLogOperator.setEngineConnType(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE) + logIterator.init() + jobInfo match { + case jobInfo: FlinkJobInfo => jobInfo.setLogPath(logIterator.getLogPath) + case _ => + } + logIterator + } + case None => + throw new FlinkJobStateFetchException(-1, s"Unrecognized log type: ${requestPayload.getLogType}", null) + } + + + } + + /** + * Get check points + * @return + */ + def getCheckpoints: Array[Checkpoint] = throw new 
FlinkJobStateFetchException(30401, "Not support method", null) + + + /** + * Trigger save point operation + * @param savePointDir savepoint directory + * @param mode mode + */ + def triggerSavepoint(savePointDir: String, mode: String): Savepoint = { + Utils.tryCatch{ + onceJob.getOperator(FlinkTriggerSavepointOperator.OPERATOR_NAME) match{ + case savepointOperator: FlinkTriggerSavepointOperator => { + // TODO Get scheme information from job info + savepointOperator.setSavepointDir(savePointDir) + savepointOperator.setMode(mode) + Option(savepointOperator()) match { + case Some(savepoint: Savepoint) => + savepoint + // TODO store into job Info + case _ => throw new FlinkSavePointException(-1, "The response savepoint info is empty", null) + } + } + } + }{ + case se: FlinkSavePointException => + throw se + case e: Exception => + // TODO defined the code for savepoint exception + throw new FlinkSavePointException(-1, "Fail to trigger savepoint operator", e) + } + } + + def triggerSavepoint(): Savepoint = { + val savepointURI: URI = this.stateManager.getJobStateDir(classOf[Savepoint], jobInfo.getName) + triggerSavepoint(savepointURI.toString, JobLauncherConfiguration.FLINK_TRIGGER_SAVEPOINT_MODE.getValue) + } + + /** + * Get linkis client + * @return + */ + def getLinkisClient: DWSHttpClient = { + Utils.tryAndWarn{ + if (null == this.linkisClient){ + this.synchronized{ + if (null == this.linkisClient){ + this.linkisClient = SimpleOnceJobBuilder.getLinkisManagerClient match { + case client: LinkisManagerClient => + val dwsClientField = classOf[LinkisManagerClientImpl].getDeclaredField("dwsHttpClient") + dwsClientField.setAccessible(true) + dwsClientField.get(client).asInstanceOf[DWSHttpClient] + case _ => null + } + + } + } + } + this.linkisClient + } + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala new file mode 100644 index 000000000..7de4452bd --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala @@ -0,0 +1,123 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo} +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.httpclient.dws.DWSHttpClient + +import java.util + + +class FlinkJobInfo extends YarnJobInfo { + + private var id: String = _ + private var name: String = _ + private var ecmInstance: ServiceInstance = _ + private var user: String = _ + private var savepoint: String = _ + private var checkpoint: String = _ + private var applicationId: String = _ + private var applicationUrl: String = _ + private var status: String = _ + private var logPath: String = _ + private var logDirSuffix: String = _ + private var resources: java.util.Map[String, Object] = _ + private var completedMsg: String = _ + private var jobStates: Array[JobStateInfo] = _ + + override def getApplicationId: String = applicationId + + def setApplicationId(applicationId: String): Unit = this.applicationId = applicationId + + override def getApplicationUrl: String = applicationUrl + + def setApplicationUrl(applicationUrl: String): Unit = this.applicationUrl = applicationUrl + + override def getId: String = id + + def setId(id: String): Unit = this.id = id + + override def getECMInstance: ServiceInstance = ecmInstance + + def setECMInstance(ecmInstance: ServiceInstance): Unit = this.ecmInstance = ecmInstance + + override def getUser: String = user + + def setUser(user: String): Unit = this.user = user + + override def getStatus: String = status + + override def setStatus(status: String): Unit = this.status = status + + override def getLogPath: String = logPath + + def setLogPath(logPath: String): Unit = this.logPath = logPath + + override def getResources: util.Map[String, Object] = resources + + def setResources(resources: java.util.Map[String, Object]): Unit = this.resources = resources + + def getSavepoint: String = savepoint + + def setSavepoint(savepoint: 
String): Unit = this.savepoint = savepoint + + def getCheckpoint: String = checkpoint + + def setCheckpoint(checkpoint: String): Unit = this.checkpoint = checkpoint + + override def getCompletedMsg: String = completedMsg + + def setCompletedMsg(completedMsg: String): Unit = this.completedMsg = completedMsg + + override def toString: String = s"FlinkJobInfo(id: $id, name: $name, status: $status, applicationId: $applicationId, applicationUrl: $applicationUrl, logPath: $logPath)" + + /** + * Contains the check point and save points + * + * @return + */ + override def getJobStates: Array[JobStateInfo] = { + jobStates + } + + def setJobStates(jobStates: Array[JobStateInfo]): Unit = { + this.jobStates = jobStates + } + + /** + * Job name + * + * @return name + */ + override def getName: String = name + + def setName(name: String): Unit = { + this.name = name + } + + /** + * Job log directory suffix + * + * @return + */ + override def getLogDirSuffix: String = this.logDirSuffix + + override def setLogDirSuffix(logDirSuffix: String): Unit = { + this.logDirSuffix = logDirSuffix + } +} + diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala new file mode 100644 index 000000000..4183f5025 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo +import org.apache.linkis.common.ServiceInstance + +trait LinkisJobInfo extends JobInfo { + + /** + * Fetch engine conn manager instance info + * 
@return + */ + def getECMInstance: ServiceInstance + + /** + * Job log directory suffix + * @return + */ + def getLogDirSuffix: String + + def setLogDirSuffix(logDirSuffix: String): Unit +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala new file mode 100644 index 000000000..d2907f901 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala @@ -0,0 +1,9 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job + +trait YarnJobInfo extends LinkisJobInfo { + + def getApplicationId: String + + def getApplicationUrl: String + +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala new file mode 100644 index 000000000..767b9e58e --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala @@ -0,0 +1,118 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateFetcher} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.AbstractJobStateManager.WINDOWS_ROOT_DIR_REGEX +import org.apache.linkis.common.utils.Utils + +import java.net.{URI, URL, URLConnection, URLStreamHandler} +import java.util.concurrent.ConcurrentHashMap +import java.util +import scala.util.matching.Regex +/** + * Abstract job state manager + */ +abstract class AbstractJobStateManager extends JobStateManager { + + /** + * Hold the job state fetcher with its type + */ + protected val jobStateFetcherHolder: ConcurrentHashMap[String, JobStateFetcher[_ <: JobState]] + = new ConcurrentHashMap[String, JobStateFetcher[_ <: JobState]]() + + /** + * Fetcher loaders + */ + protected val stateFetcherLoaders: util.Map[String, ()=> JobStateFetcher[_ <: JobState]] = new util.HashMap[String, () => JobStateFetcher[_ <: JobState]]() + + override def getOrCreateJobStateFetcher[T <: JobState](clazz: Class[_]): JobStateFetcher[T] = { + val stateType = clazz.getCanonicalName + val loader = 
Option(stateFetcherLoaders.get(stateType)) + if (loader.isEmpty){ + throw new FlinkJobStateFetchException(-1, s"Cannot find the fetcher loader for [$stateType]", null) + } + jobStateFetcherHolder.computeIfAbsent(stateType, new util.function.Function[String, JobStateFetcher[_ <: JobState]]{ + override def apply(t: String): JobStateFetcher[_ <: JobState] = { + val fetcher = loader.get.apply() + Utils.tryCatch(fetcher.init()){ + case e: Exception => + throw new FlinkJobStateFetchException(-1, s"Unable to init the state fetcher [${fetcher.getClass.getName}", e) + } + fetcher + } + }).asInstanceOf[JobStateFetcher[T]] + } + + override def getJobState[T <: JobState](clazz: Class[_], jobInfo: JobInfo): T = Option(getOrCreateJobStateFetcher[T](clazz)) match { + case Some(jobStateFetcher: JobStateFetcher[T]) =>jobStateFetcher.getState(jobInfo) + case _ => null.asInstanceOf[T] + } + + /** + * Register job state fetcher + * + * @param clazz clazz + * @param builder job state fetcher loader/builder + * @tparam T + */ + override def registerJobStateFetcher(clazz: Class[_], builder: () => JobStateFetcher[_ <: JobState]): Unit = { + stateFetcherLoaders.put(clazz.getCanonicalName, builder) + } + + override def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, relativePath: String): URI = { + getJobStateDir(clazz, scheme, null, relativePath) + } + + + override def getJobStateDir[T <: JobState](clazz: Class[_], relativePath: String): URI = { + getJobStateDir(clazz, JobLauncherConfiguration.FLINK_STATE_DEFAULT_SCHEME.getValue, + JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY.getValue, relativePath) + } + + /** + * Get job state directory uri + * + * @param clazz clazz + * @param scheme scheme + * @param authority authority + * @param relativePath relative path + * @tparam T + * @return + */ + override def getJobStateDir[T <: JobState](clazz: Class[_], scheme: String, authority: String, relativePath: String): URI = { + // To Support all schema + new URI(scheme, 
authority, normalizePath(getJobStateRootPath(clazz, scheme) + "/" + relativePath), null, null) + } + + private def normalizePath(input: String): String = { + var path = input.replace("\\", "/") + path = path.replaceAll("/+", "/") + // Replace "." to "/" + path = path.replaceAll("\\.", "/") + if (path.endsWith("/") && !(path == "/") && !WINDOWS_ROOT_DIR_REGEX.pattern.matcher(path).matches()) path = path.substring(0, path.length - "/".length) + path + } + def getJobStateRootPath[T <: JobState](clazz: Class[_], schema: String): String +} + +object AbstractJobStateManager{ + val WINDOWS_ROOT_DIR_REGEX: Regex = "/\\p{Alpha}+:/".r +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala new file mode 100644 index 000000000..a00edd8e8 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala @@ -0,0 +1,132 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob} +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration.{VAR_FLINK_APP_NAME, VAR_FLINK_SAVEPOINT_PATH} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobLaunchErrorException +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.computation.client.once.{OnceJob, SubmittableOnceJob} +import org.apache.linkis.computation.client.utils.LabelKeyUtils +import org.apache.linkis.protocol.utils.TaskUtils + + + +trait FlinkJobLaunchManager extends LinkisJobLaunchManager with Logging { + + protected var jobStateManager: JobStateManager = _ + + protected def buildOnceJob(job: LaunchJob): SubmittableOnceJob + + protected def createSubmittedOnceJob(id: String, jobInfo: LinkisJobInfo): OnceJob + + + protected def createJobInfo(onceJob: SubmittableOnceJob, job: LaunchJob, jobState: JobState): LinkisJobInfo + + protected def createJobInfo(jobInfo: String): LinkisJobInfo + + /** + * This method is used to launch a new job. + * + * @param job a StreamisJob wanted to be launched. + * @param jobState job state used to launch + * @return the job id. 
+ */ + override def innerLaunch(job: LaunchJob, jobState: JobState): JobClient[LinkisJobInfo] = { + // Transform the JobState into the params in LaunchJob + Option(jobState).foreach(state => { + val startUpParams = TaskUtils.getStartupMap(job.getParams) + startUpParams.putIfAbsent(VAR_FLINK_SAVEPOINT_PATH.getValue, + state.getLocation.toString) + }) + TaskUtils.getStartupMap(job.getParams).put(VAR_FLINK_APP_NAME.getValue, + Option(job.getJobName) match { + case None => "EngineConn-Flink" + case Some(jobName) => + val index = jobName.lastIndexOf(".") + if (index > 0) jobName.substring(0, index) else jobName + }) + job.getLabels.get(LabelKeyUtils.ENGINE_TYPE_LABEL_KEY) match { + case engineConnType: String => + if(!engineConnType.toLowerCase.startsWith(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE)) + throw new FlinkJobLaunchErrorException(30401, s"Only ${FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE} job is supported to be launched to Linkis, but $engineConnType is found.", null) + case _ => throw new FlinkJobLaunchErrorException(30401, s"Not exists ${LabelKeyUtils.ENGINE_TYPE_LABEL_KEY}, StreamisJob cannot be submitted to Linkis successfully.", null) + } + Utils.tryCatch { + val onceJob = buildOnceJob(job) + onceJob.submit() + val jobInfo = Utils.tryCatch(createJobInfo(onceJob, job, jobState)) { + case e: FlinkJobLaunchErrorException => + throw e + case t: Throwable => + error(s"${job.getSubmitUser} create jobInfo failed, now stop this EngineConn ${onceJob.getId}.") + Utils.tryAndWarn(onceJob.kill()) + throw new FlinkJobLaunchErrorException(-1, "Fail to obtain launched job info", t) + } + createJobClient(onceJob, jobInfo) + }{ + case e: FlinkJobLaunchErrorException => throw e + case t: Throwable => + error(s"Server Exception in submitting Flink job [${job.getJobName}] to Linkis remote server", t) + throw new FlinkJobLaunchErrorException(-1, s"Exception in submitting Flink job to Linkis remote server (提交至Linkis服务失败,请检查服务及网络)", t) + } + } + + override def launch(job: 
LaunchJob): JobClient[LinkisJobInfo] = { + launch(job, null) + } + + + override def connect(id: String, jobInfo: String): JobClient[LinkisJobInfo] = { + connect(id, createJobInfo(jobInfo)) + } + + override def connect(id: String, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo] = { + createJobClient(createSubmittedOnceJob(id, jobInfo), jobInfo) + } + + + /** + * Job state manager(store the state information, example: Checkpoint/Savepoint) + * + * @return state manager instance + */ + override def getJobStateManager: JobStateManager = { + Option(jobStateManager) match { + case None => + this synchronized{ + // Flink job state manager + jobStateManager = new FlinkJobStateManager + } + jobStateManager + case Some(stateManager) => stateManager + } + } + + /** + * Create job client + * @param onceJob once job + * @param jobInfo job info + * @return + */ + protected def createJobClient(onceJob: OnceJob, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo] +} +object FlinkJobLaunchManager { + val FLINK_ENGINE_CONN_TYPE = "flink" +} \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala new file mode 100644 index 000000000..75a4af2b1 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala @@ -0,0 +1,56 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration +import 
com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, CheckpointJobStateFetcher, Savepoint, SavepointJobStateFetcher} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url.LinkisURLStreamHandlerFactory +import org.apache.linkis.common.utils.{Logging, Utils} + +import java.net.URL +import scala.collection.JavaConverters.mapAsScalaMapConverter + + +/** + * Flink job state manager + */ +class FlinkJobStateManager extends AbstractJobStateManager with Logging{ + /** + * State type => root path + */ + val stateRootPath: Map[String, String] = Map( + classOf[Savepoint].getCanonicalName -> JobLauncherConfiguration.FLINK_SAVEPOINT_PATH.getValue, + classOf[Checkpoint].getCanonicalName -> JobLauncherConfiguration.FLINK_CHECKPOINT_PATH.getValue + ) + + override def getJobStateRootPath[T <: JobState](clazz: Class[_], schema: String): String = { + stateRootPath.getOrElse(clazz.getCanonicalName, "") + } + + /** + * Init method + */ + override def init(): Unit = { + info("Register the loader for JobState fetcher") + // TODO register the fetcher + registerJobStateFetcher(classOf[Checkpoint], () => new CheckpointJobStateFetcher(classOf[Checkpoint], this)) + registerJobStateFetcher(classOf[Savepoint], () => new SavepointJobStateFetcher(classOf[Savepoint], this)) + } + + /** + * Destroy method + */ + override def destroy(): Unit = { + // Close the loaded fetcher + jobStateFetcherHolder.asScala.foreach(stateFetcher => { + Utils.tryAndWarn(stateFetcher._2.destroy()) + }) + } +} + +object FlinkJobStateManager{ + // set urlStreamHandler use support schemas + URL.setURLStreamHandlerFactory(new LinkisURLStreamHandlerFactory(JobLauncherConfiguration.FLINK_STATE_SUPPORT_SCHEMES.getValue.split(","): _*)) + + def main(args: Array[String]): Unit = { + + } +} diff --git 
a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala new file mode 100644 index 000000000..26d76cfdf --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala @@ -0,0 +1,97 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.LinkisJobLaunchManager.LINKIS_JAR_VERSION_PATTERN +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.computation.client.LinkisJob +import org.apache.linkis.protocol.utils.TaskUtils + +import java.util +import scala.collection.JavaConverters._ +import scala.util.matching.Regex + +trait LinkisJobLaunchManager extends JobLaunchManager[LinkisJobInfo] with Logging{ + /** + * This method is used to launch a new job. + * + * @param job a StreamisJob wanted to be launched. + * @param jobState job state used to launch + * @return the job id. 
+ */ + override def launch(job: LaunchJob, jobState: JobState): JobClient[LinkisJobInfo] = { + // Support different version of Linkis + var linkisVersion = JobLauncherConfiguration.FLINK_LINKIS_RELEASE_VERSION.getValue + if (StringUtils.isBlank(linkisVersion)) { + val linkisJarPath = classOf[LinkisJob].getProtectionDomain.getCodeSource.getLocation.getPath; + val lastSplit = linkisJarPath.lastIndexOf(IOUtils.DIR_SEPARATOR); + if (lastSplit >= 0) { + linkisVersion = linkisJarPath.substring(lastSplit + 1) + } + } + if (StringUtils.isNotBlank(linkisVersion)) { + Utils.tryAndWarn { + val LINKIS_JAR_VERSION_PATTERN(version) = linkisVersion + linkisVersion = version + } + } + if (StringUtils.isNotBlank(linkisVersion)){ + val versionSplitter: Array[String] = linkisVersion.split("\\.") + val major = Integer.valueOf(versionSplitter(0)) + val sub = Integer.valueOf(versionSplitter(1)) + val fix = Integer.valueOf(versionSplitter(2)) + val versionNum = major * 10000 + sub * 100 + fix + info(s"Recognized the linkis release version: [${linkisVersion}, version number: [${versionNum}]") + if (versionNum <= 10101){ + warn("Linkis version number is less than [10101], should compatible the startup params in launcher.") + val startupParams = TaskUtils.getStartupMap(job.getParams) + // Change the unit of memory params for linkis older version + changeUnitOfMemoryToG(startupParams, "flink.taskmanager.memory") + changeUnitOfMemoryToG(startupParams, "flink.jobmanager.memory") + // Avoid the _FLINK_CONFIG_. 
prefix for linkis older version + val newParams = avoidParamsPrefix(startupParams, "_FLINK_CONFIG_.") + startupParams.clear(); + startupParams.putAll(newParams) + } + } + innerLaunch(job, jobState) + } + + private def changeUnitOfMemoryToG(params: util.Map[String, Any], name: String): Unit = { + params.get(name) match { + case memory: String => + var actualMem = Integer.valueOf(memory) / 1024 + actualMem = if (actualMem <= 0) 1 else actualMem + info(s"Change the unit of startup param: [${name}], value [${memory}] => [${actualMem}]") + params.put(name, actualMem) + case _ => // Ignores + } + } + + /** + * Avoid params prefix + * @param params params + * @param prefix prefix + */ + private def avoidParamsPrefix(params: util.Map[String, Any], prefix: String): util.Map[String, Any] = { + params.asScala.map{ + case (key, value) => + if (key.startsWith(prefix)){ + info(s"Avoid the prefix of startup param: [${key}] => [${key.substring(prefix.length)}]") + (key.substring(prefix.length), value) + } else { + (key, value) + } + }.toMap.asJava + } + def innerLaunch(job: LaunchJob, jobState: JobState): JobClient[LinkisJobInfo] +} + +object LinkisJobLaunchManager{ + val LINKIS_JAR_VERSION_PATTERN: Regex = "^[\\s\\S]*([\\d]+\\.[\\d]+\\.[\\d]+)[\\s\\S]*$".r +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala new file mode 100644 index 000000000..8b4308a2b --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala @@ -0,0 +1,133 @@ +/* + * Copyright 2021 WeBank + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.SimpleFlinkJobLaunchManager.INSTANCE_NAME +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo, LinkisJobInfo} +import org.apache.commons.lang3.StringEscapeUtils +import org.apache.linkis.common.utils.{RetryHandler, Utils} +import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SubmittableSimpleOnceJob} +import org.apache.linkis.computation.client.once.{OnceJob, SubmittableOnceJob} +import org.apache.linkis.computation.client.operator.impl.EngineConnApplicationInfoOperator +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.ujes.client.exception.UJESJobException + +import java.util +import scala.collection.JavaConverters.mapAsScalaMapConverter + +class SimpleFlinkJobLaunchManager extends FlinkJobLaunchManager { + + override def getName: String = INSTANCE_NAME + + protected def buildOnceJob(job: LaunchJob): SubmittableOnceJob = { + val builder = 
SimpleOnceJob.builder().addExecuteUser(job.getSubmitUser).setLabels(job.getLabels) + .setJobContent(job.getJobContent).setParams(job.getParams).setSource(job.getSource) + if(job.getLaunchConfigs != null) { + job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_CREATE_SERVICE).foreach{ case createService: String => builder.setCreateService(createService)} + job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_DESCRIPTION).foreach{ case desc: String => builder.setDescription(desc)} + job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_MAX_SUBMIT_TIME).foreach{ case maxSubmitTime: Long => builder.setMaxSubmitTime(maxSubmitTime)} + } + builder.build() + } + + override protected def createSubmittedOnceJob(id: String, jobInfo: LinkisJobInfo): OnceJob = SimpleOnceJob.build(id, jobInfo.getUser) + + + override protected def createJobInfo(onceJob: SubmittableOnceJob, job: LaunchJob, jobState: JobState): LinkisJobInfo = { + val nodeInfo = onceJob.getNodeInfo + val jobInfo = new FlinkJobInfo + // Escape the job name + jobInfo.setName(StringEscapeUtils.escapeJava(job.getJobName)) + jobInfo.setId(onceJob.getId) + jobInfo.setUser(job.getSubmitUser) + onceJob match { + case simpleOnceJob: SubmittableSimpleOnceJob => + jobInfo.setECMInstance(simpleOnceJob.getECMServiceInstance) + case _ => + } + Utils.tryCatch(fetchApplicationInfo(onceJob, jobInfo)) { t => + val message = s"Unable to fetch the application info of launched job [${job.getJobName}], maybe the engine has been shutdown" + error(message, t) + // Mark failed + jobInfo.setStatus("failed") + jobInfo.setCompletedMsg(message) + } + jobInfo.setResources(nodeInfo.get("nodeResource").asInstanceOf[util.Map[String, Object]]) + // Set job state info into +// Option(jobState).foreach(state => { +// val stateInfo = new JobStateInfo +// stateInfo.setTimestamp(state.getTimestamp) +// stateInfo.setLocation(state.getLocation.toString) +// jobInfo.setJobStates(Array(stateInfo)) +// }) + jobInfo + } + + override protected def 
createJobInfo(jobInfo: String): LinkisJobInfo = DWSHttpClient.jacksonJson.readValue(jobInfo, classOf[FlinkJobInfo]) + + protected def fetchApplicationInfo(onceJob: OnceJob, jobInfo: FlinkJobInfo): Unit = { + onceJob.getOperator(EngineConnApplicationInfoOperator.OPERATOR_NAME) match { + case applicationInfoOperator: EngineConnApplicationInfoOperator => + val retryHandler = new RetryHandler {} + retryHandler.setRetryNum(JobLauncherConfiguration.FLINK_FETCH_APPLICATION_INFO_MAX_TIMES.getValue) + retryHandler.setRetryMaxPeriod(5000) + retryHandler.setRetryPeriod(500) + retryHandler.addRetryException(classOf[UJESJobException]) + val applicationInfo = retryHandler.retry(applicationInfoOperator(), "Fetch-Yarn-Application-Info") + jobInfo.setApplicationId(applicationInfo.applicationId) + jobInfo.setApplicationUrl(applicationInfo.applicationUrl) + } + } + + /** + * Create job client + * + * @param onceJob once job + * @param jobInfo job info + * @return + */ + override protected def createJobClient(onceJob: OnceJob, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo] = { + jobInfo match { + case flinkJobInfo: FlinkJobInfo => + new FlinkJobClient(onceJob, flinkJobInfo, this.jobStateManager).asInstanceOf[JobClient[LinkisJobInfo]] + case _ => null + } + } + + /** + * Init method + */ + override def init(): Unit = { + // Init the job state manager + getJobStateManager.init() + } + + + /** + * Destroy method + */ + override def destroy(): Unit = { + // Destroy the job state manager + getJobStateManager.destroy() + } +} +object SimpleFlinkJobLaunchManager{ + + val INSTANCE_NAME = "simpleFlink"; +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala 
new file mode 100644 index 000000000..a41018a74 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator + +import org.apache.commons.lang3.StringUtils +import org.apache.linkis.computation.client.once.action.EngineConnOperateAction +import org.apache.linkis.computation.client.operator.impl.{EngineConnLogOperator, EngineConnLogs} + +/** + * Append "logDirSuffix" parameter + */ +class FlinkClientLogOperator extends EngineConnLogOperator{ + + private var logDirSuffix: String = _ + + def setLogDirSuffix(logDirSuffix: String): Unit = { + this.logDirSuffix = logDirSuffix + } + + protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = { + builder.operatorName(EngineConnLogOperator.OPERATOR_NAME) + if (StringUtils.isNotBlank(this.logDirSuffix)) { + builder.addParameter("logDirSuffix", logDirSuffix) + } + super.addParameters(builder) + } + + + override def getTicketId: String = super.getTicketId + + override def getName: String = FlinkClientLogOperator.OPERATOR_NAME +} + +object FlinkClientLogOperator { + val OPERATOR_NAME = "engineConnLog_flink" +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala new file mode 100644 index 000000000..993847836 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala @@ -0,0 +1,61 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator + +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.Savepoint +import org.apache.linkis.computation.client.once.action.EngineConnOperateAction +import org.apache.linkis.computation.client.once.result.EngineConnOperateResult +import org.apache.linkis.computation.client.operator.OnceJobOperator + +/** + * Flink trigger savepoint operator + */ +class FlinkTriggerSavepointOperator extends OnceJobOperator[Savepoint]{ + + /** + * Save point directory + */ + private var savepointDir: String = _ + + /** + * Mode + */ + private var mode: String = _ + + def setSavepointDir(savepointDir: String): Unit ={ + this.savepointDir = savepointDir + } + + def setMode(mode: String): Unit = { + this.mode = mode + } + + override protected def addParameters(builder: EngineConnOperateAction.Builder): Unit = { + builder.addParameter("savepointPath", savepointDir) + builder.addParameter("mode", mode) + } + + override protected def resultToObject(result: EngineConnOperateResult): Savepoint = { + val savepointPath:String = result.getAs("writtenSavepoint") + info(s"Get the savepoint store 
path: [$savepointPath] form ${FlinkTriggerSavepointOperator.OPERATOR_NAME} operation") + new Savepoint(savepointPath) + } + + override def getName: String = FlinkTriggerSavepointOperator.OPERATOR_NAME +} +object FlinkTriggerSavepointOperator{ + val OPERATOR_NAME = "doSavepoint" +} \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala new file mode 100644 index 000000000..975b23405 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala @@ -0,0 +1,43 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator + +import org.apache.linkis.computation.client.once.action.EngineConnOperateAction +import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator + +/** + * Extend the flink client log operator + */ +class FlinkYarnLogOperator extends FlinkClientLogOperator { + + private var applicationId: String = _ + + def setApplicationId(applicationId: String): Unit = { + this.applicationId = applicationId + } + + protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = { + super.addParameters(builder) + builder.operatorName(getName) + builder.addParameter("yarnApplicationId", this.applicationId) + } + + override def getName: String = FlinkYarnLogOperator.OPERATOR_NAME +} + +object FlinkYarnLogOperator{ + val OPERATOR_NAME = "engineConnYarnLog" +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractJobStateResult.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractJobStateResult.scala new file mode 100644 index 000000000..0f5cbe327 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractJobStateResult.scala @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state + +import org.apache.linkis.httpclient.dws.response.DWSResult + +abstract class AbstractJobStateResult extends DWSResult { + +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala 
b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala new file mode 100644 index 000000000..a5e42e599 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala @@ -0,0 +1,37 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Holds the information of a single Flink checkpoint located at the given
 * state path.
 */
class Checkpoint(location: String) extends GenericFlinkJobState(location) with JobState {

  // Sequence number of the checkpoint; -1 until it has been assigned
  private var order: Long = -1

  def setOrder(order: Long): Unit = this.order = order

  def getOrder: Long = order
}
/**
 * Generic Flink job state: a snapshot addressed by a location URI, carrying
 * an id, the save timestamp and optional opaque metadata.
 */
class GenericFlinkJobState(location: String) extends JobState {

  // Time at which the state was saved; -1 when unknown
  private var timestamp: Long = -1

  // State id; "{ID}" is the unresolved placeholder value
  private var id: String = "{ID}"

  // Opaque metadata attached to this state
  private var metadataInfo: Any = _

  override def getLocation: URI = URI.create(location)

  override def getMetadataInfo: Any = metadataInfo

  def setMetadataInfo(metadataInfo: Any): Unit = this.metadataInfo = metadataInfo

  /**
   * Job state id
   */
  override def getId: String = id

  def setId(id: String): Unit = this.id = id

  /**
   * Timestamp at which the state was saved
   */
  override def getTimestamp: Long = timestamp

  def setTimestamp(timestamp: Long): Unit = this.timestamp = timestamp
}
/**
 * Holds the savepoint information of a Flink job; all behaviour is inherited
 * from the generic job-state implementation.
 */
class Savepoint(location: String) extends GenericFlinkJobState(location) with JobState

/**
 * Configuration holder for the Linkis job-state client (currently a
 * placeholder with no entries).
 */
class LinkisJobStateClientConf

object LinkisJobStateClientConf
/**
 * HTTP GET action that asks the Linkis filesystem API for the directory file
 * tree under a given path (used to list persisted job-state files).
 */
class LinkisJobStateGetAction extends GetAction with DWSHttpAction with UserAction {

  private var user: String = _

  /** Convenience constructor: bind the requesting user and the target path. */
  def this(user: String, path: String) = {
    this()
    this.user = user
    this.setParameter("path", path)
  }

  // REST path: /filesystem/getDirFileTrees
  override def suffixURLs: Array[String] = Array("filesystem", "getDirFileTrees")

  override def setUser(user: String): Unit = this.user = user

  override def getUser: String = user
}
a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java new file mode 100644 index 000000000..58b781b1c --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java @@ -0,0 +1,70 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher; + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager$; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.SimpleFlinkJobLaunchManager$; +import org.apache.linkis.common.utils.ClassUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import java.lang.reflect.Constructor; +import java.lang.reflect.InvocationTargetException; +import java.util.Objects; + +@Configuration +public class JobLauncherAutoConfiguration { + + private static final Logger LOG = LoggerFactory.getLogger(JobLauncherAutoConfiguration.class); + + public static final String DEFAULT_JOB_LAUNCH_MANGER = SimpleFlinkJobLaunchManager$.MODULE$.INSTANCE_NAME(); + + @Bean(initMethod = "init", destroyMethod = "destroy") + @ConditionalOnMissingBean(JobLaunchManager.class) + @SuppressWarnings("unchecked") + public JobLaunchManager defaultJobLaunchManager(){ + // First to scan the available job launch manager + ClassUtils.reflections().getSubTypesOf(JobLaunchManager.class).stream() + .filter(clazz -> !ClassUtils.isInterfaceOrAbstract(clazz)).forEach(clazz -> { + Constructor constructor = null; + try { + constructor = clazz.getConstructor(); + } catch (NoSuchMethodException e) { + LOG.warn("Job launch manger: [{}] has no empty constructor ", clazz.getCanonicalName(), e); + } + if (Objects.nonNull(constructor)){ + try { + JobLaunchManager launchManager = (JobLaunchManager) constructor.newInstance(); + JobLaunchManager$.MODULE$.registerJobManager(launchManager.getName(), launchManager); + } catch 
(InstantiationException | IllegalAccessException | InvocationTargetException e) { + LOG.warn("Unable to instance the job launch manager: [{}]", clazz.getCanonicalName(), e); + } + } + }); + // Use the flink job launch manager as default + JobLaunchManager defaultManager = JobLaunchManager$.MODULE$.getJobManager(DEFAULT_JOB_LAUNCH_MANGER); + if (Objects.isNull(defaultManager)){ + throw new IllegalArgumentException("Unable to find the default job launch manger: [" + DEFAULT_JOB_LAUNCH_MANGER + + "], please check the jar classpath and configuration"); + } + return defaultManager; + } +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/StreamJobConfMapper.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/StreamJobConfMapper.java new file mode 100644 index 000000000..8aba65896 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/StreamJobConfMapper.java @@ -0,0 +1,66 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.dao; + +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition; +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfValue; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * Operate the job configuration + */ +public interface StreamJobConfMapper { + + /** + * Select all config definitions + * @return list + */ + List loadAllDefinitions(); + + /** + * Get raw value + * @param jobId job id + * @param key key + * @return + */ + String getRawConfValue(@Param("jobId")Long jobId, @Param("key")String key); + /** + * Get config values by job id + * @param jobId job id + * @return + */ + List getConfValuesByJobId(@Param("jobId")Long jobId); + + /** + * Delete values by job id + * @param jobId job id + */ + int deleteConfValuesByJobId(@Param("jobId")Long jobId); + + /** + * Delete temporary config value + * @param jobId job id + * @return affect rows + */ + int deleteTemporaryConfValue(@Param("jobId")Long jobId); + /** + * Batch insert + * @param values values + */ + void batchInsertValues(@Param("values")List values); +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml new file mode 100644 index 000000000..94556b3f9 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml @@ -0,0 +1,71 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + DELETE FROM `linkis_stream_job_config` WHERE job_id = #{jobId}; + + + + DELETE c FROM `linkis_stream_job_config` c INNER 
JOIN `linkis_stream_job_config_def` d ON c.job_id = #{jobId} AND d.id = c.ref_def_id AND d.is_temp = 1; + + + INSERT INTO `linkis_stream_job_config`(`job_id`, `job_name`, `key`, `value`, `ref_def_id`) VALUES + + (#{item.jobId}, #{item.jobName}, #{item.key}, #{item.value}, #{item.referDefId}) + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala new file mode 100644 index 000000000..187b02288 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala @@ -0,0 +1,92 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.streamis.jobmanager.launcher.conf

import org.apache.linkis.common.conf.CommonVars


/**
 * Config key constants.
 * Each entry maps a streamis-side property name to its default config key
 * (second argument of CommonVars); groups act as prefixes/namespaces for
 * the job configuration values stored per job.
 */
object JobConfKeyConstants {

  /**
   * Config group for streamis internal configuration
   */
  val GROUP_INTERNAL: CommonVars[String] = CommonVars("wds.streamis.job.internal.config.group", "wds.streamis.internal.params")

  /**
   * Group: Flink extra parameters
   */
  val GROUP_FLINK_EXTRA: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.flink-extra", "wds.linkis.flink.custom")

  /**
   * Group: produce
   */
  val GROUP_PRODUCE: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.produce", "wds.linkis.flink.produce")

  /**
   * Group: resource
   */
  val GROUP_RESOURCE: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.resource", "wds.linkis.flink.resource")

  /**
   * Group: permission
   */
  val GROUP_PERMISSION: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.permission", "wds.linkis.flink.authority")

  /**
   * Group: alert
   */
  val GROUP_ALERT: CommonVars[String] = CommonVars("wds.streamis.job.config.key.group.alert", "wds.linkis.flink.alert")

  /**
   * Checkpoint prefix (note trailing dot: keys nest under it)
   */
  val CHECKPOINT: CommonVars[String] = CommonVars("wds.streamis.job.config.key.checkpoint", "wds.linkis.flink.checkpoint.")

  /**
   * Checkpoint switch
   */
  val CHECKPOINT_SWITCH: CommonVars[String] = CommonVars("wds.streamis.job.config.key.checkpoint.switch", "wds.linkis.flink.checkpoint.switch")

  /**
   * Savepoint prefix (note trailing dot: keys nest under it)
   */
  val SAVEPOINT: CommonVars[String] = CommonVars("wds.streamis.job.config.key.savepoint", "wds.linkis.flink.savepoint.")

  /**
   * Switch to restart job automatically when fail
   */
  val FAIL_RESTART_SWITCH: CommonVars[String] = CommonVars("wds.streamis.job.config.key.fail-restart.switch", "wds.linkis.flink.app.fail-restart.switch")

  /**
   * Switch to restore job automatically when starting
   */
  val START_AUTO_RESTORE_SWITCH: CommonVars[String] = CommonVars("wds.streamis.job.config.key.start-auto-restore.switch", "wds.linkis.flink.app.start-auto-restore.switch")

  /**
   * Authority author visibility
   */
  val AUTHORITY_AUTHOR_VISIBLE: CommonVars[String] = CommonVars("wds.streamis.job.config.key.authority.visible", "wds.linkis.flink.authority.visible")

  /**
   * Alert user (notified on job failure)
   */
  val ALERT_USER: CommonVars[String] = CommonVars("wds.streamis.job.config.key.alert.user", "wds.linkis.flink.alert.failure.user")

  /**
   * Alert level
   */
  val ALERT_LEVEL: CommonVars[String] = CommonVars("wds.streamis.job.config.key.alert.level", "wds.linkis.flink.alert.level")
}
/**
 * Checked exception raised when job configuration cannot be loaded or saved.
 */
public class ConfigurationException extends Exception {

    public ConfigurationException() {
    }

    /**
     * @param message detail message
     */
    public ConfigurationException(String message) {
        super(message);
    }

    /**
     * Chains the underlying cause so callers wrapping lower-level failures
     * do not lose the original stack trace.
     *
     * @param message detail message
     * @param cause underlying cause
     */
    public ConfigurationException(String message, Throwable cause) {
        super(message, cause);
    }
}
+import java.util +import javax.annotation.Resource +import scala.collection.JavaConverters._ + +@Service +class DefaultStreamJobConfService extends StreamJobConfService with Logging{ + + @Resource + private var streamJobConfMapper: StreamJobConfMapper = _ + + @Resource + private var streamJobMapper: StreamJobMapper = _ + /** + * Get all config definitions + * + * @return list + */ + override def loadAllDefinitions(): util.List[JobConfDefinition] = { + streamJobConfMapper.loadAllDefinitions() + } + + /** + * Save job configuration + * + * @param jobId job id + * @param valueMap value map + */ + @Transactional(rollbackFor = Array(classOf[Exception])) + override def saveJobConfig(jobId: Long, valueMap: util.Map[String, Any]): Unit = { + val definitions = Option(this.streamJobConfMapper.loadAllDefinitions()) + .getOrElse(new util.ArrayList[JobConfDefinition]()) + // Can deserialize the value map at first + val configValues = JobConfValueUtils.deserialize(valueMap, definitions) + suppleDefaultConfValue(configValues, definitions) + saveJobConfig(jobId, configValues) + } + + /** + * Query the job configuration + * + * @param jobId job id + * @return + */ + override def getJobConfig(jobId: Long): util.Map[String, Any] = { + getJobConfig(jobId, this.streamJobConfMapper.loadAllDefinitions()) + } + + /** + * Query the job value + * + * @param jobId job id + * @param configKey config key + * @return + */ + override def getJobConfValue(jobId: Long, configKey: String): String = { + this.streamJobConfMapper.getRawConfValue(jobId, configKey) + } + + /** + * Get job configuration value set + * + * @param jobId job id + * @return + */ + override def getJobConfValueSet(jobId: Long): JobConfValueSet = { + val valueSet = new JobConfValueSet + val definitions: util.List[JobConfDefinition] = this.streamJobConfMapper.loadAllDefinitions() + val jobConfig: util.Map[String, Any] = getJobConfig(jobId, definitions) + val definitionMap: util.Map[String, JobConfDefinition] = 
definitions.asScala.map(definition => (definition.getKey, definition)).toMap.asJava + valueSet.setResourceConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_RESOURCE.getValue, jobConfig, definitionMap)) + valueSet.setParameterConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_FLINK_EXTRA.getValue, jobConfig, definitionMap)) + valueSet.setProduceConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_PRODUCE.getValue, jobConfig, definitionMap)) + valueSet.setPermissionConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_PERMISSION.getValue, jobConfig, definitionMap)) + valueSet.setAlarmConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_ALERT.getValue, jobConfig, definitionMap)) + valueSet.setJobId(jobId) + valueSet + } + + /** + * Save job configuration value set + * + * @param valueSet value set + */ + override def saveJobConfValueSet(valueSet: JobConfValueSet): Unit = { + val configValues: util.List[JobConfValue] = new util.ArrayList[JobConfValue]() + val definitions = this.streamJobConfMapper.loadAllDefinitions() + val definitionMap: util.Map[String, JobConfDefinition] = definitions + .asScala.map(definition => (definition.getKey, definition)).toMap.asJava + configValues.addAll(convertToConfigValue( + valueSet.getResourceConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_RESOURCE.getValue)) match { + case Some(definition) => definition.getId + case _ => 0 + })) + configValues.addAll(convertToConfigValue( + valueSet.getParameterConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_FLINK_EXTRA.getValue)) match { + case Some(definition) => definition.getId + case _ => 0 + })) + configValues.addAll(convertToConfigValue( + valueSet.getProduceConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_PRODUCE.getValue)) match { + case Some(definition) => definition.getId + case _ => 0 + })) + configValues.addAll(convertToConfigValue( + valueSet.getPermissionConfig, definitionMap, 
Option(definitionMap.get(JobConfKeyConstants.GROUP_PERMISSION.getValue)) match { + case Some(definition) => definition.getId + case _ => 0 + })) + configValues.addAll(convertToConfigValue( + valueSet.getAlarmConfig, definitionMap, Option(definitionMap.get(JobConfKeyConstants.GROUP_ALERT.getValue)) match { + case Some(definition) => definition.getId + case _ => 0 + })) + suppleDefaultConfValue(configValues, definitions) + saveJobConfig(valueSet.getJobId, configValues) + } + /** + * Get job configuration map + * @param jobId job id + * @param definitions definitions + * @return + */ + private def getJobConfig(jobId: Long, definitions: util.List[JobConfDefinition]): util.Map[String, Any] = { + Option(this.streamJobConfMapper.getConfValuesByJobId(jobId)) match { + case None => new util.HashMap[String, Any]() + case Some(list: util.List[JobConfValue]) => + JobConfValueUtils.serialize(list, + Option(definitions) + .getOrElse(new util.ArrayList[JobConfDefinition]())) + } + } + + private def saveJobConfig(jobId: Long, configValues: util.List[JobConfValue]): Unit = { + trace(s"Query and lock the StreamJob in [$jobId] before saving/update configuration") + Option(streamJobMapper.queryAndLockJobById(jobId)) match { + case None => throw new ConfigurationException(s"Unable to saving/update configuration, the StreamJob [$jobId] is not exists.") + case Some(job: StreamJob) => + // Delete all configuration + this.streamJobConfMapper.deleteConfValuesByJobId(job.getId) + configValues.asScala.foreach(configValue => {{ + configValue.setJobId(job.getId) + configValue.setJobName(job.getName) + }}) + info(s"Save the job configuration size: ${configValues.size()}, jobName: ${job.getName}") + if (!configValues.isEmpty) { + // Send to save the configuration new + this.streamJobConfMapper.batchInsertValues(configValues) + } + } + } + + /** + * Supple the default value into the configuration + * @param configValues config value list + * @param definitions definitions + */ + private def 
suppleDefaultConfValue(configValues: util.List[JobConfValue], definitions: util.List[JobConfDefinition]): Unit = { + val configMark = configValues.asScala.filter(configValue => configValue.getReferDefId != null) + .map(configValue => (configValue.getReferDefId, 1)).toMap + definitions.asScala.filter(definition => definition.getLevel > 0 && StringUtils.isNotBlank(definition.getDefaultValue)) + .foreach(definition => configMark.get(definition.getId) match { + case Some(mark) => + case None => + val configValue = new JobConfValue(definition.getKey, definition.getDefaultValue, definition.getId) + configValues.add(configValue) + } + ) + } + /** + * Resolve to config value view object + * @param group group + * @param jobConfig job config + * @param definitionMap (key => definition) + */ + private def resolveConfigValueVo(group: String, jobConfig: util.Map[String, Any], + definitionMap: util.Map[String, JobConfDefinition]): util.List[JobConfValueVo] = { + Option(jobConfig.get(group)) match { + case Some(configMap: util.Map[String, Any]) => + configMap.asScala.map{ + case (key, value) => + val configValue = new JobConfValueVo(key, String.valueOf(value)) + Option(definitionMap.get(key)) match { + case Some(definition) => + configValue.setConfigkeyId(definition.getId) + configValue.setName(definition.getName) + val refValues = definition.getRefValues + if (StringUtils.isNotBlank(refValues)){ + val valueList = new util.ArrayList[ValueList]() + refValues.split(",").foreach(refValue =>{ + valueList.add(new ValueList(refValue, refValue.equals(value))) + }) + configValue.setValueLists(valueList) + } + case _ => + } + configValue + }.toList.asJava + case None => new util.ArrayList[JobConfValueVo]() + } + } + + /** + * Convert to config value entities + * @param configValueVos view object + * @param definitionMap definition map + * @param parentRef parent ref id + * @return + */ + private def convertToConfigValue(configValueVos: util.List[JobConfValueVo], + definitionMap: 
util.Map[String, JobConfDefinition], parentRef: Long): util.List[JobConfValue] = { + Option(configValueVos) match { + case Some(voList) => + voList.asScala.map(vo => { + val definition = definitionMap.get(vo.getKey) + val confValue = new JobConfValue(vo.getKey, vo.getValue, if (null == definition) parentRef else definition.getId) + confValue + }).asJava + case _ => new util.ArrayList[JobConfValue]() + } + } + +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala new file mode 100644 index 000000000..0e87a9e4c --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala @@ -0,0 +1,66 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.webank.wedatasphere.streamis.jobmanager.launcher.service + +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.JobConfValueSet + +import java.util +/** + * Job configuration service + */ +trait StreamJobConfService { + + /** + * Get all config definitions + * @return list + */ + def loadAllDefinitions(): util.List[JobConfDefinition] + + /** + * Save job configuration + * @param jobId job id + * @param valueMap value map + */ + def saveJobConfig(jobId: Long, valueMap: util.Map[String, Any]): Unit + + /** + * Query the job configuration + * @param jobId job id + * @return + */ + def getJobConfig(jobId: Long): util.Map[String, Any] + + /** + * Query the job value + * @param jobId job id + * @param configKey config key + * @return + */ + def getJobConfValue(jobId: Long, configKey: String): String + + /** + * Get job configuration value set + * @param jobId job id + * @return + */ + def getJobConfValueSet(jobId: Long): JobConfValueSet + + /** + * Save job configuration value set + * @param valueSet value set + */ + def saveJobConfValueSet(valueSet: JobConfValueSet): Unit +} diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala new file mode 100644 index 000000000..9cd088488 --- /dev/null +++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala @@ -0,0 +1,180 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.launcher.service.tools + +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.{JobConfDefinition, JobConfValue} +import org.apache.commons.lang.StringUtils + +import scala.collection.JavaConverters._ +import java.util +/** + * TODO dive into JobConfValueSerializer and JobConfValueDeserializer + */ +class JobConfValueUtils { + +} + +object JobConfValueUtils{ + /** + * Serialize the job conf values + * @return + */ + def serialize(configValues: util.List[JobConfValue], definitions: util.List[JobConfDefinition]): util.Map[String, Any] = { + // First to build a definition map + val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => { + (definition.getId.toString, definition) + }).toMap.asJava + // Init a value map to store relation of config values + val relationMap: util.Map[String, Any] = new util.HashMap[String, Any]() + configValues.asScala.foreach(keyValue => { + val refDefId = keyValue.getReferDefId + if (null != refDefId) { + Option(relationMap.get(refDefId.toString)) match { + case Some(value: util.Map[String, Any]) => { + // Put the value into relation + value.put(keyValue.getKey, keyValue.getValue) + } + case Some(value: String) => { + // Overwrite it's value + relationMap.put(refDefId.toString, keyValue.getValue) + } + case _ => + // Set the value/relation recursively + var definition = definitionMap.get(refDefId.toString) + var value: Any = if (null != definition && (StringUtils.isBlank(definition.getType) || + 
definition.getType.equalsIgnoreCase("NONE"))) { + val relation = new util.HashMap[String, Any]() + relation.put(keyValue.getKey, keyValue.getValue) + relation + } else { + keyValue.getValue + } + while (null != definition){ + value = Option(relationMap.get(definition.getId.toString)) match { + case Some(existV: util.Map[String, Any]) => { + value match { + case map: util.Map[String, Any] => + existV.putAll(map) + existV + case _ => + relationMap.put(definition.getId.toString, value) + value + } + } + case _ => + relationMap.put(definition.getId.toString, value) + value + } + Option(definition.getParentRef) match { + case Some(parentRef) => + val newValue: util.Map[String, Any] = new util.HashMap[String, Any]() + newValue.put(definition.getKey, value) + definition = definitionMap.get(parentRef.toString) + value = newValue + case _ => definition = null + } + } + } + } + }) + // Filter the root configuration + relationMap.asScala + .filter(entry=> definitionMap.get(entry._1).getLevel == 0).map{ + case (defId, value) => (definitionMap.get(defId).getKey, value) + }.asJava + + } + + /** + * Deserialize + * @param valueMap value map + * @param definitions definitions + * @return + */ + def deserialize(valueMap: util.Map[String, Any], definitions: util.List[JobConfDefinition]):util.List[JobConfValue] = { + // First to build a definition map + val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => { + (definition.getKey, definition) + }).toMap.asJava + // Configuration value list + val configValues: util.List[JobConfValue] = new util.ArrayList[JobConfValue]() + valueMap.asScala.foreach{ + case (key, value) => { + Option(definitionMap.get(key)) match { + case Some(definition) => if (definition.getLevel == 0){ + configValues.addAll(deserializeInnerObj(key, value, null, definitionMap)) + definition.setMark(true) + } + case _ => + } + } + } + configValues + } + + private def deserializeInnerObj(key: String, value: Any, parentRef: String, 
+ definitionMap: util.Map[String, JobConfDefinition]): util.List[JobConfValue] = { + val result: util.List[JobConfValue] = new util.ArrayList[JobConfValue]() + if (null != value) { + value match { + case innerMap: util.Map[String, Any] => + Option(definitionMap.get(key)) match { + case Some(definition) => + innerMap.asScala.foreach{ + case (childK, childV) => { + val childResult = deserializeInnerObj(childK, childV, + definition.getId.toString, definitionMap) + childResult.asScala.foreach(confValue => if (confValue.getReferDefId == null){ + confValue.setReferDefId(definition.getId) + }) + result.addAll(childResult) + } + } + // Mark it used + definition.setMark(true) + case _ => //ignore + } + + case other: Any => + Option(definitionMap.get(key)) match { + case Some(definition) => + if (StringUtils.isBlank(parentRef) || parentRef.equals(String.valueOf(definition.getParentRef))){ + result.add(new JobConfValue(key, String.valueOf(other), definition.getId)) + // Mark it used + definition.setMark(true) + } + case _ => result.add(new JobConfValue(key, String.valueOf(other), null)) + } + } + } + result + } +// def main(args: Array[String]): Unit = { +// val definitions: util.List[JobConfDefinition] = new util.ArrayList[JobConfDefinition]() +// val configValues: util.List[JobConfValue] = new util.ArrayList[JobConfValue]() +// definitions.add(new JobConfDefinition(0, "wds.linkis.flink.resource", "None", null, 0)) +// definitions.add(new JobConfDefinition(1, "wds.linkis.flink.custom", "None", null, 0)) +// definitions.add(new JobConfDefinition(2, "wds.linkis.flink.taskmanager.num", "NUMBER", 0, 1)) +// definitions.add(new JobConfDefinition(3, "wds.linkis.flink.jobmanager.memeory", "NUMBER", 0, 1)) +// configValues.add(new JobConfValue("wds.linkis.flink.taskmanager.num", "1", 2)) +// configValues.add(new JobConfValue("env.java.opts", "-DHADOOP_USER_NAME=hadoop", 1)) +// configValues.add(new JobConfValue("security.kerberos.login.principal", "hadoop@WEBANK.com", 1)) +// 
configValues.add(new JobConfValue("wds.linkis.flink.jobmanager.memeory", "1024", 3)) +// val result = serialize(configValues, definitions) +// println(DWSHttpClient.jacksonJson.writeValueAsString(result)) +// println(DWSHttpClient.jacksonJson.writeValueAsString(deserialize(result, definitions))) +// } +} diff --git a/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired new file mode 100644 index 000000000..dc13253b7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired @@ -0,0 +1 @@ +com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml new file mode 100644 index 000000000..cf612b7dd --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml @@ -0,0 +1,96 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.4 + ../../pom.xml + + 4.0.0 + + flink-streamis-log-collector + + + 8 + 8 + + 1.12.2 + 2.17.1 + 1.7.15 + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector + ${streamis.version} + + + + org.apache.flink + flink-java + ${flink.version} + provided + + + org.apache.flink + flink-yarn_2.11 + ${flink.version} + provided + + + + junit + junit + ${junit.version} + test + + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + + org.apache.logging.log4j + log4j-slf4j-impl + 
${log4j.version} + provided + + + + org.apache.logging.log4j + log4j-api + ${log4j.version} + provided + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + + + assemble + + single + + + install + + + + + src/main/assembly/package.xml + + false + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml new file mode 100644 index 000000000..8da27bf2c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml @@ -0,0 +1,19 @@ + + + package + + + jar + + false + + + / + true + runtime + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java new file mode 100644 index 000000000..a5459021c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java @@ -0,0 +1,130 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.filters.KeywordThresholdFilter; +import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired; +import 
org.apache.commons.lang3.StringUtils; +import org.apache.flink.configuration.Configuration; +import org.apache.flink.configuration.GlobalConfiguration; +import org.apache.flink.runtime.util.EnvironmentInformation; +import org.apache.flink.yarn.configuration.YarnConfigOptions; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.filter.LevelMatchFilter; +import org.apache.logging.log4j.core.filter.RegexFilter; +import org.apache.logging.log4j.core.filter.ThresholdFilter; + +import java.util.Enumeration; +import java.util.List; +import java.util.Properties; + +import static com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigDefine.*; + +/** + * Autoconfigure the streamis config inf Flink environment + */ +public class FlinkStreamisConfigAutowired implements StreamisConfigAutowired { + + /** + * Flink configuration + */ + private Configuration configuration; + + public FlinkStreamisConfigAutowired(){ + // First to load configuration + // We should sleep and wait for append of the flink-yaml.conf + } + @Override + public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception{ + this.configuration = loadConfiguration(); + String applicationName = + this.configuration.getString(YarnConfigOptions.APPLICATION_NAME); + if (StringUtils.isNotBlank(applicationName)){ + builder.setAppName(applicationName); + } + String gateway = this.configuration.getString(LOG_GATEWAY_ADDRESS); + if (StringUtils.isNotBlank(gateway)){ + if (gateway.endsWith("/")){ + gateway = gateway.substring(0, gateway.length() - 1); + } + gateway += this.configuration.getString(LOG_COLLECT_PATH, "/"); + builder.setRpcAddress(gateway); + } + if (builder instanceof StreamisLog4j2AppenderConfig.Builder) { + List filterStrategies = this.configuration.get(LOG_FILTER_STRATEGIES); + for (String filterStrategy : filterStrategies) { + if 
("LevelMatch".equals(filterStrategy)) { + ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(LevelMatchFilter.newBuilder().setOnMatch(Filter.Result.ACCEPT).setOnMismatch(Filter.Result.DENY) + .setLevel(Level.getLevel(this.configuration.getString(LOG_FILTER_LEVEL_MATCH))).build()); + } else if ("ThresholdMatch".equals(filterStrategy)) { + ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(ThresholdFilter.createFilter(Level + .getLevel(this.configuration.getString(LOG_FILTER_THRESHOLD_MATCH)), Filter.Result.ACCEPT, Filter.Result.DENY)); + } else if ("RegexMatch".equals(filterStrategy)) { + ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(RegexFilter.createFilter(this.configuration.getString(LOG_FILTER_REGEX), + null, true, Filter.Result.ACCEPT, Filter.Result.DENY)); + } else if ("Keyword".equals(filterStrategy)){ + ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter( + new KeywordThresholdFilter( + StringUtils.split(this.configuration.getString(LOG_FILTER_KEYWORDS), ","), + StringUtils.split(this.configuration.getString(LOG_FILTER_KEYWORDS_EXCLUDE), ","))); + } + } + } + String hadoopUser = EnvironmentInformation.getHadoopUser(); + if (hadoopUser.equals("") || hadoopUser.equals("")){ + hadoopUser = System.getProperty("user.name"); + } + return builder.setDebugMode(this.configuration.getBoolean(DEBUG_MODE)) + .setRpcConnTimeout(this.configuration.getInteger(LOG_RPC_CONN_TIMEOUT)) + .setRpcSocketTimeout(this.configuration.getInteger(LOG_RPC_SOCKET_TIMEOUT)) + .setRpcSendRetryCnt(this.configuration.getInteger(LOG_RPC_SEND_RETRY_COUNT)) + .setRpcServerRecoveryTimeInSec(this.configuration.getInteger(LOG_RPC_SERVER_RECOVERY_TIME)) + .setRpcMaxDelayTimeInSec(this.configuration.getInteger(LOG_RPC_MAX_DELAY_TIME)) + .setRpcAuthTokenCodeKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE_KEY)) + .setRpcAuthTokenUserKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER_KEY)) + 
.setRpcAuthTokenCode(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE)) + .setRpcAuthTokenUser(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER, + hadoopUser)) + .setRpcCacheSize(this.configuration.getInteger(LOG_RPC_CACHE_SIZE)) + .setRpcCacheMaxConsumeThread(this.configuration.getInteger(LOG_PRC_CACHE_MAX_CONSUME_THREAD)) + .setDiscard(this.configuration.getBoolean(LOG_RPC_CACHE_DISCARD)) + .setDiscardWindow(this.configuration.getInteger(LOG_RPC_CACHE_DISCARD_WINDOW)) + .setRpcBufferSize(this.configuration.getInteger(LOG_RPC_BUFFER_SIZE)) + .setRpcBufferExpireTimeInSec(this.configuration.getInteger(LOG_RPC_BUFFER_EXPIRE_TIME)).build(); + } + + + /** + * According to : + * String launchCommand = + * BootstrapTools.getTaskManagerShellCommand( + * flinkConfig, + * tmParams, + * ".", + * ApplicationConstants.LOG_DIR_EXPANSION_VAR, + * hasLogback, + * hasLog4j, + * hasKrb5, + * taskManagerMainClass, + * taskManagerDynamicProperties); + * the configuration directory of Flink yarn container is always ".", + * @return configuration + */ + private synchronized Configuration loadConfiguration(){ +// String configDir = System.getenv("FLINK_CONF_DIR"); +// if (null == configDir){ +// configDir = "."; +// } + String configDir = "."; + Properties properties = System.getProperties(); + Enumeration enumeration = properties.propertyNames(); + Configuration dynamicConfiguration = new Configuration(); + while(enumeration.hasMoreElements()){ + String prop = String.valueOf(enumeration.nextElement()); + dynamicConfiguration.setString(prop, properties.getProperty(prop)); + } + return GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration); + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java new file mode 100644 index 000000000..5f23ae681 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java @@ -0,0 +1,155 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink; + +import org.apache.flink.configuration.ConfigOption; +import org.apache.flink.configuration.ConfigOptions; +import scala.Int; + +import java.util.List; + +/** + * Config definition + */ +public class FlinkStreamisConfigDefine { + + /** + * Gateway address of log module for streamis + */ + public static final ConfigOption LOG_GATEWAY_ADDRESS = ConfigOptions.key("stream.log.gateway.address") + .stringType().noDefaultValue().withDescription("The gateway address ex: http://127.0.0.1:8080"); + + /** + * Entrypoint path of collecting log + */ + public static final ConfigOption LOG_COLLECT_PATH = ConfigOptions.key("stream.log.collect.path") + .stringType().defaultValue("/api/rest_j/v1/streamis/streamJobManager/log/collect/events").withDescription("The entrypoint path of collecting log"); + + /** + * Connection timeout(in milliseconds) in log RPC module + */ + public static final ConfigOption LOG_RPC_CONN_TIMEOUT = ConfigOptions.key("stream.log.rpc.connect-timeout") + .intType().defaultValue(3000).withDescription("Connection timeout(ms) in log RPC module"); + + /** + * Socket timeout(in milliseconds) in log RPC module + */ + public static final ConfigOption LOG_RPC_SOCKET_TIMEOUT = ConfigOptions.key("stream.log.rpc.socket-timeout") + .intType().defaultValue(15000).withDescription("Socket timeout(ms) in log RPC module"); + + /** + * Max retry count of sending message in log RPC module + */ + public static final ConfigOption 
LOG_RPC_SEND_RETRY_COUNT = ConfigOptions.key("stream.log.rpc.send-retry-count") + .intType().defaultValue(3).withDescription("Max retry count of sending message in log RPC module"); + + /** + * Server recovery time(in seconds) in log RPC module + */ + public static final ConfigOption LOG_RPC_SERVER_RECOVERY_TIME = ConfigOptions.key("stream.log.rpc.server-recovery-time-in-sec") + .intType().defaultValue(5).withDescription("Server recovery time(sec) in log RPC module"); + + /** + * Max delay time(in seconds) in log RPC module. if reach the limit, the message will be dropped + */ + public static final ConfigOption LOG_RPC_MAX_DELAY_TIME = ConfigOptions.key("stream.log.rpc.max-delay-time") + .intType().defaultValue(60).withDescription("Max delay time(sec) in log RPC module"); + + /** + * Token code key in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_CODE_KEY = ConfigOptions.key("stream.log.rpc.auth.token-code-key") + .stringType().defaultValue("Token-Code").withDescription("Token code key in log RPC auth module"); + + /** + * Token user key in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_USER_KEY = ConfigOptions.key("stream.log.rpc.auth.token-user-key") + .stringType().defaultValue("Token-User").withDescription("Token user key in log RPC auth module"); + + /** + * Token code in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_CODE = ConfigOptions.key("stream.log.rpc.auth.token-code") + .stringType().defaultValue("STREAM-LOG").withDescription("Token code in log RPC auth module"); + + /** + * Token user in log RPC auth module + */ + public static final ConfigOption LOG_RPC_AUTH_TOKEN_USER = ConfigOptions.key("stream.log.rpc.auth.token-user") + .stringType().defaultValue(System.getProperty("user.name")).withDescription("Token user in log RPC auth module"); + + /** + * Cache size in log RPC module + */ + public static final ConfigOption LOG_RPC_CACHE_SIZE = 
ConfigOptions.key("stream.log.rpc.cache.size") + .intType().defaultValue(150).withDescription("Cache size in log RPC module"); + + /** + * Max cache consume threads in log RPC module + */ + public static final ConfigOption LOG_PRC_CACHE_MAX_CONSUME_THREAD = ConfigOptions.key("stream.log.rpc.cache.max-consume-thread") + .intType().defaultValue(2).withDescription("Max cache consume threads in log RPC module"); + + /** + * If discard the useless log + */ + public static final ConfigOption LOG_RPC_CACHE_DISCARD = ConfigOptions.key("stream.log.rpc.cache.discard") + .booleanType().defaultValue(true).withDescription("If discard the useless log"); + + /** + * The window size of discarding + */ + public static final ConfigOption LOG_RPC_CACHE_DISCARD_WINDOW = ConfigOptions.key("stream.log.rpc.cache.discard-window") + .intType().defaultValue(2).withDescription("The window size of discarding"); + /** + * Buffer size in log RPC module + */ + public static final ConfigOption LOG_RPC_BUFFER_SIZE = ConfigOptions.key("stream.log.rpc.buffer.size") + .intType().defaultValue(50).withDescription("Buffer size in log RPC module"); + + /** + * Buffer expire time(sec) in log RPC module + */ + public static final ConfigOption LOG_RPC_BUFFER_EXPIRE_TIME = ConfigOptions.key("stream.log.rpc.buffer.expire-time-in-sec") + .intType().defaultValue(2).withDescription("Buffer expire time (sec) in log RPC module"); + + /** + * Log filter strategy list + */ + public static final ConfigOption> LOG_FILTER_STRATEGIES = ConfigOptions.key("stream.log.filter.strategies") + .stringType().asList().defaultValues("Keyword").withDescription("Log filter strategy list"); + + /** + * Level value of LevelMatch filter strategy + */ + public static final ConfigOption LOG_FILTER_LEVEL_MATCH = ConfigOptions.key("stream.log.filter.level-match.level") + .stringType().defaultValue("ERROR").withDescription("Level value of LevelMatch filter strategy"); + + /** + * Level value of ThresholdMatch filter strategy + */ + public 
static final ConfigOption LOG_FILTER_THRESHOLD_MATCH = ConfigOptions.key("stream.log.filter.threshold.level") + .stringType().defaultValue("ERROR").withDescription("Level value of ThresholdMatch filter strategy"); + /** + * Regex value of RegexMatch filter strategy + */ + public static final ConfigOption LOG_FILTER_REGEX = ConfigOptions.key("stream.log.filter.regex.value") + .stringType().defaultValue(".*").withDescription("Regex value of RegexMatch filter strategy"); + + /** + * Accept keywords of Keyword filter strategy + */ + public static final ConfigOption LOG_FILTER_KEYWORDS = ConfigOptions.key("stream.log.filter.keywords") + .stringType().defaultValue("ERROR").withDescription("Accept keywords of Keyword filter strategy"); + + /** + * Exclude keywords of Keyword filter strategy + */ + public static final ConfigOption LOG_FILTER_KEYWORDS_EXCLUDE = ConfigOptions.key("stream.log.filter.keywords.exclude") + .stringType().defaultValue("").withDescription("Exclude keywords of Keyword filter strategy"); + + /** + * Debug mode + */ + public static final ConfigOption DEBUG_MODE = ConfigOptions.key("stream.log.debug") + .booleanType().defaultValue(false).withDescription("Debug mode"); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired new file mode 100644 index 000000000..dc13253b7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired @@ -0,0 +1 @@ +com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired \ No newline at 
package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink;

import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Objects;
import java.util.Properties;

/**
 * Verifies that the Flink global configuration can be loaded from the test
 * resource directory (which holds the flink-conf.yaml fixture) with the JVM
 * system properties applied on top as dynamic configuration options.
 */
public class FlinkConfigurationLoadTest {

    private static final Logger LOG = LoggerFactory.getLogger(FlinkConfigurationLoadTest.class);

    @Test
    public void loadConfiguration() {
        // Classpath root of the test resources; contains flink-conf.yaml
        String configDir = Objects.requireNonNull(
                FlinkConfigurationLoadTest.class.getResource("/")).getFile();
        // Copy every system property into the dynamic configuration.
        // stringPropertyNames() is the type-safe replacement for the raw
        // Enumeration returned by propertyNames().
        Properties properties = System.getProperties();
        Configuration dynamicConfiguration = new Configuration();
        for (String prop : properties.stringPropertyNames()) {
            dynamicConfiguration.setString(prop, properties.getProperty(prop));
        }
        GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration);
    }
}
b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired new file mode 100644 index 000000000..dc13253b7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired @@ -0,0 +1 @@ +com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml new file mode 100644 index 000000000..e69de29bb diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml new file mode 100644 index 000000000..ee3f4125a --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + ` + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml new file mode 100644 index 000000000..643119621 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml @@ -0,0 +1,40 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.4 + ../../pom.xml + + 4.0.0 + + streamis-job-log-collector-core + + + 8 + 8 + 4.5.13 + 4.5.4 + + + + + com.webank.wedatasphere.streamis 
package com.webank.wedatasphere.streamis.jobmanager.log.collector;

/**
 * Exception listener.
 * Used by the log collector instead of the log4j error-handler structure:
 * components report internal failures here rather than logging them, which
 * would risk re-entering the appender that failed.
 */
public interface ExceptionListener {

    /**
     * Listen the exception
     * @param subject the subject (component) that throws the exception
     * @param t Throwable raised by the subject
     * @param message extra context message; may be null
     */
    void onException(Object subject, Throwable t, String message);
}
com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +import java.util.List; +import java.util.concurrent.TimeUnit; + +/** + * Log cache + * @param element + */ +public interface LogCache { + + /** + * Cache log + * @param logElement log element + */ + void cacheLog(E logElement) throws InterruptedException; + + /** + * Drain log elements into collection + * @param elements elements + * @param maxElements max elements size + * @return count + */ + int drainLogsTo(List elements, int maxElements); + + /** + * Take log element + * @return log element + */ + E takeLog(long timeout, TimeUnit unit) throws InterruptedException; + + /** + * If the cache is full + * @return + */ + boolean isCacheable(); + /** + * Release the resource + */ + void destroy(); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java new file mode 100644 index 000000000..ebf9b7f68 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java @@ -0,0 +1,86 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + + +/** + * Authentication config + */ +public class RpcAuthConfig { + /** + * Key of token-code + */ + private String tokenCodeKey = "Token-Code"; + + /** + * Key of token-user + */ + private String tokenUserKey = "Token-User"; + + /** + * Token user + */ + private String tokenUser = System.getProperty("user.name"); + + /** + * Token code + */ + private String tokenCode = "STREAM-LOG"; + + public RpcAuthConfig(){ + + } + + public RpcAuthConfig(String tokenCodeKey, String tokenCode, 
String tokenUserKey, String tokenUser){ + if (null != tokenCodeKey) { + this.tokenCodeKey = tokenCodeKey; + } + if (null != tokenCode){ + this.tokenCode = tokenCode; + } + if (null != tokenUserKey){ + this.tokenUserKey = tokenUserKey; + } + if (null != tokenUser){ + this.tokenUser = tokenUser; + } + } + + public String getTokenCodeKey() { + return tokenCodeKey; + } + + public void setTokenCodeKey(String tokenCodeKey) { + this.tokenCodeKey = tokenCodeKey; + } + + public String getTokenUserKey() { + return tokenUserKey; + } + + public void setTokenUserKey(String tokenUserKey) { + this.tokenUserKey = tokenUserKey; + } + + public String getTokenUser() { + return tokenUser; + } + + public void setTokenUser(String tokenUser) { + this.tokenUser = tokenUser; + } + + public String getTokenCode() { + return tokenCode; + } + + public void setTokenCode(String tokenCode) { + this.tokenCode = tokenCode; + } + + @Override + public String toString() { + return "RpcAuthConfig{" + + ", tokenUserKey='" + tokenUserKey + '\'' + + ", tokenUser='" + tokenUser + '\'' + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java new file mode 100644 index 000000000..0fb03185f --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java @@ -0,0 +1,180 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + + +import java.util.Objects; + +/** + * Rpc sender configuration + */ +public class RpcLogSenderConfig { + + /** + * Send address + */ + private String address; + + /** + * 
Timeout of connecting + */ + private int connectionTimeout = 3000; + + /** + * Timeout of reading from socket + */ + private int socketTimeout = 15000; + + /** + * Retry count of sending + */ + private int sendRetryCnt = 3; + + /** + * The time for server recovery + */ + private int serverRecoveryTimeInSec = 5; + + /** + * Retry max delay time of sender + */ + private int maxDelayTimeInSec = 60; + + /** + * If open debug mode + */ + private boolean debugMode = false; + /** + * Auth config + */ + private RpcAuthConfig authConfig = new RpcAuthConfig(); + + /** + * Cache config + */ + private SendLogCacheConfig cacheConfig = new SendLogCacheConfig(); + + /** + * Buffer config + */ + private SendBufferConfig bufferConfig = new SendBufferConfig(); + + public RpcLogSenderConfig(){ + + } + + public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout, + int serverRecoveryTimeInSec, int maxDelayTimeInSec, + RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig){ + this.address = address; + this.sendRetryCnt = sendRetryCnt; + this.connectionTimeout = connectionTimeout; + this.socketTimeout = socketTimeout; + this.serverRecoveryTimeInSec = serverRecoveryTimeInSec; + this.maxDelayTimeInSec = maxDelayTimeInSec; + if (Objects.nonNull(authConfig)){ + this.authConfig = authConfig; + } + if (Objects.nonNull(cacheConfig)){ + this.cacheConfig = cacheConfig; + } + if (Objects.nonNull(bufferConfig)){ + this.bufferConfig = bufferConfig; + } + } + + public RpcAuthConfig getAuthConfig() { + return authConfig; + } + + public void setAuthConfig(RpcAuthConfig authConfig) { + this.authConfig = authConfig; + } + + public SendLogCacheConfig getCacheConfig() { + return cacheConfig; + } + + public void setCacheConfig(SendLogCacheConfig cacheConfig) { + this.cacheConfig = cacheConfig; + } + + public SendBufferConfig getBufferConfig() { + return bufferConfig; + } + + public void setBufferConfig(SendBufferConfig 
bufferConfig) { + this.bufferConfig = bufferConfig; + } + + public String getAddress() { + return address; + } + + public void setAddress(String address) { + this.address = address; + } + + public int getSendRetryCnt() { + return sendRetryCnt; + } + + public void setSendRetryCnt(int sendRetryCnt) { + this.sendRetryCnt = sendRetryCnt; + } + + public int getConnectionTimeout() { + return connectionTimeout; + } + + public void setConnectionTimeout(int connectionTimeout) { + this.connectionTimeout = connectionTimeout; + } + + public int getSocketTimeout() { + return socketTimeout; + } + + public void setSocketTimeout(int socketTimeout) { + this.socketTimeout = socketTimeout; + } + + public int getMaxDelayTimeInSec() { + return maxDelayTimeInSec; + } + + public void setMaxDelayTimeInSec(int maxDelayTimeInSec) { + this.maxDelayTimeInSec = maxDelayTimeInSec; + } + + public int getServerRecoveryTimeInSec() { + return serverRecoveryTimeInSec; + } + + public void setServerRecoveryTimeInSec(int serverRecoveryTimeInSec) { + this.serverRecoveryTimeInSec = serverRecoveryTimeInSec; + } + + public boolean isDebugMode() { + return debugMode; + } + + public void setDebugMode(boolean debugMode) { + this.debugMode = debugMode; + } + + @Override + public String toString() { + return "RpcLogSenderConfig{" + + "address='" + address + '\'' + + ", connectionTimeout=" + connectionTimeout + + ", socketTimeout=" + socketTimeout + + ", sendRetryCnt=" + sendRetryCnt + + ", serverRecoveryTimeInSec=" + serverRecoveryTimeInSec + + ", maxDelayTimeInSec=" + maxDelayTimeInSec + + ", authConfig=" + authConfig + + ", cacheConfig=" + cacheConfig + + ", bufferConfig=" + bufferConfig + + ", debug=" + debugMode + + '}'; + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java new file mode 100644 index 000000000..6be0ae826 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java @@ -0,0 +1,47 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + + +public class SendBufferConfig { + /** + * Size of send buffer + */ + private int size = 50; + + /** + * Expire time of send buffer + */ + private long expireTimeInSec = 2; + + public SendBufferConfig(){ + + } + + public SendBufferConfig(int size, long expireTimeInSec){ + this.size = size; + this.expireTimeInSec = expireTimeInSec; + } + + public int getSize() { + return size; + } + + public void setSize(int size) { + this.size = size; + } + + public long getExpireTimeInSec() { + return expireTimeInSec; + } + + public void setExpireTimeInSec(long expireTimeInSec) { + this.expireTimeInSec = expireTimeInSec; + } + + @Override + public String toString() { + return "SendBufferConfig{" + + "size=" + size + + ", expireTimeInSec=" + expireTimeInSec + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java new file mode 100644 index 000000000..e40a630c7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java @@ -0,0 +1,81 @@ +package 
com.webank.wedatasphere.streamis.jobmanager.log.collector.config; + +/** + * Cache config + */ + +public class SendLogCacheConfig { + /** + * Size of send cache + */ + private int size = 150; + + /** + * Max number of consuming thread + */ + private int maxConsumeThread = 2; + + /** + * The switch to discard log + */ + private boolean discard = true; + + /** + * Discard window in second + */ + private int discardWindow = 2; + + public SendLogCacheConfig(){ + + } + + public SendLogCacheConfig(int size, int maxConsumeThread){ + this.size = size; + this.maxConsumeThread = maxConsumeThread; + } + + public int getSize() { + return size; + } + + public void setSize(int size) { + this.size = size; + } + + public int getMaxConsumeThread() { + return maxConsumeThread; + } + + public void setMaxConsumeThread(int maxConsumeThread) { + this.maxConsumeThread = maxConsumeThread; + } + + public boolean isDiscard() { + return discard; + } + + public void setDiscard(boolean discard) { + this.discard = discard; + } + + public int getDiscardWindow() { + return discardWindow; + } + + public void setDiscardWindow(int discardWindow) { + this.discardWindow = discardWindow; + } + + @Override + public String toString() { + return "SendLogCacheConfig{" + + "size=" + size + + ", maxConsumeThread=" + maxConsumeThread + + ", discard=" + discard + + ", discardWindow=" + discardWindow + + '}'; + } + + + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java new file mode 100644 index 000000000..76fbd0c91 --- /dev/null +++ 
package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;

import java.util.ArrayList;
import java.util.List;
import java.util.Optional;

/**
 * Appender config: aggregates the application (job) name, the RPC log sender
 * configuration and the message filters used by a Streamis log appender.
 * Instances are built through {@link Builder}.
 */
public class StreamisLogAppenderConfig {

    /** Application (job) name the collected logs belong to */
    protected final String applicationName;

    /** RPC sender configuration (never null) */
    protected final RpcLogSenderConfig senderConfig;

    /** Message filters applied to each formatted log line (never null) */
    protected final List<LogMessageFilter> messageFilters;

    protected StreamisLogAppenderConfig(String applicationName, RpcLogSenderConfig rpcLogSenderConfig,
                                        List<LogMessageFilter> messageFilters) {
        this.applicationName = applicationName;
        this.senderConfig = null != rpcLogSenderConfig ? rpcLogSenderConfig : new RpcLogSenderConfig();
        // Fixed: null-guard for consistency with senderConfig, so that
        // getMessageFilters() never hands a null list to iterating callers
        this.messageFilters = null != messageFilters ? messageFilters : new ArrayList<>();
    }

    public static class Builder {
        /** Application name */
        protected String applicationName;

        /** Sender config (never null) */
        protected final RpcLogSenderConfig rpcLogSenderConfig;

        /** Message filters collected so far */
        protected final List<LogMessageFilter> messageFilters = new ArrayList<>();

        public Builder(String applicationName, RpcLogSenderConfig rpcLogSenderConfig) {
            this.applicationName = applicationName;
            this.rpcLogSenderConfig = Optional.ofNullable(rpcLogSenderConfig).orElse(new RpcLogSenderConfig());
        }

        /**
         * Set application name
         * @param applicationName application name
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setAppName(String applicationName) {
            this.applicationName = applicationName;
            return this;
        }

        /**
         * Rpc address
         * @param address address
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcAddress(String address) {
            this.rpcLogSenderConfig.setAddress(address);
            return this;
        }

        /**
         * Rpc connect timeout
         * @param connectionTimeout connection timeout (ms)
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcConnTimeout(int connectionTimeout) {
            this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout);
            return this;
        }

        /**
         * Rpc socket timeout
         * @param socketTimeout socket timeout (ms)
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcSocketTimeout(int socketTimeout) {
            this.rpcLogSenderConfig.setSocketTimeout(socketTimeout);
            return this;
        }

        /**
         * Rpc send retry count
         * @param sendRetryCnt send retry count
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcSendRetryCnt(int sendRetryCnt) {
            this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt);
            return this;
        }

        /**
         * Rpc server recovery time in seconds
         * @param serverRecoveryTimeInSec server recovery time
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec) {
            this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec);
            return this;
        }

        /**
         * Rpc max delay time in seconds
         * @param maxDelayTimeInSec max delay time in seconds
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcMaxDelayTimeInSec(int maxDelayTimeInSec) {
            this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec);
            return this;
        }

        /**
         * Rpc auth token code key
         * @param tokenCodeKey key of token code
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcAuthTokenCodeKey(String tokenCodeKey) {
            this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey);
            return this;
        }

        /**
         * Rpc auth token user key
         * @param tokenUserKey key of token user
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcAuthTokenUserKey(String tokenUserKey) {
            this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey);
            return this;
        }

        /**
         * Rpc auth token user
         * @param tokenUser token user
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcAuthTokenUser(String tokenUser) {
            this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser);
            return this;
        }

        /**
         * Rpc auth token code
         * @param tokenCode token code
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcAuthTokenCode(String tokenCode) {
            this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode);
            return this;
        }

        /**
         * Rpc cache size
         * @param cacheSize cache size
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcCacheSize(int cacheSize) {
            this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize);
            return this;
        }

        /**
         * Rpc cache max consume thread
         * @param maxConsumeThread max consume thread
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcCacheMaxConsumeThread(int maxConsumeThread) {
            this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread);
            return this;
        }

        /**
         * Rpc buffer size
         * @param bufferSize buffer size
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcBufferSize(int bufferSize) {
            this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize);
            return this;
        }

        /**
         * Rpc buffer expire time in seconds
         * @param expireTimeInSec expire time
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setRpcBufferExpireTimeInSec(int expireTimeInSec) {
            this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec);
            return this;
        }

        /**
         * Add log message filter
         * @param messageFilter message filter
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder withMessageFilter(LogMessageFilter messageFilter) {
            this.messageFilters.add(messageFilter);
            return this;
        }

        /**
         * Set to discard the useless log
         * @param discard whether discarding is enabled
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setDiscard(boolean discard) {
            this.rpcLogSenderConfig.getCacheConfig().setDiscard(discard);
            return this;
        }

        /**
         * Set the window size of discarding
         * @param windowSize window size in seconds
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setDiscardWindow(int windowSize) {
            this.rpcLogSenderConfig.getCacheConfig().setDiscardWindow(windowSize);
            return this;
        }

        /**
         * Switch to debug
         * @param debugMode debug mode
         * @return builder
         */
        public StreamisLogAppenderConfig.Builder setDebugMode(boolean debugMode) {
            this.rpcLogSenderConfig.setDebugMode(debugMode);
            return this;
        }

        public StreamisLogAppenderConfig build() {
            return new StreamisLogAppenderConfig(applicationName, rpcLogSenderConfig, messageFilters);
        }
    }

    public String getApplicationName() {
        return applicationName;
    }

    public RpcLogSenderConfig getSenderConfig() {
        return senderConfig;
    }

    public List<LogMessageFilter> getMessageFilters() {
        return messageFilters;
    }
}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;

import java.lang.reflect.Field;
import java.util.Arrays;
import java.util.Comparator;
import java.util.regex.Pattern;

/**
 * Message filter of keyword: accepts a message when it contains any of the
 * accept keywords and none of the exclude keywords.
 */
public class KeywordMessageFilter implements LogMessageFilter {

    /** Accept keywords */
    private final String[] acceptKeywords;

    /** Regex pattern compiled from the accept keywords */
    private Pattern acceptPattern;

    /** Exclude keywords */
    private final String[] excludeKeywords;

    /** Regex pattern compiled from the exclude keywords */
    private Pattern excludePattern;

    /** Flags for pattern (bitwise OR of java.util.regex.Pattern flag constants) */
    private int patternFlag = 0;

    public KeywordMessageFilter(String[] acceptKeywords, String[] excludeKeywords) {
        this(acceptKeywords, excludeKeywords, null);
    }

    /**
     * @param acceptKeywords keywords that must match (null/empty = accept all)
     * @param excludeKeywords keywords that must not match (null/empty = exclude none)
     * @param patternFlags names of Pattern flag constants, e.g. "CASE_INSENSITIVE"
     */
    public KeywordMessageFilter(String[] acceptKeywords, String[] excludeKeywords, String[] patternFlags) {
        this.acceptKeywords = acceptKeywords;
        this.excludeKeywords = excludeKeywords;
        try {
            this.patternFlag = toPatternFlags(patternFlags);
        } catch (IllegalAccessException e) {
            // Ignore: fall back to no flags
        }
        // Build regex patterns lazily only when keywords were supplied
        if (acceptKeywords != null && acceptKeywords.length > 0) {
            this.acceptPattern = toMatchPattern(acceptKeywords, this.patternFlag);
        }
        if (excludeKeywords != null && excludeKeywords.length > 0) {
            this.excludePattern = toMatchPattern(excludeKeywords, this.patternFlag);
        }
    }

    @Override
    public boolean doFilter(String logger, String message) {
        boolean accept = true;
        if (null != acceptPattern) {
            accept = acceptPattern.matcher(message).find();
        }
        if (accept && null != excludePattern) {
            accept = !excludePattern.matcher(message).find();
        }
        return accept;
    }

    /**
     * Convert keywords to an alternation pattern.
     * Fixed: each keyword is wrapped with {@link Pattern#quote(String)} so
     * regex metacharacters inside a keyword (e.g. "C++", "a(b") are matched
     * literally instead of throwing a PatternSyntaxException at construction.
     * @param keywords keyword array
     * @param flag pattern flag
     * @return Regex pattern
     */
    protected Pattern toMatchPattern(String[] keywords, int flag) {
        StringBuilder patternStr = new StringBuilder("(");
        for (int i = 0; i < keywords.length; i++) {
            patternStr.append(Pattern.quote(keywords[i]));
            if (i != keywords.length - 1) {
                patternStr.append("|");
            }
        }
        patternStr.append(")");
        return Pattern.compile(patternStr.toString(), flag);
    }

    /**
     * Resolve flag names (e.g. "CASE_INSENSITIVE") to their int value by
     * reflecting over the public constants declared on java.util.regex.Pattern;
     * unknown names are silently ignored.
     * @param patternFlags flag string array
     * @return int value (0 when no flags are given)
     * @throws IllegalAccessException if a matched field is not accessible
     */
    private static int toPatternFlags(final String[] patternFlags) throws IllegalArgumentException,
            IllegalAccessException {
        if (patternFlags == null || patternFlags.length == 0) {
            return 0;
        }
        final Field[] fields = Pattern.class.getDeclaredFields();
        final Comparator<Field> comparator = Comparator.comparing(Field::getName);
        // Sort by name so binarySearch below is valid
        Arrays.sort(fields, comparator);
        final String[] fieldNames = new String[fields.length];
        for (int i = 0; i < fields.length; i++) {
            fieldNames[i] = fields[i].getName();
        }
        int flags = 0;
        for (final String test : patternFlags) {
            final int index = Arrays.binarySearch(fieldNames, test);
            if (index >= 0) {
                final Field field = fields[index];
                flags |= field.getInt(Pattern.class);
            }
        }
        return flags;
    }

    public final String[] getAcceptKeywords() {
        return this.acceptKeywords;
    }

    public final String[] getExcludeKeywords() {
        return this.excludeKeywords;
    }
}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;

/**
 * Interface for adaptor: implemented by components (e.g. appenders) that can
 * expose the message filter they were configured with.
 */
public interface LogMessageFilterAdapter {

    /**
     * Message filter
     * @return filter held by this adaptor
     */
    LogMessageFilter getLogMessageFilter();
}
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.ImmutableSendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +import java.util.*; +import java.util.concurrent.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.concurrent.locks.Condition; +import java.util.concurrent.locks.ReentrantLock; + +/** + * Abstract rpc log sender + * @param + * @param + */ +public abstract class AbstractRpcLogSender implements RpcLogSender{ + + /** + * Size of log cache + */ + int cacheSize; + + /** + * The buffer size of sender + */ + int sendBufSize; + + /** + * Max thread num of send + */ + int maxCacheConsume; + + /** + * Connect config + */ + protected RpcLogSenderConfig rpcSenderConfig; + + /** + * Rpc log context + */ + private volatile RpcLogContext rpcLogContext; + + protected boolean isTerminated = false; + /** + * Use the listener instead of log4j structure + */ + protected ExceptionListener exceptionListener; + + public AbstractRpcLogSender(RpcLogSenderConfig rpcSenderConfig){ + this.rpcSenderConfig = rpcSenderConfig; + SendLogCacheConfig cacheConfig = rpcSenderConfig.getCacheConfig(); + this.cacheSize = cacheConfig.getSize(); + this.maxCacheConsume = cacheConfig.getMaxConsumeThread(); + this.sendBufSize = rpcSenderConfig.getBufferConfig().getSize(); + if (sendBufSize > cacheSize) { + throw new IllegalArgumentException("Size of send buffer is larger than cache size"); + } + + } + + @Override + public LogCache getOrCreateLogCache() { + return getOrCreateRpcLogContext().getLogCache(); + } + + 
@Override + public void sendLog(T log) { + // Just send it into log cache + try { + getOrCreateLogCache().cacheLog(log); + } catch (InterruptedException e) { + // Invoke exception listener + Optional.ofNullable(exceptionListener).ifPresent(listener -> + listener.onException(this, e, null)); + } + } + + @Override + public void syncSendLog(T log) { + + } + + @Override + public void setExceptionListener(ExceptionListener listener) { + this.exceptionListener = listener; + } + + @Override + public void close() { + getOrCreateRpcLogContext().destroyCacheConsumers(); + this.isTerminated = true; + } + + /** + * Aggregate send buffer for sending + * @param sendBuffer send buffer + * @return E aggregated entity + */ + protected abstract E aggregateBuffer(SendBuffer sendBuffer); + + /** + * Sending operation + * @param aggregatedEntity agg entity + * @param rpcSenderConfig rpc sender config + */ + protected abstract void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws Exception; + + /** + * Send log exception strategy + * @return exception strategy + */ + protected abstract SendLogExceptionStrategy getSendLogExceptionStrategy(); + + protected RpcLogContext getOrCreateRpcLogContext(){ + if (null == this.rpcLogContext){ + synchronized (this){ + if (null == this.rpcLogContext){ + // Use fair lock + SendLogCache logCache = new QueuedSendLogCache(this.cacheSize, + this.rpcSenderConfig.getCacheConfig().isDiscard(), + this.rpcSenderConfig.getCacheConfig().getDiscardWindow() * 1000,false); + this.rpcLogContext = new RpcLogContext(logCache); + // Start cache consumers + for(int i = 0; i < maxCacheConsume; i++) { + this.rpcLogContext.startCacheConsumer(); + } + } + } + + } + return this.rpcLogContext; + } + + private class RpcLogContext{ + + private static final String RPC_LOG_CACHE_CONSUMER = "RpcLog-Cache-Consumer-Thread-"; + /** + * Send log cache + */ + private final SendLogCache logCache; + + /** + * Consume pool + */ + private final ThreadPoolExecutor 
consumePool; + + /** + * Count of the consumers + */ + private int consumers = 0; + + /** + * Futures of consumers + */ + private final LinkedList> sendLogCacheConsumers = new LinkedList<>(); + /** + * Context lock + */ + private final ReentrantLock ctxLock; + public RpcLogContext(SendLogCache logCache){ + this.logCache = logCache; + this.ctxLock = new ReentrantLock(); + this.consumePool = new ThreadPoolExecutor(0, maxCacheConsume, + 60L, TimeUnit.SECONDS, + new SynchronousQueue<>(), new ThreadFactory() { + private final ThreadGroup group = Thread.currentThread().getThreadGroup(); + private final AtomicInteger threadNum = new AtomicInteger(1); + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(group, r, RPC_LOG_CACHE_CONSUMER + + threadNum.getAndIncrement(), 0); + if (t.isDaemon()) { + t.setDaemon(false); + } + if (t.getPriority() != Thread.NORM_PRIORITY) { + t.setPriority(Thread.NORM_PRIORITY); + } + return t; + } + }); + } + + public boolean startCacheConsumer(){ + if (consumers >= maxCacheConsume) { +// throw new IllegalStateException("Over the limit number of cache consumers: [" + maxCacheConsume + "]"); + return false; + } + this.ctxLock.lock(); + try { + if (consumers < maxCacheConsume) { + String id = UUID.randomUUID().toString(); + SendBuffer sendBuffer = new ImmutableSendBuffer<>(sendBufSize); + SendLogCacheConsumer consumer = new SendLogCacheConsumer(id, logCache, sendBuffer, rpcSenderConfig) { + @Override + protected void onFlushAndSend(SendBuffer sendBuffer) { + // First to aggregate the buffer + E aggEntity = aggregateBuffer(sendBuffer); + Optional.ofNullable(getSendLogExceptionStrategy()).ifPresent( + strategy -> strategy.doSend(() -> { + doSend(aggEntity, rpcSenderConfig); + return null; + }, sendBuffer)); + } + }; + Future future = this.consumePool.submit(consumer); + consumer.setFuture(future); + sendLogCacheConsumers.add(consumer); + this.consumers++; + return true; + } + } finally { + this.ctxLock.unlock(); + } + return 
false; + } + + public SendLogCache getLogCache(){ + return this.logCache; + } + + /** + * Destroy cache consumer(select the tail one) + */ + public boolean destroyCacheConsumer(){ + if (this.consumers <= 1){ + return false; + } + this.ctxLock.lock(); + try { + if (this.consumers > 1 && this.sendLogCacheConsumers.size() > 1) { + SendLogCacheConsumer consumer = sendLogCacheConsumers.removeLast(); + consumer.shutdown(); + this.consumers --; + return true; + } + } finally { + this.ctxLock.unlock(); + } + return false; + } + + /** + * Destroy all the consumers + */ + public void destroyCacheConsumers(){ + this.ctxLock.lock(); + try { + sendLogCacheConsumers.forEach(SendLogCacheConsumer::shutdown); + sendLogCacheConsumers.clear(); + this.consumers = 0; + } finally { + this.ctxLock.unlock(); + } + } + } + /** + * Act as ArrayBlockingQueue (jdk 1.8) + */ + private class QueuedSendLogCache implements SendLogCache{ + + // Queued items + final Object[] items; + + // Take index + int takeIndex; + + // Put index + int putIndex; + + // Count + int count; + + // Wait time in caching + final AtomicLong cacheWaitTime = new AtomicLong(0); + + // Wait time in taking + final AtomicLong takeWaitTime = new AtomicLong(0); + + // Performance of processing + final AtomicLong process = new AtomicLong(0); + + // Control flow + final AtomicLong control = new AtomicLong(Long.MAX_VALUE - 1); + + // If enable to discard log + boolean discard; + + int discardCount = 0; + + // Time clock + long clock = System.currentTimeMillis(); + + // interval to control + long controlInterval = 1 * 1000; + + // Reentrant lock + final ReentrantLock lock; + + // Condition for waiting takes + private final Condition notEmpty; + + // Condition for waiting puts(cacheLog) + private final Condition notFull; + + public QueuedSendLogCache(int capacity, boolean discard, int discardWind, boolean fair) { + this.items = new Object[capacity]; + lock = new ReentrantLock(fair); + this.notEmpty = lock.newCondition(); + 
this.notFull = lock.newCondition(); + this.discard = discard; + // Make the discard window size as the control interval + this.controlInterval = discardWind; + this.clock = System.currentTimeMillis() + controlInterval; + } + + @Override + public void cacheLog(T logElement) throws InterruptedException { + // Skip the null element + if (Objects.nonNull(logElement)){ + final ReentrantLock lock = this.lock; + boolean tryLock = lock.tryLock(); + if (!tryLock){ + lock.lockInterruptibly(); + } + try{ + flowControl(); + if (discard && control.decrementAndGet() <= 0){ + if (logElement.mark() < 2){ + discardCount++; + return; + } + } + while (count == items.length){ +// System.out.println("The queue is full, maybe lost the data"); + long ws = System.currentTimeMillis(); + notFull.await(); + cacheWaitTime.addAndGet(System.currentTimeMillis() - ws); + } + enqueue(logElement); + }finally{ + lock.unlock(); + } + } + } + + @Override + public int drainLogsTo(List elements, int maxElements) { + if (Objects.nonNull(elements) && maxElements > 0){ + final Object[] items = this.items; + final ReentrantLock lock = this.lock; + lock.lock(); + try{ + int n = Math.min(maxElements, count); + int take = takeIndex; + int i = 0; + try { + while (i < n){ + @SuppressWarnings("unchecked") + T x = (T) items[take]; + elements.add(x); + items[take] = null; + if (++ take == items.length) + take = 0; + i++; + } + return n; + }finally { + restoreInvariants(i, take, false); + } + } finally { + lock.unlock(); + } + } + return 0; + } + + // Equal to the poll method in ArrayBlockingQueue + @Override + public T takeLog(long timeout, TimeUnit unit) throws InterruptedException { + long nanos = unit.toNanos(timeout); + final ReentrantLock lock = this.lock; + T element; + lock.lockInterruptibly(); + try{ + flowControl(); + while (count == 0){ + long ws = System.currentTimeMillis(); + if (nanos <= 0){ + return null; + } + nanos = notEmpty.awaitNanos(nanos); + takeWaitTime.addAndGet(System.currentTimeMillis() - 
ws); + } + element = dequeue(); + process.incrementAndGet(); + } finally { + lock.unlock(); + } + return element; + } + + @Override + public boolean isCacheable() { + final ReentrantLock lock = this.lock; + lock.lock(); + try { + return count < items.length; + }finally { + lock.unlock(); + } + } + + // The same as the clear() method, + @Override + public void destroy() { + final Object[] items = this.items; + final ReentrantLock lock = this.lock; + lock.lock(); + try { + int k = count; + if (k > 0) { + final int putIndex = this.putIndex; + int i = takeIndex; + do { + items[i] = null; + if (++i == items.length) + i = 0; + } while (i != putIndex); + takeIndex = putIndex; + count = 0; + for (; k > 0 && lock.hasWaiters(notFull); k--) + notFull.signal(); + } + } finally { + lock.unlock(); + } + } + + /** + * Drain the elements into send buffer + * @param sendBuffer send buffer + * @param maxElements max element size + * @return int + */ + @Override + public int drainLogsTo(SendBuffer sendBuffer, int maxElements) { + if (Objects.nonNull(sendBuffer) && maxElements > 0){ + final Object[] items = this.items; + final ReentrantLock lock = this.lock; + lock.lock(); + try{ + flowControl(); + int n = Math.min(maxElements, count); + int take = takeIndex; + int i = 0; + int send; + try { + while (n > 0) { + int len = items.length - take; + int send0 = Math.min(n, len); + // Copy the array element to buffer directly + send = sendBuf(sendBuffer, this.items, take, send0); + n -= send; + if ((take = take + send) >= items.length) { + take = 0; + } + i += send; + if (send < send0 || send <= 0) { + break; + } + } + process.addAndGet(i); + return i; + } finally { + if (i > 0){ + restoreInvariants(i, take, true); + } + } + }finally { + lock.unlock(); + } + } + return 0; + } + + @SuppressWarnings("unchecked") + private int sendBuf(SendBuffer sendBuffer, Object[] items, int takeIndex, int len){ + int send = sendBuffer.writeBuf(items, takeIndex, len); + if (send < len){ + // Buffer full 
exception + exceptionListener.onException(this, null, "The sender buffer is full," + + " expected: [" + len + "], actual: [" + send + "]"); + } + // Allow data loss + return send; + } + + private void restoreInvariants(int i, int take, boolean clearItems){ + this.count -= i; + if (clearItems){ + int index = this.takeIndex; + int j = i; + for (; j > 0; j --){ + this.items[index] = null; + if (++index == items.length){ + index = 0; + } + } + //At last index equals take + } + this.takeIndex = take; + for (; i > 0 && lock.hasWaiters(notFull); i--){ + notFull.signal(); + } + } + // Inserts element at current put position, advances, and signals. Call only when holding lock. + private void enqueue(T element){ + this.items[putIndex] = element; + if (++putIndex >= items.length){ + putIndex = 0; + } + count ++; + notEmpty.signal(); + } + + // Extracts element at current take position, advances, and signals. Call only when holding lock. + private T dequeue(){ + @SuppressWarnings("unchecked") + T element = (T)this.items[takeIndex]; + this.items[takeIndex] = null; + if ( ++ takeIndex == items.length){ + this.takeIndex = 0; + } + count --; + // Not need to support iterator + notFull.signal(); + return element; + } + + /** + * Flow control + */ + private void flowControl(){ + long ws = System.currentTimeMillis(); + if (clock <= ws) { + long interval = ws - clock + controlInterval; + clock = ws + controlInterval; + if (rpcSenderConfig.isDebugMode()) { + System.out.println("cacheWait: " + cacheWaitTime.get() + ", takeWait:" + takeWaitTime.get() + ", discarded: " + discardCount); + } + if (takeWaitTime.get() <= 0 && process.get() > 0){ + this.control.set((long) ((double)process.get() * ((double)controlInterval / (double)interval))); + if (rpcSenderConfig.isDebugMode()) { + System.out.println("new window control: " + this.control.get()); + } + } else { + this.control.set(Long.MAX_VALUE); + } + cacheWaitTime.set(0); + takeWaitTime.set(0); + process.set(0); + discardCount = 0; + } + } 
+ } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java new file mode 100644 index 000000000..8254f0a34 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java @@ -0,0 +1,39 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.ExceptionListener; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +/** + * Rpc Log sender + */ +public interface RpcLogSender { + + /** + * Produce log cache + * @return log cache + */ + LogCache getOrCreateLogCache(); + + /** + * Send log (async) + * @param log log element + */ + void sendLog(T log); + + /** + * Send log (sync) + * @param log log element + */ + void syncSendLog(T log); + + /** + * Exception listener + * @param listener listener + */ + void setExceptionListener(ExceptionListener listener); + /** + * Close sender + */ + void close(); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java new file mode 100644 index 000000000..200c573d9 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +/** + * Send log cache + * @param <T> log element type + */ +public interface SendLogCache<T extends LogElement> extends LogCache<T> { + + /** + * Drain the logs into send buffer + * @param sendBuffer send buffer + * @param maxElements max element size + * @return count + */ + int drainLogsTo(SendBuffer<T> sendBuffer, int maxElements); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java new file mode 100644 index 000000000..fac98b90a --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java @@ -0,0 +1,128 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; + +/** + * Send log consumer + * Consume the log elements from cache and put into
send buffer + * @param + */ +public abstract class SendLogCacheConsumer implements Runnable{ + + private boolean isTerminated = false; + + /** + * Buffer expire time in milliseconds + */ + private final long bufferExpireTimeInMills; + /** + * Send log cache + */ + private final SendLogCache cache; + + /** + * Send buffer + */ + private final SendBuffer sendBuffer; + + private final String id; + + /** + * Future for execution + */ + private Future future; + + public SendLogCacheConsumer(String id, SendLogCache cache, + SendBuffer sendBuffer, + RpcLogSenderConfig rpcSenderConfig){ + this.id = id; + this.cache = cache; + this.sendBuffer = sendBuffer; + long expireTimeInSec = rpcSenderConfig.getBufferConfig().getExpireTimeInSec(); + this.bufferExpireTimeInMills = expireTimeInSec > 0 ? TimeUnit.SECONDS + .toMillis(expireTimeInSec) : -1; + + } + + @Override + public void run() { + int remain; + long expireTimeInMills = requireNewFlushTime(); + int capacity = sendBuffer.capacity(); + while (!this.isTerminated) { + try { + remain = this.sendBuffer.remaining(); + if ((expireTimeInMills > 0 && expireTimeInMills <= System.currentTimeMillis()) || remain <= 0) { + // Transient to the read mode + if (remain < capacity) { + sendBuffer.flip(); + onFlushAndSend(sendBuffer); + } + expireTimeInMills = requireNewFlushTime(); + if (sendBuffer.isReadMode()) { + // Clear the buffer and transient to the write mode, otherwise continue writing + sendBuffer.clear(); + } + remain = this.sendBuffer.remaining(); + } + if (remain > 0) { + int inBuf = this.cache.drainLogsTo(sendBuffer, remain); + if (inBuf < remain) { + // Means that the cache is empty, take and wait the log element + long waitTime = expireTimeInMills - System.currentTimeMillis(); + if (waitTime > 0) { + T logElement = this.cache.takeLog(waitTime, TimeUnit.MILLISECONDS); + if (null != logElement) { + sendBuffer.writeBuf(logElement); + } + } + } + } + } catch (Throwable e){ + if (this.isTerminated && e instanceof 
InterruptedException){ + return; + } else { + e.printStackTrace(); + System.err.println("SendLogCacheConsumer[" + Thread.currentThread().getName() + "] occurred exception [" + e.getLocalizedMessage() + "]"); + // For the unknown exception clear the cache + sendBuffer.clear(); + expireTimeInMills = requireNewFlushTime(); + } + try { + Thread.sleep(500); + } catch (InterruptedException ex) { + // Ignore + } + } + } + } + + public void shutdown(){ + this.isTerminated = true; + if (null != this.future){ + this.future.cancel(true); + } + } + + public Future getFuture() { + return future; + } + + public void setFuture(Future future) { + this.future = future; + } + + private long requireNewFlushTime(){ + return bufferExpireTimeInMills > 0 ? System.currentTimeMillis() + bufferExpireTimeInMills : -1; + } + /** + * When the buffer is full or reach the idle time, invoke the method + * @param sendBuffer send buffer + */ + protected abstract void onFlushAndSend(SendBuffer sendBuffer); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java new file mode 100644 index 000000000..d33b7d2e4 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java @@ -0,0 +1,61 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +import java.util.Objects; +import java.util.concurrent.Callable; + +/** + * 
Strategy control the action on exception + */ +public abstract class SendLogExceptionStrategy { + + protected final RpcLogSender sender; + + public SendLogExceptionStrategy(RpcLogSender sender){ + this.sender = sender; + } + /** + * Retry count + * @return retry + */ + public abstract int retryCount(); + + /** + * + * @param e exception + * @return boolean + */ + public abstract RetryDescription onException(Exception e, SendBuffer sendBuffer); + + V doSend(Callable sendOperation, SendBuffer sendBuffer){ + int retryCount = retryCount(); + int count = 0; + RetryDescription retryDescription; + while (++count <= retryCount) { + try { + return sendOperation.call(); + } catch (Exception e) { + retryDescription = onException(e, sendBuffer); + if (Objects.isNull(retryDescription) || !retryDescription.canRetry) { + break; + } + } + } + return null; + } + + protected static class RetryDescription{ + + private final boolean canRetry; + + public RetryDescription(boolean canRetry){ + this.canRetry = canRetry; + } + + public boolean isCanRetry() { + return canRetry; + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java new file mode 100644 index 000000000..d019c29f5 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java @@ -0,0 +1,45 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender; + + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import 
com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.AbstractHttpLogSender; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents; + +/** + * Log sender for streamis + */ +public class StreamisRpcLogSender extends AbstractHttpLogSender { + + /** + * Each sender register an application + */ + private final String applicationName; + + public StreamisRpcLogSender(String applicationName, RpcLogSenderConfig rpcSenderConfig) { + super(rpcSenderConfig); + this.applicationName = applicationName; + } + + /** + * Aggregate to streamis log events + * @param sendBuffer send buffer + * @return + */ + @Override + protected StreamisLogEvents aggregateBuffer(SendBuffer sendBuffer) { + int remain = sendBuffer.remaining(); + if (remain > 0) { + StreamisLogEvent[] logEvents = new StreamisLogEvent[remain]; + sendBuffer.readBuf(logEvents, 0, logEvents.length); + return new StreamisLogEvents(applicationName, logEvents); + } + return null; + } + + @Override + protected String convertToJsonString(StreamisLogEvents aggregatedEntity) { + return aggregatedEntity.toJson(); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java new file mode 100644 index 000000000..1b42ad957 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java @@ -0,0 +1,135 @@ +package 
com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf; + +/** + * Abstract sender buffer; + * non-blocking and reduces out-of-bounds exceptions + */ +public abstract class AbstractSendBuffer implements SendBuffer{ + + protected enum Flag{ + WRITE_MODE, READ_MODE + } + + /** + * Access flag + */ + private Flag accessFlag = Flag.WRITE_MODE; + + private int position = 0; + private int limit; + /** + * The capacity is mutable + */ + protected int capacity; + + + public AbstractSendBuffer(int capacity){ + this.capacity = capacity; + limit(this.capacity); + } + + public AbstractSendBuffer(){ + this(Integer.MAX_VALUE); + } + + @Override + public boolean isReadMode() { + return accessFlag == Flag.READ_MODE; + } + + @Override + public boolean isWriteMode() { + return accessFlag == Flag.WRITE_MODE; + } + + @Override + public int capacity() { + return this.capacity; + } + + @Override + public int remaining() { + int rem = this.limit - this.position; + return Math.max(rem, 0); + } + + @Override + public void flip() { + checkFlag(Flag.WRITE_MODE); + this.limit = this.position; + this.position = 0; + this.accessFlag = Flag.READ_MODE; + } + + @Override + public void rewind() { + position = 0; + } + + @Override + public void clear() { + limit(this.capacity); + this.position = 0; + this.accessFlag = Flag.WRITE_MODE; + clearBuf(); + } + + /** + * Change the limit value + * @param newLimit new limit + */ + final void limit(int newLimit){ + if (newLimit > this.capacity || (newLimit < 0)){ + throw new IllegalArgumentException("Set the illegal limit value: " + newLimit + " in send buffer, [capacity: " + this.capacity + "]"); + } + this.limit = newLimit; + if (this.position > newLimit){ + this.position = newLimit; + } + } + + /** + * Inc the position with offset + * @param offset offset value + * @param accessFlag access flag + * @return the current position value + */ + final int nextPosition(int offset, Flag accessFlag){ + checkFlag(accessFlag); + int p = position; + // 
Reach the limit, return -1 value + if (p >= limit){ + return -1; + } + if (p + offset > limit){ + this.position = limit; + } else { + this.position = p + offset; + } + return p; + } + + final void checkFlag(Flag accessFlag){ + if (this.accessFlag != accessFlag){ + throw new IllegalStateException("Illegal access flag [" + accessFlag + "] for send buffer"); + } + } + final void setFlag(Flag accessFlag){ + this.accessFlag = accessFlag; + } + /** + * + * @return the current position + */ + final int position(){ + return this.position; + } + + final void position(int position){ + this.position = position; + } + /** + * Do the actual clear + */ + protected abstract void clearBuf(); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java new file mode 100644 index 000000000..0e64c4ffa --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java @@ -0,0 +1,102 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf; + +import java.util.Arrays; +import java.util.function.Function; + +/** + * Immutable send buffer (use array) + */ +public class ImmutableSendBuffer extends AbstractSendBuffer{ + + /** + * Buffer object array + */ + private final Object[] buf; + + public ImmutableSendBuffer(int capacity) { + super(capacity); + buf = new Object[capacity]; + } + + @Override + protected void clearBuf() { + // Release the memory occupied + Arrays.fill(buf, null); + } + + @Override + public void capacity(String newCapacity) { + throw new 
IllegalArgumentException("Unsupported to scale-in/scale-up the send buffer"); + } + + @Override + @SuppressWarnings("all") + public int writeBuf(Object[] elements, int srcIndex, int length) { + if (srcIndex < elements.length){ + int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.WRITE_MODE); + if (startPos >= 0){ + int writes = position() - startPos; + System.arraycopy(elements, srcIndex, this.buf, startPos, writes); + return writes; + } + } + return -1; + } + + @Override + @SuppressWarnings("all") + public int readBuf(Object[] elements, int srcIndex, int length) { + if (srcIndex < elements.length){ + int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.READ_MODE); + if (startPos >= 0){ + int reads = position() - startPos; + System.arraycopy(this.buf, startPos, elements, srcIndex, reads); + return reads; + } + } + return -1; + } + + @Override + public int writeBuf(E element) { + int startPos = nextPosition(1, Flag.WRITE_MODE); + if (startPos >= 0){ + buf[startPos] = element; + return 1; + } + return -1; + } + + @Override + @SuppressWarnings("unchecked") + public E readBuf() { + int startPos = nextPosition(1, Flag.READ_MODE); + if (startPos >= 0){ + return (E)buf[startPos]; + } + return null; + } + + @Override + @SuppressWarnings("unchecked") + public SendBuffer compact(Function dropAble) { + checkFlag(Flag.READ_MODE); + int offset = 0; + int compact = position() - 1; + for(int i = position(); i < capacity; i ++){ + Object element = buf[i]; + if (dropAble.apply((E)element)){ + buf[i] = null; + offset ++; + } else { + compact = i - offset; + buf[compact] = element; + } + } + position(compact + 1); + limit(this.capacity); + setFlag(Flag.WRITE_MODE); + return this; + } + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java new file mode 100644 index 000000000..0a98580fb --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java @@ -0,0 +1,92 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf; + +import java.util.function.Function; + +/** + * Buffer for Rpc sender + * @param <E> buffer element + */ +public interface SendBuffer<E> { + + /** + * Capacity + * @return int + */ + int capacity(); + + /** + * Is read mode + * @return boolean + */ + boolean isReadMode(); + + /** + * Is write mode + * @return boolean + */ + boolean isWriteMode(); + /** + * Scale-up or scale-in + * @param newCapacity new capacity + */ + void capacity(String newCapacity); + /** + * Remain size + * (remain space for writing or remain elements for reading) + * @return int + */ + int remaining(); + + /** + * Transition between write-mode and read-mode + */ + void flip(); + + /** + * Restart from the beginning of window + */ + void rewind(); + /** + * Clear to reuse the buffer + */ + void clear(); + /** + * Write buffer element + * @param element element + * @return if succeed + */ + int writeBuf(E element); + + /** + * Write buffer element array + * @param elements elements + * @param srcIndex the src index in elements + * @param length the length to read + * @return write num + */ + int writeBuf(Object[] elements, int srcIndex, int length); + + /** + * Read buffer element + * @return element + */ + E readBuf(); + + /** + * Read buffer element array + * @param elements elements + * @param srcIndex the src index in elements + * @param length the length to write + * @return read num + */ + int readBuf(Object[] elements, int srcIndex, int length); + + /** + * Compact
the buffer, avoid the useless elements + * @param dropAble drop function + * @return send buffer + */ + SendBuffer compact(Function dropAble); + + +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java new file mode 100644 index 000000000..c693d0152 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java @@ -0,0 +1,163 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.AbstractRpcLogSender; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.SendLogExceptionStrategy; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request.StringPostAction; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; +import org.apache.http.HttpResponse; +import org.apache.http.client.HttpClient; +import org.apache.http.client.HttpResponseException; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.conn.ConnectTimeoutException; + +import javax.net.ssl.SSLException; +import java.io.*; +import java.net.UnknownHostException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; 
+import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; + +public abstract class AbstractHttpLogSender extends AbstractRpcLogSender { + + /** + * Retry strategy + */ + private final SendLogExceptionStrategy sendRetryStrategy; + + /** + * Exception counter + */ + private final AtomicInteger exceptionCounter = new AtomicInteger(); + /** + * Hold the global http client + */ + private final HttpClient globalHttpClient; + + /** + * Recover time point + */ + private final AtomicLong serverRecoveryTimePoint = new AtomicLong(-1L); + + public AbstractHttpLogSender(RpcLogSenderConfig rpcSenderConfig) { + super(rpcSenderConfig); + this.globalHttpClient = HttpClientTool.createHttpClient(rpcSenderConfig); + this.sendRetryStrategy = new SendLogExceptionStrategy(this) { + + private final Class[] retryOnExceptions = new Class[]{ + InterruptedIOException.class, UnknownHostException.class, + ConnectTimeoutException.class, SSLException.class}; + @Override + public int retryCount() { + return rpcSenderConfig.getSendRetryCnt(); + } + + @Override + public SendLogExceptionStrategy.RetryDescription onException(Exception e, SendBuffer sendBuffer) { + boolean shouldRetry = false; + // Limit of exception number is the same as the retry times + if (exceptionCounter.incrementAndGet() > retryCount()){ + serverRecoveryTimePoint.set(System.currentTimeMillis() + + TimeUnit.SECONDS.toMillis(rpcSenderConfig.getServerRecoveryTimeInSec())); + } else { + for (Class retryOnException : retryOnExceptions) { + if (retryOnException.equals(e.getClass())) { + shouldRetry = true; + break; + } + } + if (!shouldRetry && e instanceof HttpResponseException){ + if (((HttpResponseException) e).getStatusCode() < 500){ + shouldRetry = true; + } + } + } + if (shouldRetry && !sender.getOrCreateLogCache().isCacheable()){ + // Means that the cache is full + // Set the position of buffer to 0 + sendBuffer.rewind(); 
+ // Compact the buffer and transient to write mode; + sendBuffer.compact( element -> element.mark() > 1); + shouldRetry = false; + } + Optional.ofNullable(exceptionListener).ifPresent(listener -> listener.onException(sender, e, null)); + return new RetryDescription(shouldRetry); + } + }; + } + + @Override + protected SendLogExceptionStrategy getSendLogExceptionStrategy() { + return this.sendRetryStrategy; + } + + @Override + protected void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws IOException { + if (System.currentTimeMillis() >= serverRecoveryTimePoint.get()) { + if (aggregatedEntity instanceof LogElement) { + long timestamp = ((LogElement) aggregatedEntity).getLogTimeStamp(); + if (System.currentTimeMillis() - timestamp > rpcSenderConfig.getMaxDelayTimeInSec() * 1000L) { + // Abort the entity + return; + } + } + String address = rpcSenderConfig.getAddress(); + if (null != address && !address.trim().equals("")) { + StringPostAction postAction = new StringPostAction(rpcSenderConfig.getAddress(), convertToJsonString(aggregatedEntity)); + RpcAuthConfig authConfig = rpcSenderConfig.getAuthConfig(); + postAction.getRequestHeaders().put(authConfig.getTokenUserKey(), authConfig.getTokenUser()); + HttpResponse response = null; + try { + response = postAction.execute(this.globalHttpClient); + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode > 200){ + throw new HttpResponseException(statusCode, + convertToString(response.getEntity().getContent(), StandardCharsets.UTF_8)); + } + }finally { + // Close the response and release the conn + if (null != response){ + if (response instanceof CloseableHttpResponse){ + ((CloseableHttpResponse)response).close(); + } else { + // Destroy the stream + response.getEntity().getContent().close(); + } + } + } + // Init the counter + this.exceptionCounter.set(0); + } + } + } + + /** + * Convert input to string + * @param inputStream input stream + * @param charset charset + * @return 
string value + * @throws IOException + */ + private String convertToString(InputStream inputStream, Charset charset) throws IOException { + StringBuilder builder = new StringBuilder(); + try(BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, charset))){ + String line; + while((line = reader.readLine()) != null){ + builder.append(line); + } + } + return builder.toString(); + } + + /** + * Convert the entity to json + * @param aggregatedEntity aggregated entity + * @return json string + */ + protected abstract String convertToJsonString(E aggregatedEntity); +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java new file mode 100644 index 000000000..12f3f7dab --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java @@ -0,0 +1,72 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import org.apache.http.Header; +import org.apache.http.client.HttpClient; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.message.BasicHeader; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Http clients + */ +public class HttpClientTool { + + /** + * Connect timeout + */ + public static final int 
DEFAULT_CONNECT_TIMEOUT = 3000;

    /** Default socket (read) timeout in milliseconds. */
    public static final int DEFAULT_SOCKET_TIMEOUT = 15000;

    /** Default cap on pooled connections (total and per route). */
    public static final int DEFAULT_MAX_CONN = 10;

    /**
     * Build a pooled HTTP client from the sender configuration.
     * Falls back to the defaults above when the configured values are non-positive,
     * attaches the auth token as a default header when present, and registers a JVM
     * shutdown hook that closes the client.
     * @param rpcSenderConfig rpc sender config
     * @return http client
     */
    public static HttpClient createHttpClient(RpcLogSenderConfig rpcSenderConfig){
        int connectTimeout = rpcSenderConfig.getConnectionTimeout() > 0
                ? rpcSenderConfig.getConnectionTimeout() : DEFAULT_CONNECT_TIMEOUT;
        int socketTimeout = rpcSenderConfig.getSocketTimeout() > 0
                ? rpcSenderConfig.getSocketTimeout() : DEFAULT_SOCKET_TIMEOUT;
        // NOTE: the connection-request timeout deliberately reuses the socket timeout
        RequestConfig requestConfig = RequestConfig.custom()
                .setConnectTimeout(connectTimeout)
                .setConnectionRequestTimeout(socketTimeout)
                .setSocketTimeout(socketTimeout)
                .build();
        // Pool size follows the number of cache consumer threads
        int consumeThreads = rpcSenderConfig.getCacheConfig().getMaxConsumeThread();
        int maxConn = consumeThreads > 0 ? consumeThreads : DEFAULT_MAX_CONN;
        HttpClientBuilder clientBuilder = HttpClients.custom();
        String tokenValue = rpcSenderConfig.getAuthConfig().getTokenCode();
        List<Header> defaultHeaders = new ArrayList<>();
        if (null != tokenValue && !tokenValue.trim().isEmpty()){
            defaultHeaders.add(new BasicHeader(rpcSenderConfig.getAuthConfig().getTokenCodeKey(), tokenValue));
        }
        clientBuilder.setDefaultRequestConfig(requestConfig).setDefaultHeaders(defaultHeaders)
                .useSystemProperties().setMaxConnTotal(maxConn).setMaxConnPerRoute(maxConn);
        CloseableHttpClient httpClient = clientBuilder.build();
        // Release the shared client when the JVM exits
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            try {
                httpClient.close();
            } catch (IOException e) {
                // Ignore: the process is already shutting down
            }
        }));
        return httpClient;
    }
}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.entities;

import java.io.File;
import java.util.List;

/**
 * Entity that carries file resources.
 */
public interface Resource {

    /**
     * Resources related
     * @return file list
     */
    List<File> getResources();

}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpRequestBase;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;

/**
 * Base implementation of {@link HttpAction}: holds the target URI, headers and
 * payload, and executes the concrete request method built by subclasses.
 * @param <T> concrete request method type
 */
public abstract class AbstractHttpAction<T extends HttpRequestBase> implements HttpAction {

    /** Target URI of the request. */
    protected String uri;

    /** Requesting user (optional). */
    protected String user;

    /** Headers applied to the request just before execution. */
    private final Map<String, String> requestHeaders = new HashMap<>();

    /** Payload (body) entries of the request. */
    private final Map<String, Object> requestPayload = new HashMap<>();

    public AbstractHttpAction(String uri){
        this.uri = uri;
    }

    @Override
    public String uri() {
        return uri;
    }

    /**
     * Build the concrete request method instance.
     * @return method
     */
    protected abstract T getRequestMethod();

    @Override
    public Map<String, String> getRequestHeaders() {
        return this.requestHeaders;
    }

    @Override
    public Map<String, Object> getRequestPayload() {
        return this.requestPayload;
    }

    @Override
    public HttpResponse execute(HttpClient httpClient) throws IOException {
        T request = getRequestMethod();
        try{
            request.setURI(new URI(uri));
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException("URI maybe has wrong format", e);
        }
        // Copy the collected headers onto the outgoing request
        requestHeaders.forEach(request::setHeader);
        return httpClient.execute(request);
    }
}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;

/**
 * Placeholder for an HTTP GET action (not implemented yet).
 */
public class GetAction {
}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;

import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;

import java.io.IOException;
import java.util.Map;

/**
 * Contract of an executable HTTP action.
 */
public interface HttpAction {

    /**
     * URI path
     * @return path
     */
    String uri();

    /**
     * Headers to send with the request.
     * @return map
     */
    Map<String, String> getRequestHeaders();

    /**
     * Request pay load(body)
     * @return map
     */
    Map<String, Object> getRequestPayload();

    /**
     * Execute the action against the given client.
     * @param httpClient client to execute with
     * @return http response
     * @throws IOException on transport failure
     */
    HttpResponse execute(HttpClient httpClient) throws IOException;

}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;

import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;

/**
 * POST action whose body is a raw (JSON) string.
 */
public class StringPostAction extends AbstractHttpAction<HttpPost> {

    /**
     * Raw string value
     */
    private final String rawString;

    public StringPostAction(String uri, String rawString) {
        super(uri);
        this.rawString = rawString;
    }

    @Override
    protected HttpPost getRequestMethod() {
        // Body is UTF-8 JSON; the URI is assigned later, inside execute()
        HttpPost postMethod = new HttpPost();
        StringEntity bodyEntity = new StringEntity(rawString, "UTF-8");
        bodyEntity.setContentType(ContentType.APPLICATION_JSON.toString());
        postMethod.setEntity(bodyEntity);
        return postMethod;
    }
}
package com.webank.wedatasphere.streamis.jobmanager.log.utils;

import java.util.regex.Pattern;

/**
 * Tool to operate str
 */
public class StringUtils {

    /**
     * Split {@code input} into an array around the literal {@code delimiter}.
     * @param input string to split
     * @param delimiter literal delimiter (treated verbatim, not as a regex)
     * @return the segments, or {@code null} when the input is null/blank or equals
     *         the trimmed delimiter
     */
    public static String[] convertStrToArray(String input, String delimiter){
        if (null != input && !input.trim().isEmpty() &&
                !input.equals(delimiter.trim())){
            // BUG FIX: previously split on the hard-coded "," and ignored the
            // delimiter parameter entirely. Quote the delimiter so that regex
            // metacharacters ("." , "|", ...) are matched literally.
            return input.split(Pattern.quote(delimiter));
        }
        return null;
    }

}
package com.webank.wedatasphere.streamis.jobmanager.plugin;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;

/**
 * Streamis config autowired: SPI hook that lets embedders customize the
 * appender configuration before it is built.
 */
public interface StreamisConfigAutowired {

    /**
     * Customize and build the log appender config.
     * @param builder builder pre-populated from the appender definition
     * @return the effective appender configuration
     * @throws Exception when the configuration cannot be built
     */
    StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception;
}
--- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml @@ -0,0 +1,64 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.4 + ../../pom.xml + + 4.0.0 + + streamis-job-log-collector + + + 8 + 8 + 2.17.1 + 1.7.15 + + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector-core + ${streamis.version} + + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + + org.apache.logging.log4j + log4j-slf4j-impl + ${log4j.version} + provided + + + + org.apache.logging.log4j + log4j-api + ${log4j.version} + provided + + + + org.apache.logging.log4j + log4j-core + ${log4j.version} + provided + + + + junit + junit + ${junit.version} + test + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java new file mode 100644 index 000000000..a82f44cbb --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java @@ -0,0 +1,128 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter; +import 
com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender;
import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent;
import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Property;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.layout.PatternLayout;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.ServiceLoader;
import java.util.function.BiFunction;
import java.util.function.Function;

/**
 * Streamis rpc log appender: log4j2 appender that forwards formatted log events
 * to the Streamis server through the RPC log sender.
 */
@Plugin(name = "StreamRpcLog", category = "Core", elementType = "appender", printObject = true)
public class StreamisRpcLogAppender extends AbstractAppender {
    private static final String DEFAULT_APPENDER_NAME = "StreamRpcLog";
    /**
     * Appender config
     */
    private final StreamisLogAppenderConfig appenderConfig;

    /**
     * Rpc log sender
     */
    private final StreamisRpcLogSender rpcLogSender;

    /**
     * Cache the events are written into; drained by the sender.
     */
    private final LogCache<StreamisLogEvent> logCache;

    /**
     * (loggerName, message) -> keep? Defaults to "keep everything".
     */
    private BiFunction<String, String, Boolean> messageFilterFunction = (logger, message) -> true;

    protected StreamisRpcLogAppender(String name, Filter filter,
                                     Layout<? extends Serializable> layout,
                                     boolean ignoreExceptions, Property[] properties,
                                     StreamisLogAppenderConfig appenderConfig) {
        super(name, filter, layout, ignoreExceptions, properties);
        this.appenderConfig = appenderConfig;
        this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(),
                this.appenderConfig.getSenderConfig());
        this.rpcLogSender.setExceptionListener((subject, t, message) ->
                LOGGER.error((null != subject? subject.getClass().getSimpleName() : "") + ": " + message, t));
        this.logCache = this.rpcLogSender.getOrCreateLogCache();
        List<LogMessageFilter> messageFilters = appenderConfig.getMessageFilters();
        if (null != messageFilters && !messageFilters.isEmpty()){
            // All registered filters must accept a message for it to be forwarded
            messageFilterFunction = (logger, message) ->{
                for(LogMessageFilter messageFilter : messageFilters){
                    if (!messageFilter.doFilter(logger, message)){
                        return false;
                    }
                }
                return true;
            };
        }
        // Flush and close the sender when the JVM exits
        Runtime.getRuntime().addShutdownHook(new Thread(this.rpcLogSender::close));
    }

    @Override
    public void append(LogEvent event) {
        String content = new String(getLayout().toByteArray(event));
        if (messageFilterFunction.apply(event.getLoggerName(), content)) {
            // Transform to stream log event;
            StreamisLogEvent logEvent = new StreamisLogEvent(content, event.getTimeMillis());
            try {
                this.logCache.cacheLog(logEvent);
            } catch (InterruptedException e) {
                // BUG FIX: restore the interrupt flag so that shutdown/interrupt
                // signals are not silently swallowed by the appender.
                Thread.currentThread().interrupt();
                LOGGER.error("StreamisRpcLogAppender: {} interrupted when cache the log into the RPC sender, message: {}", this.getName(), e.getMessage());
            }
        }
    }

    /**
     * Plugin factory. Resolves the effective appender configuration through the
     * {@link StreamisConfigAutowired} SPI (the last provider wins), then builds
     * the appender.
     */
    @PluginFactory
    public static StreamisRpcLogAppender createAppender(@PluginAttribute("name") String name,
                                                        @PluginAttribute("appName") String applicationName,
                                                        @PluginAttribute("ignoreExceptions") boolean ignoreExceptions,
                                                        @PluginElement("Filter") final Filter filter,
                                                        @PluginElement("Layout") Layout<? extends Serializable> layout,
                                                        @PluginElement("RpcLogSender") RpcLogSenderConfig rpcLogSenderConfig) throws Exception{
        if (null == name || name.trim().isEmpty()){
            name = DEFAULT_APPENDER_NAME;
        }
        if (Objects.isNull(layout)){
            layout = PatternLayout.createDefaultLayout();
        }
        // Search the config autowired class
        List<StreamisConfigAutowired> configAutowiredEntities = new ArrayList<>();
        StreamisLog4j2AppenderConfig logAppenderConfig = null;
        ServiceLoader.load(StreamisConfigAutowired.class,
                StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add);
        StreamisLog4j2AppenderConfig.Builder builder = new StreamisLog4j2AppenderConfig.Builder(applicationName, filter, rpcLogSenderConfig);
        for (StreamisConfigAutowired autowired : configAutowiredEntities){
            logAppenderConfig = (StreamisLog4j2AppenderConfig) autowired.logAppenderConfig(builder);
        }
        if (Objects.isNull(logAppenderConfig)){
            logAppenderConfig = builder.build();
        }
        applicationName = logAppenderConfig.getApplicationName();
        if (null == applicationName || applicationName.trim().isEmpty()){
            throw new IllegalArgumentException("Application name cannot be empty");
        }
        // BUG FIX: was System.out.println — use the log4j2 status logger instead
        LOGGER.info("StreamisRpcLogAppender: init with config => {}", logAppenderConfig);
        return new StreamisRpcLogAppender(name, logAppenderConfig.getFilter(), layout, ignoreExceptions, Property.EMPTY_ARRAY, logAppenderConfig);
    }

}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
import
com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.filter.CompositeFilter; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * Appender config for log4j2 + */ +public class StreamisLog4j2AppenderConfig extends StreamisLogAppenderConfig { + /** + * Filter in log4j2 + */ + private final Filter filter; + + public StreamisLog4j2AppenderConfig(String applicationName, Filter filter, + RpcLogSenderConfig rpcLogSenderConfig, List messageFilters){ + super(applicationName, rpcLogSenderConfig, messageFilters); + this.filter = filter; + } + + public static class Builder extends StreamisLogAppenderConfig.Builder { + + /** + * Filter rules + */ + private final List filters = new ArrayList<>(); + + public Builder(String applicationName, Filter filter, RpcLogSenderConfig rpcLogSenderConfig) { + super(applicationName, rpcLogSenderConfig); + if (Objects.nonNull(filter)) { + this.filters.add(filter); + } + } + + /** + * Set filter + * @param filter filter + * @return builder + */ + public StreamisLog4j2AppenderConfig.Builder setFilter(Filter filter){ + this.filters.clear(); + this.messageFilters.clear(); + this.filters.add(filter); + if (filter instanceof LogMessageFilterAdapter){ + this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter()); + } + return this; + } + + /** + * Append filter + * @param filter filter + * @return builder + */ + public StreamisLog4j2AppenderConfig.Builder withFilter(Filter filter){ + filters.add(filter); + if (filter instanceof LogMessageFilterAdapter){ + this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter()); + } + return this; + } + + /** + * Build method + * @return config + */ + public StreamisLog4j2AppenderConfig build(){ + Filter 
logFilter = null; + if (filters.size() > 1){ + logFilter = CompositeFilter.createFilters(filters.toArray(new Filter[0])); + } else if (!filters.isEmpty()){ + logFilter = filters.get(0); + } + return new StreamisLog4j2AppenderConfig(applicationName, logFilter, rpcLogSenderConfig, messageFilters); + } + } + public Filter getFilter() { + return filter; + } + + @Override + public String toString() { + return "StreamisLog4j2AppenderConfig{" + + "applicationName='" + applicationName + '\'' + + ", senderConfig=" + senderConfig + + ", filter=" + filter + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java new file mode 100644 index 000000000..87a10ba85 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config; + +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; + +/** + * AuthConfig Element in log4j2 + */ +@Plugin( + name = "AuthConfig", + category = "Core", + printObject = true +) +public class RpcAuthConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig { + + public RpcAuthConfig(){ + super(); + } + public RpcAuthConfig(String tokenCodeKey, String tokenCode, String tokenUserKey, String tokenUser) { + super(tokenCodeKey, tokenCode, tokenUserKey, tokenUser); + } + + @PluginFactory + public static 
RpcAuthConfig createRpcAuthConfig(@PluginAttribute("tokenCodeKey") String tokenCodeKey,
                                  @PluginAttribute("tokenCode") String tokenCode,
                                  @PluginAttribute("tokenUserKey") String tokenUserKey,
                                  @PluginAttribute("tokenUser") String tokenUser){
    // Plugin factory: build the auth element from the log4j2 configuration attributes
    return new RpcAuthConfig(tokenCodeKey, tokenCode, tokenUserKey, tokenUser);
}

}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;

import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.util.Integers;

/**
 * Rpc sender configuration element for log4j2.
 */
@Plugin(
        name = "RpcLogSender",
        category = "Core",
        printObject = true
)
public class RpcLogSenderConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig {

    public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout,
                              int serverRecoveryTimeInSec, int maxDelayTimeInSec,
                              RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig) {
        super(address, sendRetryCnt, connectionTimeout, socketTimeout, serverRecoveryTimeInSec,
                maxDelayTimeInSec, authConfig, cacheConfig, bufferConfig);
    }

    /**
     * Plugin factory. String attributes are parsed with these defaults:
     * sendRetryCnt=3, connectionTimeout=3000ms, socketTimeout=15000ms,
     * serverRecoveryTimeInSec=5, maxDelayTimeInSec=60.
     */
    @PluginFactory
    public static RpcLogSenderConfig createConfig(
            @PluginAttribute("address") String address, @PluginAttribute("sendRetryCnt") String sendRetryCnt,
            @PluginAttribute("connectionTimeout") String connectionTimeout, @PluginAttribute("socketTimeout") String socketTimeout,
            @PluginAttribute("serverRecoveryTimeInSec") String serverRecoveryTimeInSec, @PluginAttribute("maxDelayTimeInSec") String maxDelayTimeInSec,
            @PluginAttribute("debugMode") String debugMode,
            @PluginElement("AuthConfig") RpcAuthConfig authConfig, @PluginElement("SendLogCache") SendLogCacheConfig cacheConfig,
            @PluginElement("SendBuffer") SendBufferConfig bufferConfig){
        RpcLogSenderConfig config = new RpcLogSenderConfig(address, Integers.parseInt(sendRetryCnt, 3),
                Integers.parseInt(connectionTimeout, 3000), Integers.parseInt(socketTimeout, 15000),
                Integers.parseInt(serverRecoveryTimeInSec, 5), Integers.parseInt(maxDelayTimeInSec, 60),
                authConfig, cacheConfig, bufferConfig);
        config.setDebugMode(Boolean.parseBoolean(debugMode));
        return config;
    }

}
package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;

import org.apache.logging.log4j.core.config.plugins.Plugin;
import
org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.util.Integers; + +@Plugin( + name = "SendBuffer", + category = "Core", + printObject = true +) +public class SendBufferConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendBufferConfig { + + public SendBufferConfig() { + } + + public SendBufferConfig(int size, long expireTimeInSec) { + super(size, expireTimeInSec); + } + + @PluginFactory + public static SendBufferConfig createBufferConfig( + @PluginAttribute("size") String size, @PluginAttribute("expireTimeInSec") String expireTimeInSec){ + return new SendBufferConfig(Integers.parseInt(size, 50), + Integers.parseInt(expireTimeInSec, 2)); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java new file mode 100644 index 000000000..f4a63c49c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config; + +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginFactory; +import org.apache.logging.log4j.core.util.Integers; + +/** + * Cache config + */ +@Plugin( + name = "SendLogCache", + category = "Core", + printObject = true +) +public class SendLogCacheConfig extends 
com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig { + + public SendLogCacheConfig(int size, int maxConsumeThread) { + super(size, maxConsumeThread); + } + + @PluginFactory + public static SendLogCacheConfig createCacheConfig( + @PluginAttribute("size") String size, @PluginAttribute("maxConsumeThread") String maxConsumeThread){ + return new SendLogCacheConfig(Integers.parseInt(size, 150), Integers.parseInt(maxConsumeThread, 10)); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java new file mode 100644 index 000000000..59a2a3da9 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java @@ -0,0 +1,84 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.filters; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.KeywordMessageFilter; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Marker; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.Logger; +import org.apache.logging.log4j.core.filter.AbstractFilter; +import org.apache.logging.log4j.message.Message; + +import java.util.Optional; + +/** + * Threshold filter with keyword + */ +public class 
KeywordThresholdFilter extends AbstractFilter implements LogMessageFilterAdapter { + + /** + * Level + */ + private final Level level; + + /** + * Message filter + */ + private final KeywordMessageFilter messageFilter; + public KeywordThresholdFilter(String[] acceptKeywords, String[] excludeKeywords){ + // Use accept and deny match + super(Filter.Result.ACCEPT, Filter.Result.DENY); + // If accept keywords is empty, set the log level to warn + if (null == acceptKeywords || acceptKeywords.length <= 0){ + this.level = Level.WARN; + System.out.println("The keywords is empty, set the log threshold level >= " + this.level); + } else { + this.level = Level.ALL; + } + this.messageFilter = new KeywordMessageFilter(acceptKeywords, excludeKeywords); + } + + @Override + public Result filter(LogEvent event) { + return filter(event.getLevel()); + } + + @Override + public Result filter(Logger logger, Level level, Marker marker, Message msg, Throwable t) { + return filter(level); + } + + @Override + public Result filter(Logger logger, Level level, Marker marker, Object msg, Throwable t) { + return filter(level); + } + + @Override + public Result filter(Logger logger, Level level, Marker marker, String msg, Object... params) { + return filter(level); + } + + private Result filter(final Level level){ + return level.isMoreSpecificThan(this.level) ? 
onMatch : onMismatch; + } + + public Level getLevel() { + return level; + } + + @Override + public String toString() { + return level.toString() + + "|acceptKeywords:[" + + Optional.ofNullable(this.messageFilter.getAcceptKeywords()).orElse(new String[]{}).length + + "]|excludeKeywords:[" + + Optional.ofNullable(this.messageFilter.getExcludeKeywords()).orElse(new String[]{}).length + "]" ; + } + + @Override + public LogMessageFilter getLogMessageFilter() { + return this.messageFilter; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java new file mode 100644 index 000000000..0bc49c139 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class StreamisLogAppenderTest { + private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class); + @Test + public void appenderLog() throws InterruptedException { + int total = 10000; + int tps = 1000; + long timer = System.currentTimeMillis() + 1000; + for (int i = 0; i < total; i++) { + if (i > 0 && i % tps == 0) { + long sleep = timer - System.currentTimeMillis(); + if (sleep > 0) { + try { + Thread.sleep(sleep); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + timer = System.currentTimeMillis() + 1000; + } + LOG.info("ERROR: Stream Log appender test, sequence id: " + i); + } + } +} diff --git 
a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml new file mode 100644 index 000000000..27aff1d6d --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + ` + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml new file mode 100644 index 000000000..0dcb67247 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml @@ -0,0 +1,56 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.4 + ../../pom.xml + + 4.0.0 + + streamis-job-log-collector1x + + + 8 + 8 + 1.2.17 + 1.7.12 + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector-core + ${streamis.version} + + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + provided + + + + log4j + log4j + ${log4j.version} + provided + + + + junit + junit + ${junit.version} + test + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java new file mode 100644 index 000000000..90a28abf4 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java @@ -0,0 +1,233 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.StreamisLog4jAppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender; +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent; +import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired; +import org.apache.log4j.AppenderSkeleton; +import org.apache.log4j.Level; +import org.apache.log4j.SimpleLayout; +import org.apache.log4j.helpers.LogLog; +import org.apache.log4j.spi.LoggingEvent; + +import java.util.*; +import java.util.function.BiFunction; + +/** + * Rpc appender for log4j1 + */ +public class StreamisRpcLogAppender extends AppenderSkeleton { + + /** + * Application name + */ + private String applicationName; + + private String filterEnable = "true"; + /** + * Appender config + */ + private StreamisLog4jAppenderConfig appenderConfig; + + /** + * Rpc log sender + */ + private StreamisRpcLogSender rpcLogSender; + + /** + * Rpc log sender config + */ + private RpcLogSenderConfig rpcLogSenderConfig = new RpcLogSenderConfig(); + + + /** + * Cache + */ + private LogCache logCache; + + /** + * Filter function + */ + private BiFunction messageFilterFunction = (logger, message) -> false; + + @Override + protected void append(LoggingEvent loggingEvent) { + String content = super.getLayout().format(loggingEvent); + if 
(messageFilterFunction.apply(loggingEvent.getLoggerName(), content)) { + // Transform to stream log event; + // System.currentTimeMills() -> loggingEvent.getTimeStamp() + StreamisLogEvent logEvent = new StreamisLogEvent(content, loggingEvent.getTimeStamp()); + if (Objects.nonNull(logCache)) { + try { + this.logCache.cacheLog(logEvent); + } catch (InterruptedException e) { + LogLog.error("StreamisRpcLogAppender: " + this.getName() + + " interrupted when cache the log into the RPC sender, message: " + e.getMessage()); + } + } + } + } + + @Override + public void close() { + if (Objects.nonNull(this.rpcLogSender)){ + this.rpcLogSender.close(); + } + } + + @Override + public boolean requiresLayout() { + return true; + } + + @Override + public void activateOptions() { + if (Objects.nonNull(this.logCache)){ + return; + } + if (Objects.isNull(getLayout())){ + setLayout(new SimpleLayout()); + } + if (System.getProperty("filter.enable") == null){ + System.setProperty("filter.enable", filterEnable); + } + // Search the config autowired class + List configAutowiredEntities = new ArrayList<>(); + StreamisLog4jAppenderConfig logAppenderConfig = null; + ServiceLoader.load(StreamisConfigAutowired.class, + StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add); + StreamisLog4jAppenderConfig.Builder builder = new StreamisLog4jAppenderConfig.Builder(this.applicationName, + getThreshold(), getFilter(), rpcLogSenderConfig); + for (StreamisConfigAutowired autowired : configAutowiredEntities){ + try { + logAppenderConfig = (StreamisLog4jAppenderConfig) autowired.logAppenderConfig(builder); + } catch (Exception e) { + LogLog.warn("Unable to autowired the config from: " +autowired.getClass().getName(), e); + } + } + if (Objects.isNull(logAppenderConfig)){ + logAppenderConfig = builder.build(); + } + this.applicationName = logAppenderConfig.getApplicationName(); + if (null == applicationName || applicationName.trim().equals("")){ + throw new 
IllegalArgumentException("Application name cannot be empty"); + } + this.appenderConfig = logAppenderConfig; + // Set the threshold to error default + setThreshold(Optional.ofNullable(logAppenderConfig.getThreshold()).orElse(Level.ERROR)); + // First to clear the filters + clearFilters(); + // Then to add filter + logAppenderConfig.getFilters().forEach(this::addFilter); + System.out.println("StreamisRpcLogAppender: init with config => " + logAppenderConfig); + this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(), + this.appenderConfig.getSenderConfig()); + this.rpcLogSender.setExceptionListener((subject, t, message) -> + LogLog.error((null != subject? subject.getClass().getSimpleName() : "") + ": " + message, t)); + this.logCache = this.rpcLogSender.getOrCreateLogCache(); + List messageFilters = appenderConfig.getMessageFilters(); + if (null != messageFilters && messageFilters.size() > 0){ + messageFilterFunction = (logger, message) ->{ + for(LogMessageFilter messageFilter : messageFilters){ + if (!messageFilter.doFilter(logger, message)){ + return false; + } + } + return true; + }; + } + } + + + public String getAppName() { + return applicationName; + } + + /** + * Application name + * @param applicationName name + */ + public void setAppName(String applicationName) { + this.applicationName = applicationName; + } + + public String getFilterEnable() { + return filterEnable; + } + + public void setFilterEnable(String filterEnable) { + this.filterEnable = filterEnable; + } + + public void setRpcAddress(String address){ + this.rpcLogSenderConfig.setAddress(address); + } + + public void setRpcConnTimeout(int connectionTimeout){ + this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout); + } + + public void setRpcSocketTimeout(int socketTimeout){ + this.rpcLogSenderConfig.setSocketTimeout(socketTimeout); + } + public void setRpcSendRetryCnt(int sendRetryCnt){ + this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt); + } + + public 
void setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec){ + this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec); + } + + public void setRpcMaxDelayTimeInSec(int maxDelayTimeInSec){ + this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec); + } + // Authentication + public void setRpcAuthTokenCodeKey(String tokenCodeKey){ + this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey); + } + + public void setRpcAuthTokenUserKey(String tokenUserKey){ + this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey); + } + + public void setRpcAuthTokenUser(String tokenUser){ + this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser); + } + + public void setRpcAuthTokenCode(String tokenCode){ + this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode); + } + + // Cache configuration + public void setRpcCacheSize(int cacheSize){ + this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize); + } + + public void setRpcCacheMaxConsumeThread(int maxConsumeThread){ + this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread); + } + + // Buffer configuration + public void setRpcBufferSize(int bufferSize){ + this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize); + } + + public void setRpcBufferExpireTimeInSec(int expireTimeInSec){ + this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec); + } + + public void setDebugMode(boolean debugMode){ + this.rpcLogSenderConfig.setDebugMode(debugMode); + } + + public void setDiscard(boolean discard){ + this.rpcLogSenderConfig.getCacheConfig().setDiscard(discard); + } + + public void setDiscardWindow(int window){ + this.rpcLogSenderConfig.getCacheConfig().setDiscardWindow(window); + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java new file mode 100644 index 000000000..f10bef451 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java @@ -0,0 +1,110 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter; +import org.apache.log4j.Priority; +import org.apache.log4j.spi.Filter; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +/** + * Appender config for log4j1 + */ +public class StreamisLog4jAppenderConfig extends StreamisLogAppenderConfig { + + /** + * Filter in log4j1 + */ + private final List filters = new ArrayList<>(); + /** + * + */ + private final Priority threshold; + + protected StreamisLog4jAppenderConfig(String applicationName, Priority threshold, List filters, + RpcLogSenderConfig rpcLogSenderConfig, List messageFilters) { + super(applicationName, rpcLogSenderConfig, messageFilters); + this.threshold = threshold; + this.filters.addAll(filters); + } + + public static class Builder extends StreamisLogAppenderConfig.Builder{ + + /** + * Filter rules + */ + private final List filters = new ArrayList<>(); + + /** + * Threshold + */ + private Priority threshold; + + public Builder(String applicationName, Priority threshold, Filter filter,RpcLogSenderConfig rpcLogSenderConfig) { + 
super(applicationName, rpcLogSenderConfig); + this.threshold = threshold; + if (Objects.nonNull(filter)) { + this.filters.add(filter); + } + } + + public StreamisLog4jAppenderConfig.Builder setFilter(Filter filter){ + this.filters.clear(); + this.messageFilters.clear(); + this.filters.add(filter); + if (filter instanceof LogMessageFilterAdapter){ + this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter()); + } + return this; + } + + public StreamisLog4jAppenderConfig.Builder withFilter(Filter filter){ + filters.add(filter); + if (filter instanceof LogMessageFilterAdapter){ + this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter()); + } + return this; + } + + /** + * Set threshold + * @param threshold threshold + * @return builder + */ + public StreamisLog4jAppenderConfig.Builder threshold(Priority threshold, boolean needMoreSpecific){ + if (needMoreSpecific){ + if (this.threshold == null || threshold.isGreaterOrEqual(this.threshold)){ + this.threshold = threshold; + } + }else { + this.threshold = threshold; + } + return this; + } + public StreamisLog4jAppenderConfig build(){ + return new StreamisLog4jAppenderConfig(applicationName, threshold, filters, rpcLogSenderConfig, messageFilters); + } + } + + public List getFilters() { + return filters; + } + + public Priority getThreshold() { + return threshold; + } + + @Override + public String toString() { + return "StreamisLog4jAppenderConfig{" + + "applicationName='" + applicationName + '\'' + + ", senderConfig=" + senderConfig + + ", filters=" + filters + + ", threshold=" + threshold + + '}'; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java new file mode 100644 index 000000000..1fe60b308 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.filters; + +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.KeywordMessageFilter; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter; +import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter; +import org.apache.log4j.spi.Filter; +import org.apache.log4j.spi.LoggingEvent; + +/** + * All match filter with keyword + */ +public class KeywordAllMatchFilter extends Filter implements LogMessageFilterAdapter { + + /** + * Message filter + */ + private final KeywordMessageFilter messageFilter; + + public KeywordAllMatchFilter(String[] acceptKeywords, String[] excludeKeywords){ + this.messageFilter = new KeywordMessageFilter(acceptKeywords, excludeKeywords); + } + @Override + public int decide(LoggingEvent event) { + return Filter.ACCEPT; + } + + @Override + public LogMessageFilter getLogMessageFilter() { + return this.messageFilter; + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java new file mode 100644 index 000000000..0dcca02c9 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.collector; + +import org.apache.log4j.PropertyConfigurator; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class StreamisLogAppenderTest { + private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class); + @Test + public void appenderLog() throws InterruptedException { + PropertyConfigurator.configure(StreamisLogAppenderTest.class.getResource("/log4j.properties").getPath()); + int total = 1000; + int tps = 100; + long timer = System.currentTimeMillis() + 1000; + for(int i = 0; i < total; i ++){ + if (i > 0 && i % tps == 0){ + long sleep = timer - System.currentTimeMillis(); + if (sleep > 0){ + Thread.sleep(sleep); + } + timer = System.currentTimeMillis() + 1000; + } + LOG.info("Stream Log appender test, sequence id: " + i); + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties new file mode 100644 index 000000000..8801938ab --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties @@ -0,0 +1,44 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +### set log levels ### + +log4j.rootCategory=INFO,stream + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.Threshold=INFO +log4j.appender.console.layout=org.apache.log4j.PatternLayout +#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n + +log4j.appender.stream=com.webank.wedatasphere.streamis.jobmanager.log.collector.StreamisRpcLogAppender +log4j.appender.stream.appName=stream_applicatioin +log4j.appender.stream.Threshold=INFO +log4j.appender.stream.filterEnable=false +log4j.appender.stream.layout=org.apache.log4j.PatternLayout +log4j.appender.stream.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.stream.rpcConnTimeout=3000 +log4j.appender.stream.rpcSocketTimeout=15000 +log4j.appender.stream.rpcSendRetryCnt=3 +log4j.appender.stream.rpcServerRecoveryTimeInSec=5 +log4j.appender.stream.rpcMaxDelayTimeInSec=60 +log4j.appender.stream.rpcAuthTokenCodeKey= +log4j.appender.stream.rpcAuthTokenUserKey= +log4j.appender.stream.rpcAuthTokenUser= +log4j.appender.stream.rpcAuthTokenCode= +log4j.appender.stream.rpcCacheSize=200 +log4j.appender.stream.rpcCacheMaxConsumeThread=1 +log4j.appender.stream.rpcBufferSize=50 +log4j.appender.stream.rpcBufferExpireTimeInSec=2 \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml 
b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml new file mode 100644 index 000000000..9866eede4 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml @@ -0,0 +1,75 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.4 + ../../pom.xml + + 4.0.0 + + xspark-streamis-log-collector + + + 8 + 8 + 1.2.17 + 1.7.12 + + + + + com.webank.wedatasphere.streamis + streamis-job-log-collector1x + ${streamis.version} + + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + provided + + + + log4j + log4j + ${log4j.version} + provided + + + + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + + + assemble + + single + + + install + + + + + src/main/assembly/package.xml + + false + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml new file mode 100644 index 000000000..8da27bf2c --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml @@ -0,0 +1,19 @@ + + + package + + + jar + + false + + + / + true + runtime + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java new file mode 100644 index 000000000..2d92da75e --- /dev/null +++ 
package com.webank.wedatasphere.streamis.jobmanager.log.collector.spark;

import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.StreamisLog4jAppenderConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.filters.KeywordAllMatchFilter;
import com.webank.wedatasphere.streamis.jobmanager.log.utils.StringUtils;
import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
import org.apache.log4j.Level;

import java.util.Optional;

/**
 * Autoconfigure the streamis log appender config in a Spark environment.
 * Every setting is read from JVM system properties (-Dkey=value).
 */
public class SparkStreamisConfigAutowired implements StreamisConfigAutowired {

    private static final String DEBUG_MODE = "log.debug.mode";

    private static final String DISCARD_SWITCH = "log.discard";

    private static final String DISCARD_WINDOW = "log.discard.window";

    private static final String APP_NAME_CONFIG = "app.name";

    private static final String SERVER_ADDRESS_CONFIG = "streamis.url";

    private static final String COLLECTOR_URI_CONFIG = "streamis.log.collector.uri";

    private static final String PROJECT_NAME_CONFIG = "project.name";

    private static final String DEFAULT_COLLECTOR_URI = "/api/rest_j/v1/streamis/streamJobManager/log/collect/events";

    private static final String FILTER_ENABLE = "filter.enable";

    private static final String FILTER_KEYWORD = "filter.keywords";

    private static final String FILTER_KEYWORD_EXCLUDE = "filter.keywords.exclude";

    /**
     * Build the appender configuration from system properties.
     *
     * @param builder config builder supplied by the caller
     * @return the built appender config
     * @throws Exception propagated from the builder
     */
    @Override
    public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws Exception {
        // Load the config from system properties.
        // System.getProperty(key, default) never returns null when a default is supplied,
        // so the null guards the original code carried here were redundant.
        if (Boolean.parseBoolean(System.getProperty(DEBUG_MODE, "false"))) {
            builder.setDebugMode(true);
        }
        if (Boolean.parseBoolean(System.getProperty(DISCARD_SWITCH, "true"))) {
            builder.setDiscard(true);
        }
        try {
            builder.setDiscardWindow(Integer.parseInt(System.getProperty(DISCARD_WINDOW, "2")));
        } catch (NumberFormatException e) {
            // Ignore: keep the builder's default discard window on a malformed value
        }
        Optional.ofNullable(System.getProperty(APP_NAME_CONFIG)).ifPresent(appName -> {
            // Prefix the application name with the project name when one is configured
            String projectName = System.getProperty(PROJECT_NAME_CONFIG);
            if (null != projectName && !projectName.trim().isEmpty()) {
                appName = projectName + "." + appName;
            }
            System.out.println("Spark env to streamis: application name =>" + appName);
            builder.setAppName(appName);
        });
        String serverAddress = System.getProperty(SERVER_ADDRESS_CONFIG);
        if (null != serverAddress && !serverAddress.trim().isEmpty()) {
            // Normalize "<server>/" + "<uri>" into a single well-formed URL
            if (serverAddress.endsWith("/")) {
                serverAddress = serverAddress.substring(0, serverAddress.length() - 1);
            }
            String collectorUri = System.getProperty(COLLECTOR_URI_CONFIG, DEFAULT_COLLECTOR_URI);
            if (!collectorUri.trim().isEmpty()) {
                if (!collectorUri.startsWith("/")) {
                    collectorUri = "/" + collectorUri;
                }
                serverAddress += collectorUri;
            }
            System.out.println("Spark env to streamis: server address =>" + serverAddress);
            builder.setRpcAddress(serverAddress);
        }
        // Resolve the token user: prefer the USER env variable, then the JVM user, then "hadoop"
        String user = System.getenv("USER");
        if (null == user || user.trim().isEmpty()) {
            user = System.getProperty("user.name", "hadoop");
        }
        System.out.println("Spark env to streamis: log user =>" + user);
        builder.setRpcAuthTokenUser(user);
        // Set filter (Boolean.parseBoolean never throws, so no try/catch is required)
        boolean filterEnable = Boolean.parseBoolean(System.getProperty(FILTER_ENABLE, "true"));
        if (filterEnable && builder instanceof StreamisLog4jAppenderConfig.Builder) {
            StreamisLog4jAppenderConfig.Builder log4jBuilder = (StreamisLog4jAppenderConfig.Builder) builder;
            String[] acceptKeywords = StringUtils.convertStrToArray(System.getProperty(FILTER_KEYWORD, "ERROR"), ",");
            KeywordAllMatchFilter keywordAllMatchFilter = new KeywordAllMatchFilter(
                    acceptKeywords,
                    StringUtils.convertStrToArray(System.getProperty(FILTER_KEYWORD_EXCLUDE), ","));
            // With no accept keywords, raise the threshold so only WARN and above pass
            if (null == acceptKeywords || acceptKeywords.length <= 0) {
                System.out.println("The keywords is empty, set the log threshold level >= " + Level.WARN);
                log4jBuilder.threshold(Level.WARN, true);
            }
            log4jBuilder.setFilter(keywordAllMatchFilter);
        }
        return builder.build();
    }
}
job-log-collector/flink-streamis-log-collector + job-log-collector/xspark-streamis-log-collector + streamis-job-log-server + streamis-job-log-common + + + + 8 + 8 + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml new file mode 100644 index 000000000..886d7ed30 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml @@ -0,0 +1,29 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.4 + ../pom.xml + + 4.0.0 + + streamis-job-log-common + + + 8 + 8 + + + + + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java new file mode 100644 index 000000000..da3a7054b --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java @@ -0,0 +1,34 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.entities; + + +/** + * Element defined of log + */ +public interface LogElement { + + /** + * Sequence id + * @return seq id + */ + int getSequenceId(); + + /** + * Log time + * @return log time + */ + long getLogTimeStamp(); + + /** + * Get content + * @return content array + */ + String[] getContents(); + + /** + * The importance of log + * 0: useless, 1: normal, 2:important + * @return + */ + int mark(); + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java new file mode 100644 index 000000000..6f8645f77 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java @@ -0,0 +1,84 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.entities; + + +import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool; + +import java.io.Serializable; +import java.util.Objects; + +/** + * Log event for streamis + */ +public class StreamisLogEvent implements LogElement, Serializable { + + /** + * Log time + */ + private long logTimeInMills; + + /** + * Log content + */ + private String content; + + /** + * Mark + */ + private int mark; + + public StreamisLogEvent(){ + + } + public StreamisLogEvent(String content, long logTimeInMills){ + this.content = content; + this.logTimeInMills = logTimeInMills; + } + @Override + public int getSequenceId() { + return 0; + } + + @Override + public long getLogTimeStamp() { + return this.logTimeInMills; + } + + @Override + public String[] getContents() { + return new String[]{content}; + } + + public String getContent() { + return content; + } + + @Override + public int mark() { + return this.mark; + } + + public void setLogTimeStamp(long logTimeInMills) { + this.logTimeInMills = logTimeInMills; + } + + public void setContent(String content) { + this.content = content; + } + + public void setMark(int mark) { + this.mark = mark; + } + + public void setSequenceId(int sequenceId){ + // Ignore + } + + public String toJson(){ + return "{" + + "\"logTimeStamp\":" + logTimeInMills + + ",\"content\":" + (Objects.isNull(content)? 
package com.webank.wedatasphere.streamis.jobmanager.log.entities;

import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool;

import java.io.Serializable;
import java.util.Objects;

/**
 * A batch of StreamisLogEvent produced by one application.
 */
public class StreamisLogEvents implements LogElement, Serializable {

    /**
     * Application name
     */
    private String appName;
    /**
     * Log time in milliseconds: the last event's time, or the max over the batch
     */
    private long logTimeInMills;

    private StreamisLogEvent[] events;

    public StreamisLogEvents() {

    }

    /**
     * Build a batch and derive its timestamp from the events.
     * Fix: the original indexed events[events.length - 1] with no empty/null guard
     * (ArrayIndexOutOfBoundsException on an empty array) and its fallback loop
     * dereferenced null elements (NullPointerException). Both cases are guarded here;
     * an empty batch keeps the sentinel timestamp -1.
     *
     * @param applicationName application name
     * @param events batched events (may be empty; null elements are skipped)
     */
    public StreamisLogEvents(String applicationName, StreamisLogEvent[] events) {
        this.appName = applicationName;
        this.events = events;
        StreamisLogEvent lastEvent = (null == events || events.length == 0)
                ? null : events[events.length - 1];
        if (null != lastEvent) {
            this.logTimeInMills = lastEvent.getLogTimeStamp();
        } else {
            long maxTime = -1;
            if (null != events) {
                for (StreamisLogEvent event : events) {
                    if (null != event && event.getLogTimeStamp() > maxTime) {
                        maxTime = event.getLogTimeStamp();
                    }
                }
            }
            this.logTimeInMills = maxTime;
        }
    }

    @Override
    public int getSequenceId() {
        return 0;
    }

    @Override
    public long getLogTimeStamp() {
        return this.logTimeInMills;
    }

    @Override
    public String[] getContents() {
        String[] contents = new String[events.length];
        for (int i = 0; i < contents.length; i++) {
            contents[i] = events[i].getContent();
        }
        return contents;
    }

    /**
     * Batches always report "normal" importance.
     */
    @Override
    public int mark() {
        return 1;
    }

    public String getAppName() {
        return appName;
    }

    public StreamisLogEvent[] getEvents() {
        return events;
    }

    public void setAppName(String appName) {
        this.appName = appName;
    }

    public void setLogTimeStamp(long logTimeInMills) {
        this.logTimeInMills = logTimeInMills;
    }

    public void setEvents(StreamisLogEvent[] events) {
        this.events = events;
    }

    /**
     * Sequence id is fixed at 0; the setter exists only for deserialization frameworks.
     */
    public void setSequenceId(int sequenceId) {
        // Ignore
    }

    /**
     * Serialize the batch as a JSON object string.
     */
    public String toJson() {
        return "{" +
                "\"logTimeStamp\":" + logTimeInMills +
                ",\"appName\":" + (Objects.isNull(appName) ? null : "\"" + JsonTool.escapeStrValue(appName) + "\"") +
                ",\"events\":[" +
                (Objects.isNull(events) || events.length <= 0 ? "" : joinEvents(events, ",")) + "]" +
                ",\"sequenceId\":0" +
                "}";
    }

    // Join the JSON of each event with the given separator
    private String joinEvents(StreamisLogEvent[] events, String separator) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < events.length; i++) {
            builder.append(events[i].toJson());
            if (i < events.length - 1) {
                builder.append(separator);
            }
        }
        return builder.toString();
    }

}
+ char[] chars = input.toCharArray(); + StringBuilder sb = new StringBuilder(); + for (char c : chars) { + switch (c) { + case '\"': + sb.append("\\\""); + break; + case '\\': + sb.append("\\\\"); + break; + case '/': + sb.append("\\/"); + break; + case '\b': + sb.append("\\b"); + break; + case '\f': + sb.append("\\f"); + break; + case '\n': + sb.append("\\n"); + break; + case '\r': + sb.append("\\r"); + break; + case '\t': + sb.append("\\t"); + break; + default: + sb.append((c < 32) ? escapeUnicode(c) : c); + } + } + return sb.toString(); + } + + /** + * Escape unicode + * @param code char code + * @return escaped string + */ + private static String escapeUnicode(int code){ + if (code > 0xffff){ + return "\\u" + Integer.toHexString(code).toUpperCase(Locale.ENGLISH); + } else { + return "\\u" + HEX_DIGITS[(code >> 12) & 15] + + HEX_DIGITS[(code >> 8) & 15] + HEX_DIGITS[(code >> 4) & 15] + HEX_DIGITS[code & 15]; + } + } + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml new file mode 100644 index 000000000..8b4714e25 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml @@ -0,0 +1,37 @@ + + + + streamis-job-log + com.webank.wedatasphere.streamis + 0.2.4 + ../pom.xml + + 4.0.0 + + streamis-job-log-server + + + 8 + 8 + + + + + com.webank.wedatasphere.streamis + streamis-job-log-common + 0.2.4 + + + org.apache.linkis + linkis-module + + + junit + junit + ${junit.version} + test + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java new file mode 100644 index 000000000..f3f32e363 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.StreamisJobLogStorage; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.StorageThresholdDriftPolicy; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.RoundRobinLoadBalancer; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.SimpleLoadBalancer; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +@Configuration +public class StreamisJobLogAutoConfiguration { + + @Bean(initMethod = "init", destroyMethod = "destroy") + @ConditionalOnMissingBean(JobLogStorage.class) + public JobLogStorage streamisJobLogStorage(){ + StreamisJobLogStorage jobLogStorage = new StreamisJobLogStorage(); + jobLogStorage.addLoadBalancer(new RoundRobinLoadBalancer()); + jobLogStorage.addLoadBalancer(new SimpleLoadBalancer()); + jobLogStorage.setBucketDriftPolicy(new StorageThresholdDriftPolicy()); + return jobLogStorage; + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java new file mode 100644 index 000000000..1f6777e9f --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java @@ -0,0 +1,66 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.config; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.TimeType; + +/** + * Store the configuration defined for job log + */ +public class StreamJobLogConfig { + + /** + * Set the log restful api as no-auth + */ + public static final CommonVars NO_AUTH_REST = CommonVars.apply("wds.stream.job.log.restful.no-auth", false); + + /** + * The threshold of log storage + */ + public static final CommonVars STORAGE_THRESHOLD = CommonVars.apply("wds.stream.job.log.storage.threshold", 0.9); + + /** + * Max weight of storage context + */ + public static final CommonVars STORAGE_CONTEXT_MAX_WEIGHT = CommonVars.apply("wds.stream.job.log.storage.context.max-weight", 5); + + /** + * Paths of storage context + */ + public static final CommonVars STORAGE_CONTEXT_PATHS = CommonVars.apply("wds.stream.job.log.storage.context.paths", "/data/stream/log"); + + /** + * Bucket monitor name + */ + public static final CommonVars BUCKET_MONITOR_NAME = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.name", "Log-Storage-Bucket-Monitor"); + + /** + * Bucket monitor interval + */ + public static final CommonVars BUCKET_MONITOR_INTERVAL = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.interval", new TimeType("2m")); + + /** + * Bucket max idle time + */ + public static final CommonVars BUCKET_MAX_IDLE_TIME = CommonVars.apply("wds.stream.job.log.storage.bucket.max-idle-time", new TimeType("12h")); + + /** + * Bucket root path + */ + public static final CommonVars BUCKET_ROOT_PATH = CommonVars.apply("wds.stream.job.log.storage.bucket.root-path", "/data/stream/log"); + /** + * Max active part size in bucket + */ + public static final CommonVars BUCKET_MAX_ACTIVE_PART_SIZE = 
CommonVars.apply("wds.stream.job.log.storage.bucket.max-active-part-size", 100L); + + /** + * Compression of part in bucket + */ + public static final CommonVars BUCKET_PART_COMPRESS = CommonVars.apply("wds.stream.job.log.storage.bucket.part-compress", "gz"); + + /** + * Bucket layout + */ + public static final CommonVars BUCKET_LAYOUT = CommonVars.apply("wds.stream.job.log.storage.bucket.layout", "%msg"); + + public static final CommonVars BUCKET_PART_HOLD_DAY = CommonVars.apply("wds.stream.job.log.storage.bucket.part-hold-day", 30); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java new file mode 100644 index 000000000..8676c5778 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java @@ -0,0 +1,12 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.entities; + +import com.fasterxml.jackson.annotation.JsonIgnore; + +public class StreamisLogEvents extends com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents { + + @Override + @JsonIgnore + public String[] getContents() { + return super.getContents(); + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java new file mode 100644 index 000000000..56edc2dd3 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.exception; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +/** + * Stream job log exception + */ +public class StreamJobLogException extends ErrorException { + public StreamJobLogException(int errCode, String desc) { + super(errCode, desc); + } + public StreamJobLogException(int errCode, String desc, Throwable t){ + super(errCode, desc); + + } + public static class Runtime extends LinkisRuntimeException{ + + public Runtime(int errCode, String desc) { + super(errCode, desc); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java new file mode 100644 index 000000000..27104311e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java @@ -0,0 +1,64 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.restful; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.server.entities.StreamisLogEvents; +import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException; +import com.webank.wedatasphere.streamis.jobmanager.log.server.service.StreamisJobLogService; 
+import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; + +@RestController +@RequestMapping(path = "/streamis/streamJobManager/log") +public class JobLogRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(JobLogRestfulApi.class); + + @Resource + private StreamisJobLogService streamisJobLogService; + + @RequestMapping(value = "/collect/events", method = RequestMethod.POST) + public Message collectEvents(@RequestBody StreamisLogEvents events, HttpServletRequest request){ + Message result; + try{ + if (StringUtils.isBlank(events.getAppName())){ + return Message.ok("Ignore the stream log events without application name"); + } + String userName; + if (StreamJobLogConfig.NO_AUTH_REST.getValue()){ + userName = request.getHeader("Token-User"); + if (StringUtils.isBlank(userName)){ + try { + userName = SecurityFilter.getLoginUsername(request); + }catch(Exception e){ + // Ignore + } + if (StringUtils.isBlank(userName)){ + userName = "hadoop"; + } + } + }else { + userName = SecurityFilter.getLoginUsername(request); + if (StringUtils.isBlank(userName)) { + throw new StreamJobLogException(-1, "The request should has token user"); + } + } + this.streamisJobLogService.store(userName, events); + result = Message.ok(); + }catch (Exception e){ + String message = "Fail to collect stream log events, message: " + e.getMessage(); + result = Message.error(message); + } + return result; + } + +} diff --git 
a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java new file mode 100644 index 000000000..8fea4dab6 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java @@ -0,0 +1,35 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.service; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig; +import org.springframework.stereotype.Service; + +import javax.annotation.PostConstruct; +import javax.annotation.Resource; + +/** + * Default implement + */ +@Service +public class DefaultStreamisJobLogService implements StreamisJobLogService{ + + @Resource + private JobLogStorage jobLogStorage; + + private JobLogBucketConfig jobLogBucketConfig; + + @PostConstruct + public void init(){ + jobLogBucketConfig = new JobLogBucketConfig(); + } + @Override + public void store(String user, StreamisLogEvents events) { + JobLogBucket jobLogBucket = jobLogStorage.getOrCreateBucket(user, events.getAppName(), jobLogBucketConfig); + // If cannot get log bucket, drop the events + if (null != jobLogBucket){ + jobLogBucket.getBucketStorageWriter().write(events); + } + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java new file mode 100644 index 000000000..e8f8bfe4e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java @@ -0,0 +1,16 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.service; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents; + +/** + * Job log service + */ +public interface StreamisJobLogService { + + /** + * Store log events + * @param user user own + * @param events events + */ + void store(String user, StreamisLogEvents events); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java new file mode 100644 index 000000000..4299104d5 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java @@ -0,0 +1,49 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketDriftPolicy; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContextListener; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.JobLogStorageLoadBalancer; + +/** + * Storage of job log + */ +public 
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage;

import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketDriftPolicy;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContextListener;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.JobLogStorageLoadBalancer;

/**
 * Storage of job log: manages log buckets, the drift policy that moves buckets
 * between storage contexts, and the load balancers that place new buckets.
 */
public interface JobLogStorage {

    /**
     * Get the bucket for the given user/application, creating it if absent.
     * @param userName owner of the bucket
     * @param appName application name
     * @param bucketConfig bucket config used when a new bucket must be created
     * @return the existing or newly created bucket (implementations may return null on failure)
     */
    JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig);

    /**
     * Set bucket drift policy
     * @param bucketDriftPolicy bucket drift policy
     */
    void setBucketDriftPolicy(JobLogBucketDriftPolicy bucketDriftPolicy);

    /**
     * Add context listener
     * @param listener listener
     */
    void addContextListener(JobLogStorageContextListener listener);

    /**
     * Add load balancer
     * @param loadBalancer load balancer
     */
    void addLoadBalancer(JobLogStorageLoadBalancer loadBalancer);

    /**
     * Init method
     */
    void init() throws Exception;

    /**
     * Destroy method
     */
    void destroy();
}
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage;

import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketDriftPolicy;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketState;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.*;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.JobLogStorageLoadBalancer;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils.MemUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.linkis.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import static com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig.BUCKET_MONITOR_INTERVAL;

/**
 * Job log storage: maintains log buckets and the storage contexts (disk paths)
 * they write to, periodically refreshes context weights from free space, logs
 * bucket state, and closes buckets that are idle or must drift to another context.
 */
public class StreamisJobLogStorage implements JobLogStorage {

    private static final Logger LOG = LoggerFactory.getLogger(StreamisJobLogStorage.class);

    /** Storage contexts, one per configured storage path. */
    private final List<JobLogStorageContext> storageContexts = new CopyOnWriteArrayList<>();

    /** Policy deciding when a bucket should drift to another storage context. */
    private JobLogBucketDriftPolicy bucketDriftPolicy;

    /** Buckets keyed by "{user}.{app}" (see {@link #toBucketName}). */
    private final Map<String, JobLogBucket> buckets = new ConcurrentHashMap<>();

    /** Context listeners notified of context lifecycle events. */
    private final List<JobLogStorageContextListener> contextListeners = new ArrayList<>();

    /** Load balancers used (in priority order) to choose a context for a new bucket. */
    private final List<JobLogStorageLoadBalancer> loadBalancers = new ArrayList<>();

    /** Cache of bucket constructors, keyed by bucket class name. */
    private final Map<String, Constructor<?>> bucketConstructors = new ConcurrentHashMap<>();

    /** Periodic task that monitors the status of buckets. */
    private Future<?> monitorThread;

    @Override
    public JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig) {
        String bucketName = toBucketName(userName, appName);
        return buckets.computeIfAbsent(bucketName, name -> {
            // First choose a storage context for the new bucket
            JobLogStorageContext context = chooseStorageContext(bucketName, bucketConfig);
            if (null == context) {
                return null;
            }
            Class<?> bucketClass = bucketConfig.getBucketClass();
            if (Objects.isNull(bucketClass)) {
                return null;
            }
            // Find (and cache) a constructor of shape (String, JobLogStorageContext, JobLogBucketConfig, ...)
            Constructor<?> constructor = bucketConstructors.computeIfAbsent(bucketClass.getName(), className -> {
                for (Constructor<?> candidate : bucketClass.getConstructors()) {
                    Class<?>[] inputParams = candidate.getParameterTypes();
                    if (inputParams.length >= 3 && inputParams[0].equals(String.class)
                            && inputParams[1].equals(JobLogStorageContext.class)
                            && inputParams[2].equals(JobLogBucketConfig.class)) {
                        return candidate;
                    }
                }
                return null;
            });
            if (Objects.nonNull(constructor)) {
                try {
                    return (JobLogBucket) constructor.newInstance(bucketName, context, bucketConfig);
                } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
                    LOG.warn("Cannot create storage log bucket from [{}]", bucketClass.getName(), e);
                }
            }
            return null;
        });
    }

    @Override
    public void setBucketDriftPolicy(JobLogBucketDriftPolicy bucketDriftPolicy) {
        this.bucketDriftPolicy = bucketDriftPolicy;
    }

    @Override
    public void addContextListener(JobLogStorageContextListener listener) {
        this.contextListeners.add(listener);
    }

    @Override
    public void addLoadBalancer(JobLogStorageLoadBalancer loadBalancer) {
        this.loadBalancers.add(loadBalancer);
        // A load balancer that also listens to context events is registered as a listener too
        if (loadBalancer instanceof JobLogStorageContextListener) {
            addContextListener((JobLogStorageContextListener) loadBalancer);
        }
    }

    @Override
    @PostConstruct
    public synchronized void init() throws Exception {
        initStorageContexts(StringUtils.split(StreamJobLogConfig.STORAGE_CONTEXT_PATHS.getValue(), ","));
        onContextEvent(new ContextLaunchEvent(new ArrayList<>(this.storageContexts)));
        // Init the load balancers (sorted by priority)
        initLoadBalancers();
        if (Objects.isNull(monitorThread)) {
            long interval = BUCKET_MONITOR_INTERVAL.getValue().toLong();
            monitorThread = Utils.defaultScheduler().scheduleAtFixedRate(
                    this::monitorBuckets, interval, interval, TimeUnit.MILLISECONDS);
        }
    }

    @Override
    @PreDestroy
    public void destroy() {
        // First close all the buckets, then stop the monitor task
        buckets.forEach((bucketName, bucket) -> bucket.close());
        if (null != monitorThread) {
            monitorThread.cancel(true);
        }
    }

    /**
     * One monitor pass: refresh context weights, report bucket state and close
     * buckets that are idle longer than the configured max or should drift.
     */
    private void monitorBuckets() {
        String threadName = Thread.currentThread().getName();
        try {
            Thread.currentThread().setName(StreamJobLogConfig.BUCKET_MONITOR_NAME.getValue());
            long maxIdleTime = StreamJobLogConfig.BUCKET_MAX_IDLE_TIME.getValue().toLong();
            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            JobLogStorageContext[] contexts = this.storageContexts.toArray(new JobLogStorageContext[0]);
            try {
                updateContextWeight(contexts);
                // Notify the listeners to refresh the context information
                onContextEvent(new ContextRefreshAllEvent());
            } catch (IOException e) {
                LOG.warn("Unable to calculate weight array of storage context list", e);
            }
            if (!buckets.isEmpty()) {
                StringBuilder builder = new StringBuilder("Buckets(").append(buckets.size()).append(") in LogStorage: [\n");
                buckets.forEach((bucketName, bucket) -> {
                    JobLogBucketState bucketState = bucket.getBucketState();
                    builder.append("bucket: [ name: ").append(bucketName)
                            .append(", path: ").append(bucketState.getBucketPath())
                            .append(", parts: ").append(bucketState.getBucketParts())
                            .append(", write-rate: ").append(bucketState.getBucketWriteRate()).append("/s")
                            .append(", last-write-time: ").append(dateFormat.format(bucketState.getBucketWriteTime()))
                            .append(" ]\n");
                    boolean closeBucket = false;
                    if (bucketState.getBucketWriteTime() + maxIdleTime <= System.currentTimeMillis()) {
                        LOG.info("Close the idle bucket: [ name: {}, last-write-time: {} ]",
                                bucketName, dateFormat.format(bucketState.getBucketWriteTime()));
                        closeBucket = true;
                    }
                    if (Objects.nonNull(bucketDriftPolicy) && bucketDriftPolicy.onPolicy(bucket, contexts)) {
                        LOG.info("Drift the bucket: [ name: {}, last-write-time: {} ]",
                                bucketName, dateFormat.format(bucketState.getBucketWriteTime()));
                        closeBucket = true;
                    }
                    if (closeBucket) {
                        // First remove the bucket from the map, then close it
                        buckets.remove(bucketName);
                        bucket.close();
                    }
                });
                LOG.info(builder.toString());
            }
        } catch (Throwable e) {
            // Swallow deliberately: the monitor task must keep running
            LOG.warn("Some exception happened in monitor thread", e);
        } finally {
            Thread.currentThread().setName(threadName);
        }
    }

    /**
     * Choose a storage context for a new bucket, asking each load balancer in priority order.
     * @param bucketName bucket name
     * @param jobLogBucketConfig bucket config
     * @return storage context, or null if no balancer can provide one
     */
    private JobLogStorageContext chooseStorageContext(String bucketName, JobLogBucketConfig jobLogBucketConfig) {
        for (JobLogStorageLoadBalancer balancer : loadBalancers) {
            JobLogStorageContext context = balancer.chooseContext(bucketName, jobLogBucketConfig);
            if (null != context) {
                return context;
            }
        }
        return null;
    }

    /**
     * Init load balancers and sort them by descending priority.
     */
    private void initLoadBalancers() {
        for (JobLogStorageLoadBalancer loadBalancer : this.loadBalancers) {
            loadBalancer.init();
        }
        this.loadBalancers.sort(Comparator.comparingInt(JobLogStorageLoadBalancer::priority).reversed());
    }

    /**
     * Init the storage contexts from the configured paths and compute their initial weights.
     * @param storagePaths storage paths
     */
    private void initStorageContexts(String[] storagePaths) throws StreamJobLogException {
        LOG.info("Init the storage context: [" + StringUtils.join(storagePaths, ",") + "]");
        for (String storagePath : storagePaths) {
            if (StringUtils.isNotBlank(storagePath)) {
                // TODO the score of context
                this.storageContexts.add(new JobLogStorageContext(storagePath, 1.0));
            }
        }
        if (!this.storageContexts.isEmpty()) {
            int size = this.storageContexts.size();
            try {
                updateContextWeight(storageContexts.toArray(new JobLogStorageContext[size]));
            } catch (IOException e) {
                throw new StreamJobLogException(-1, "Unable to calculate weight array of storage context list", e);
            }
        }
    }

    /**
     * Recompute and assign the weight of each storage context, logging the result.
     * @param contexts context array
     */
    private void updateContextWeight(JobLogStorageContext[] contexts) throws IOException {
        double[] weights = calculateContextWeight(contexts);
        StringBuilder builder = new StringBuilder("Update storage context weights:[\n");
        for (int i = 0; i < weights.length; i++) {
            JobLogStorageContext context = contexts[i];
            builder.append(context.getStorePath()).append(" => ").append(weights[i]);
            if (i != weights.length - 1) {
                builder.append(", ");
            }
            context.setStoreWeight(weights[i]);
        }
        builder.append("\n]");
        LOG.info(builder.toString());
    }

    /**
     * Calculate the base weight of each storage context from its score and free space,
     * then min-max normalize the weights into [1, maxNormalizeWt]. A context whose disk
     * usage reaches the threshold gets a raw weight of 0 (and thus the minimum final weight).
     * @param contexts context array
     * @return weight per context, same order as the input
     */
    private double[] calculateContextWeight(JobLogStorageContext[] contexts) throws IOException {
        double[] weights = new double[contexts.length];
        if (contexts.length > 0) {
            int maxNormalizeWt = Math.max(1, StreamJobLogConfig.STORAGE_CONTEXT_MAX_WEIGHT.getValue());
            double storageThreshold = StreamJobLogConfig.STORAGE_THRESHOLD.getValue();
            double maxWeight = Double.MIN_VALUE;
            double minWeight = Double.MAX_VALUE;
            for (int i = 0; i < weights.length; i++) {
                // BUGFIX: use contexts[i]; the original always read contexts[0], so every
                // context got the weight of the first one.
                JobLogStorageContext context = contexts[i];
                long usableSpace = context.getUsableSpace();
                long totalSpace = context.getTotalSpace();
                double usage = (double) (totalSpace - usableSpace) / (double) totalSpace;
                double weight = 0d;
                if (usage >= storageThreshold) {
                    LOG.warn("The usage of storage context:[{}] reach the threshold: {} > {}, set the weight of it to 0",
                            context.getStorePath(), usage, storageThreshold);
                } else {
                    long freeSpaceInGB = MemUtils.convertToGB(usableSpace, "B");
                    if (freeSpaceInGB <= 0) {
                        freeSpaceInGB = 1;
                    }
                    weight = context.getScore() * (double) freeSpaceInGB;
                }
                weights[i] = weight;
                if (weight > maxWeight) {
                    maxWeight = weight;
                }
                if (weight < minWeight) {
                    minWeight = weight;
                }
            }
            double span = maxWeight - minWeight;
            for (int i = weights.length - 1; i >= 0; i--) {
                // BUGFIX: divide by the span; the original multiplied by it, which breaks
                // the min-max normalization and can produce huge weights.
                weights[i] = (span > 0 ? (maxNormalizeWt - 1) * (weights[i] - minWeight) / span : 0) + 1;
            }
        }
        return weights;
    }

    /**
     * Broadcast a context event to all registered listeners.
     * @param event event
     */
    private void onContextEvent(JobLogStorageContextListener.ContextEvent event) {
        for (JobLogStorageContextListener listener : contextListeners) {
            listener.onContextEvent(event);
        }
    }

    /**
     * Build the bucket name for a user/application pair.
     * @param userName username
     * @param appName app name
     * @return bucket name "{userName}.{appName}"
     */
    private String toBucketName(String userName, String appName) {
        return userName + "." + appName;
    }
}
+ appName; + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java new file mode 100644 index 000000000..463edab76 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java @@ -0,0 +1,36 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext; + +/** + * Job log bucket for streamis + */ +public interface JobLogBucket { + + /** + * Bucket state + * @return state + */ + JobLogBucketState getBucketState(); + + /** + * Storage writer + * @return storage writer + */ + JobLogStorageWriter getBucketStorageWriter(); + + /** + * Get storage context + * @return context + */ + JobLogStorageContext getStorageContext(); + /** + * Bucket name + * @return bucket name + */ + String getBucketName(); + /** + * Close the bucket + */ + void close(); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java new file mode 100644 index 000000000..7264986bd --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java @@ -0,0 +1,112 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import 
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;

import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
import org.apache.linkis.common.conf.CommonVars;

import java.util.HashMap;
import java.util.Map;

/**
 * Configuration for a job log bucket: implementation class, rolling/compression
 * settings and the log line layout.
 */
public class JobLogBucketConfig {

    @SuppressWarnings("unchecked")
    public JobLogBucketConfig() {
        try {
            Class<?> defaultBucketClass = Class.forName(Define.JOB_LOG_BUCKET_CLASS.getValue());
            // Only accept classes that actually implement JobLogBucket
            if (JobLogBucket.class.isAssignableFrom(defaultBucketClass)) {
                this.bucketClass = (Class<? extends JobLogBucket>) defaultBucketClass;
            }
        } catch (ClassNotFoundException e) {
            throw new StreamJobLogException.Runtime(-1, "Cannot find the bucket class, message: " + e.getMessage());
        }
    }

    /**
     * Bucket implementation class; expected to expose a
     * (String, JobLogStorageContext, JobLogBucketConfig) constructor.
     */
    private Class<? extends JobLogBucket> bucketClass;

    /**
     * Extra attributes
     */
    protected Map<String, Object> attributes = new HashMap<>();

    /**
     * Max size of bucket active part (MB)
     */
    private long maxBucketActivePartSize = StreamJobLogConfig.BUCKET_MAX_ACTIVE_PART_SIZE.getValue();

    /**
     * The compress format used for bucket parts
     */
    private String bucketPartCompress = StreamJobLogConfig.BUCKET_PART_COMPRESS.getValue();

    /**
     * Max hold time in days for bucket part
     */
    private int bucketPartHoldTimeInDay = StreamJobLogConfig.BUCKET_PART_HOLD_DAY.getValue();

    /**
     * Layout pattern for formatting log lines
     */
    private String logLayOutPattern = StreamJobLogConfig.BUCKET_LAYOUT.getValue();

    public Class<? extends JobLogBucket> getBucketClass() {
        return bucketClass;
    }

    public void setBucketClass(Class<? extends JobLogBucket> bucketClass) {
        this.bucketClass = bucketClass;
    }

    public Map<String, Object> getAttributes() {
        return attributes;
    }

    public void setAttributes(Map<String, Object> attributes) {
        this.attributes = attributes;
    }

    public long getMaxBucketActivePartSize() {
        return maxBucketActivePartSize;
    }

    public void setMaxBucketActivePartSize(long maxBucketActivePartSize) {
        this.maxBucketActivePartSize = maxBucketActivePartSize;
    }

    public String getBucketPartCompress() {
        return bucketPartCompress;
    }

    public void setBucketPartCompress(String bucketPartCompress) {
        this.bucketPartCompress = bucketPartCompress;
    }

    public int getBucketPartHoldTimeInDay() {
        return bucketPartHoldTimeInDay;
    }

    public void setBucketPartHoldTimeInDay(int bucketPartHoldTimeInDay) {
        this.bucketPartHoldTimeInDay = bucketPartHoldTimeInDay;
    }

    public String getLogLayOutPattern() {
        return logLayOutPattern;
    }

    public void setLogLayOutPattern(String logLayOutPattern) {
        this.logLayOutPattern = logLayOutPattern;
    }

    public static final class Define {
        /**
         * Default bucket class
         */
        public static final CommonVars<String> JOB_LOG_BUCKET_CLASS = CommonVars.apply(
                "wds.streamis.job.log.bucket.class",
                "com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.Log4j2JobLogBucket");
    }
}
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;

/**
 * Factory of creating job log bucket
 */
public interface JobLogBucketFactory {

    /**
     * Create bucket
     * @param jobName job name
     * @param config bucket config
     * @return the created bucket
     */
    JobLogBucket createBucket(String jobName, JobLogBucketConfig config);
}
getBucketWriteTime(); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java new file mode 100644 index 000000000..772040374 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; + +/** + * Storage writer for job log + */ +public interface JobLogStorageWriter { + + /** + * Write log element + * @param logEl elements + * @param + */ + void write(LogElement logEl); + + /** + * Write log line + * @param logLine log line + */ + void write(String logLine); + + /** + * Close log storage + */ + void close(); +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java new file mode 100644 index 000000000..6d7b6318a --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java @@ -0,0 +1,356 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext; 
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;

import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.linkis.common.conf.CommonVars;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.RollingFileAppender;
import org.apache.logging.log4j.core.appender.rolling.*;
import org.apache.logging.log4j.core.appender.rolling.action.*;
import org.apache.logging.log4j.core.config.AppenderRef;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;

/**
 * Job log bucket backed by a dedicated log4j2 RollingFileAppender/Logger pair.
 * Each bucket creates its own uniquely-named logger so buckets can be added and
 * removed from the shared LoggerContext independently.
 */
public class Log4j2JobLogBucket implements JobLogBucket {

    private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(Log4j2JobLogBucket.class);

    /** Appended to the active file name to build the rolled-file pattern. */
    private static final String DEFAULT_FILE_PATTERN_SUFFIX = ".%d{yyyy-MM-dd}-%i";

    /** Max rolled files kept per day (log4j "%i" counter limit). */
    private static final CommonVars<Integer> ROLLOVER_MAX =
            CommonVars.apply("wds.stream.job.log.storage.bucket.log4j.rollover-max", 20);

    /** Bucket name ("{projectName}.{jobName}"). */
    private final String bucketName;

    /** Unique logger name (bucket name + timestamp + thread id). */
    private final String loggerName;

    /** Shared log4j2 logger context. */
    private final LoggerContext loggerContext;

    /** Logger entity backing this bucket. */
    private final Logger logger;

    /** Storage context (root path) this bucket writes into. */
    private final JobLogStorageContext storageContext;

    /** Storage writer exposed to callers. */
    private final JobLogStorageWriter jobLogStorageWriter;

    /** Bucket state view. */
    private final JobLogBucketState jobLogBucketState;

    /** Last write time (epoch millis). */
    private long lastWriteTime;

    /** Start of the current rate-measurement interval (epoch millis). */
    private long preIntervalTime;

    /** Number of threads currently inside a write call. */
    private final AtomicLong activeThread = new AtomicLong(0);

    /** Lines written during the current rate-measurement interval. */
    private final AtomicLong intervalCounter = new AtomicLong(0);

    /** Shutdown flag. */
    private final AtomicBoolean isShutdown = new AtomicBoolean(false);

    /** Lock guarding the shutdown handshake. */
    private final ReentrantLock shutdownLock = new ReentrantLock();

    /** Signalled when the last active writer leaves during shutdown. */
    private final Condition canShutdown = shutdownLock.newCondition();

    /** Measured write rate (lines per second). */
    private double writeRate;

    public Log4j2JobLogBucket(String bucketName, JobLogStorageContext storageContext, JobLogBucketConfig config) {
        this.bucketName = bucketName;
        // Build unique logger name
        this.loggerName = bucketName + System.currentTimeMillis() + "_" + Thread.currentThread().getId();
        this.storageContext = storageContext;
        // Create logger context
        this.loggerContext = (LoggerContext) LogManager.getContext(false);
        this.logger = initLogger(this.bucketName, this.loggerName, this.storageContext, config, this.loggerContext);
        this.jobLogStorageWriter = createStorageWriter();
        this.jobLogBucketState = createBucketState();
    }

    @Override
    public JobLogBucketState getBucketState() {
        return this.jobLogBucketState;
    }

    @Override
    public JobLogStorageWriter getBucketStorageWriter() {
        return this.jobLogStorageWriter;
    }

    @Override
    public JobLogStorageContext getStorageContext() {
        return this.storageContext;
    }

    @Override
    public String getBucketName() {
        return this.bucketName;
    }

    @Override
    public void close() {
        this.isShutdown.set(true);
        this.shutdownLock.lock();
        try {
            // Wait (bounded) for in-flight writers to drain before tearing down the appender
            if (activeThread.get() > 0) {
                if (!this.canShutdown.await(5, TimeUnit.SECONDS)) {
                    LOG.warn("Shutdown the bucket: [{}] directly because the timeout of waiting", bucketName);
                }
            }
        } catch (InterruptedException e) {
            // Ignore and proceed with teardown
        } finally {
            this.shutdownLock.unlock();
        }
        Configuration log4jConfig = this.loggerContext.getConfiguration();
        // First stop the appender, then detach and remove the logger
        log4jConfig.getAppender(this.loggerName).stop();
        log4jConfig.getLoggerConfig(this.loggerName).removeAppender(this.loggerName);
        log4jConfig.removeLogger(this.loggerName);
        loggerContext.updateLoggers();
    }

    /**
     * Build and register the rolling appender + logger config for this bucket.
     */
    private synchronized Logger initLogger(String bucketName, String loggerName,
            JobLogStorageContext storageContext, JobLogBucketConfig config, LoggerContext loggerContext) {
        Configuration log4jConfig = loggerContext.getConfiguration();
        String fileName = resolveFileName(storageContext.getStorePath().toString(), bucketName);
        RollingFileAppender appender = RollingFileAppender.newBuilder()
                .setLayout(PatternLayout.newBuilder().withPattern(config.getLogLayOutPattern()).build())
                .setName(loggerName)
                .withFileName(fileName)
                .withFilePattern(resolveFilePattern(fileName, config.getBucketPartCompress()))
                .withPolicy(SizeBasedTriggeringPolicy.createPolicy(config.getMaxBucketActivePartSize() + "MB"))
                .withStrategy(createRolloverStrategy(log4jConfig, fileName, ROLLOVER_MAX.getValue(), config.getBucketPartHoldTimeInDay()))
                .setConfiguration(log4jConfig)
                .build();
        appender.start();
        log4jConfig.addAppender(appender);
        LoggerConfig loggerConfig = LoggerConfig.newBuilder().withAdditivity(false).withLevel(Level.ALL)
                .withRefs(new AppenderRef[]{
                        AppenderRef.createAppenderRef(loggerName, null, null)
                })
                .withLoggerName(loggerName).withConfig(log4jConfig).build();
        loggerConfig.addAppender(appender, null, null);
        log4jConfig.addLogger(loggerName, loggerConfig);
        // NOTE(review): updateLoggers() refreshes all loggers in the context; presumably
        // required for the new logger to become visible — confirm against log4j2 docs.
        loggerContext.updateLoggers();
        return loggerContext.getLogger(loggerName);
    }

    /**
     * Create the storage writer facade that tracks the write rate and the
     * active-writer count used by the shutdown handshake.
     */
    private JobLogStorageWriter createStorageWriter() {
        return new JobLogStorageWriter() {
            @Override
            public void write(LogElement logEl) {
                activeThread.incrementAndGet();
                try {
                    String[] contents = logEl.getContents();
                    if (null != contents) {
                        for (String content : contents) {
                            write(content, true);
                        }
                    }
                } finally {
                    if (activeThread.decrementAndGet() <= 0 && isShutdown.get()) {
                        notifyShutdown();
                    }
                }
            }

            @Override
            public void write(String logLine) {
                activeThread.incrementAndGet();
                try {
                    write(logLine, false);
                } finally {
                    if (activeThread.decrementAndGet() <= 0 && isShutdown.get()) {
                        notifyShutdown();
                    }
                }
            }

            private void write(String logLine, boolean batch) {
                logger.info(logLine);
                long currentTime = System.currentTimeMillis();
                long intervalCnt = intervalCounter.getAndIncrement();
                long intervalTime = (currentTime - preIntervalTime) / 1000;
                // Accumulate the write rate once per minute
                if (intervalTime >= 60) {
                    writeRate = (double) intervalCnt / (double) intervalTime;
                    preIntervalTime = currentTime;
                    intervalCounter.set(0);
                }
                lastWriteTime = currentTime;
            }

            @Override
            public void close() {
                // Nothing to release; the bucket owns the appender lifecycle
            }
        };
    }

    /**
     * Create the lazy state view over this bucket (path, parts, rate, last write).
     */
    private JobLogBucketState createBucketState() {
        return new JobLogBucketState() {
            private String bucketPath;

            @Override
            public String getBucketPath() {
                if (StringUtils.isBlank(bucketPath)) {
                    Appender appender = loggerContext.getConfiguration().getAppender(loggerName);
                    if (appender instanceof RollingFileAppender) {
                        bucketPath = new File(((RollingFileAppender) appender).getFileName()).getParent();
                    }
                }
                return this.bucketPath;
            }

            @Override
            public double getBucketWriteRate() {
                return writeRate;
            }

            @Override
            public int getBucketParts() {
                AtomicInteger parts = new AtomicInteger(-1);
                String bucketPath = getBucketPath();
                if (StringUtils.isNotBlank(bucketPath)) {
                    Optional.ofNullable(new File(bucketPath).list()).ifPresent(list -> parts.set(list.length));
                }
                return parts.get();
            }

            @Override
            public long getBucketWriteTime() {
                return lastWriteTime;
            }
        };
    }

    /**
     * Wake the thread blocked in {@link #close()} once all writers have drained.
     */
    private void notifyShutdown() {
        this.shutdownLock.lock();
        try {
            // BUGFIX: a java.util.concurrent Condition must be signalled with signalAll();
            // the original called Object#notifyAll(), which throws IllegalMonitorStateException
            // because the condition object's intrinsic monitor is not held.
            this.canShutdown.signalAll();
        } finally {
            this.shutdownLock.unlock();
        }
    }

    /**
     * Create rollover strategy
     * @param configuration configuration
     * @param fileName file name
     * @param rolloverMax rollover max in file pattern
     * @param fileHoldDay file hold day time
     * @return strategy
     */
    private RolloverStrategy createRolloverStrategy(Configuration configuration,
            String fileName, int rolloverMax, int fileHoldDay) {
        DefaultRolloverStrategy.Builder builder = DefaultRolloverStrategy.newBuilder();
        if (rolloverMax > 0) {
            builder.withMax(rolloverMax + "");
        }
        if (fileHoldDay > 0) {
            // Create the actions to delete old files
            builder.withCustomActions(new Action[]{
                    DeleteAction.createDeleteAction(new File(fileName).getParent(), false, 2, false, null,
                            new PathCondition[]{
                                    IfFileName.createNameCondition(null, ".*"),
                                    IfLastModified.createAgeCondition(Duration.parse(fileHoldDay + "d"))
                            },
                            null, configuration)
            });
        }
        return builder.build();
    }

    /**
     * Ex: /data/stream/log/hadoop/{projectName}/{jobName}/{projectName}.{jobName}.log
     * @param bucketRootPath bucket root path
     * @param bucketName bucket name
     * @return file name with absolute path
     */
    private String resolveFileName(String bucketRootPath, String bucketName) {
        // {projectName}.{jobName}
        String fileName = FilenameUtils.normalize(bucketName);
        String basePath = bucketRootPath;
        if (!basePath.endsWith("/")) {
            basePath += "/";
        }
        basePath += fileName.replace(".", "/");
        return basePath + "/" + fileName.substring(bucketName.indexOf(".") + 1) + ".log";
    }

    /**
     * Resolve file pattern
     * @param fileName file name
     * @param format compress format (appended as extension if set)
     * @return file pattern
     */
    private String resolveFilePattern(String fileName, String format) {
        String filePattern = fileName + Log4j2JobLogBucket.DEFAULT_FILE_PATTERN_SUFFIX;
        if (StringUtils.isNotBlank(format)) {
            filePattern = filePattern + (format.startsWith(".") ? format : "." + format);
        }
        return filePattern;
    }
}
file pattern + * @param fileName file name + * @param format format + * @return file pattern + */ + private String resolveFilePattern(String fileName, String format){ + String filePattern = fileName + Log4j2JobLogBucket.DEFAULT_FILE_PATTERN_SUFFIX; + if (StringUtils.isNotBlank(format)){ + filePattern = filePattern + (format.startsWith(".") ? format : "." +format); + } + return filePattern; + } + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java new file mode 100644 index 000000000..608faa75e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext; + +public class StorageThresholdDriftPolicy implements JobLogBucketDriftPolicy{ + @Override + public boolean onPolicy(JobLogBucket bucket, JobLogStorageContext[] contexts) { + JobLogStorageContext bucketContext = bucket.getStorageContext(); + // Means that the storage context is not healthy + if (bucketContext.getStoreWeight() <= 0){ + // Find the available context + boolean hasRest = false; + for(JobLogStorageContext context : contexts){ + if (context.getStoreWeight() > 0){ + hasRest = true; + break; + } + } + return hasRest; + } + return false; + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java new file mode 100644 index 000000000..ba9c002d6 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StreamisJobLogBucket.java @@ -0,0 +1,7 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket; + +/** + * Use the appender and strategy of log4j (version 1.x) to implement the bucket + */ +public class StreamisJobLogBucket { +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java new file mode 100644 index 000000000..c1964376e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context; + +/** + * Means that the storage context has been downed + */ +public class ContextDownEvent implements JobLogStorageContextListener.ContextEvent { + + /** + * Context id + */ + private final String contextId; + + public ContextDownEvent(String contextId){ + this.contextId = contextId; + } + + public String getContextId() { + return contextId; + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java 
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java new file mode 100644 index 000000000..59de63d6e --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context; + +import java.util.List; + +/** + * Means that the storage context has been launched + */ +public class ContextLaunchEvent implements JobLogStorageContextListener.ContextEvent { + + /** + * Storage contexts + */ + private final List contexts; + + public ContextLaunchEvent(List contexts){ + this.contexts = contexts; + } + + public List getContextList() { + return contexts; + } + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java new file mode 100644 index 000000000..b585e5718 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java @@ -0,0 +1,8 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context; + +/** + * Just a sign that to refresh all the storage context + */ +public class ContextRefreshAllEvent implements JobLogStorageContextListener.ContextEvent { + +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContext.java 
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;

import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;

import java.io.IOException;
import java.nio.file.*;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.UUID;

/**
 * Storage context (represent the driver/disk).
 * Wraps one storage root directory plus its {@link FileStore} so callers can
 * query capacity, and carries a static score and a dynamic store weight used
 * by the load balancers.
 */
public class JobLogStorageContext{

    /**
     * Context id (random UUID; identity for equals/hashCode)
     */
    private final String id;

    /**
     * Store path (root directory of this storage context)
     */
    private final Path storePath;

    /**
     * Store information (file store backing the path, used for space queries)
     */
    private final FileStore storeInfo;
    /**
     * Score of storage context (static, assigned at construction)
     */
    private final double score;

    /**
     * Storage weight (dynamic; <= 0 means the context is treated as unhealthy)
     */
    private double storeWeight;

    /**
     * Build a context over the given directory; creates it when absent.
     * Throws StreamJobLogException.Runtime when the path exists but is not a
     * directory, or the file store cannot be resolved.
     */
    public JobLogStorageContext(String path, double score){
        this.id = UUID.randomUUID().toString();
        this.storePath = Paths.get(path);
        this.storeInfo = initStorePath(this.storePath);
        this.score = score;
    }

    /**
     * Ensure the store path exists as a directory and resolve its file store.
     * NOTE(review): directory creation uses POSIX permissions ("rwxrwxr--") and
     * will fail on non-POSIX file systems (e.g. Windows) — confirm deployment targets.
     */
    private FileStore initStorePath(Path path){
        if (Files.notExists(path)){
            try {
                Files.createDirectories(this.storePath,
                        PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxr--")));
            } catch (IOException e) {
                throw new StreamJobLogException.Runtime(-1,
                        "Cannot make the storage path directory: [" + path + "], message: " + e.getMessage());
            }
            // Allow dir link
        } else if (!Files.isDirectory(path)){
            throw new StreamJobLogException.Runtime(-1,
                    "the storage path: [" + path + "] is not directory" );
        }
        try {
            return Files.getFileStore(path);
        } catch (IOException e) {
            throw new StreamJobLogException.Runtime(-1,
                    "Fail to get the storage information in path: [" + path + "], message: " + e.getMessage());
        }
    }

    public Path getStorePath() {
        return storePath;
    }

    /**
     * Score
     * @return score value
     */
    public double getScore() {
        return score;
    }

    public String getId() {
        return id;
    }

    /**
     * Total space of the backing file store.
     * A negative value (reported by some stores for "unknown/unbounded") is
     * mapped to Long.MAX_VALUE.
     * @return bytes return
     * @throws IOException
     */
    public long getTotalSpace() throws IOException {
        long result = storeInfo.getTotalSpace();
        if (result < 0){
            result = Long.MAX_VALUE;
        }
        return result;
    }

    /**
     * Usable space (negative mapped to Long.MAX_VALUE, see getTotalSpace).
     * @return bytes return
     * @throws IOException
     */
    public long getUsableSpace() throws IOException {
        long result = storeInfo.getUsableSpace();
        if (result < 0){
            result = Long.MAX_VALUE;
        }
        return result;
    }

    /**
     * Unallocated space (negative mapped to Long.MAX_VALUE, see getTotalSpace).
     * @return bytes return
     * @throws IOException
     */
    public long getUnallocatedSpace() throws IOException{
        long result = storeInfo.getUnallocatedSpace();
        if (result < 0){
            result = Long.MAX_VALUE;
        }
        return result;
    }

    public double getStoreWeight() {
        return storeWeight;
    }

    public void setStoreWeight(double storeWeight) {
        this.storeWeight = storeWeight;
    }

    // Identity is the random UUID, so two contexts over the same directory are
    // still distinct instances
    @Override
    public boolean equals(Object o) {
        if (o instanceof JobLogStorageContext){
            return this.id.equals(((JobLogStorageContext) o).id);
        }
        return super.equals(o);
    }

    @Override
    public int hashCode() {
        return this.id.hashCode();
    }
}
b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContextListener.java @@ -0,0 +1,17 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context; + +/** + * Context listener + */ +public interface JobLogStorageContextListener { + + /** + * Listen the context event + * @param event event + */ + void onContextEvent(ContextEvent event); + + interface ContextEvent{ + + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java new file mode 100644 index 000000000..06d6186d7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer; + +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext; + +public interface JobLogStorageLoadBalancer { + /** + * Init method + */ + void init(); + + /** + * The order + * @return priority value + */ + default int priority(){ + return -1; + } + + /** + * Choose storage context + * @param bucketName bucket name + * @param config bucket config + * @return + */ + JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config); +} diff --git 
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer;

import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.concurrent.locks.ReentrantLock;


/**
 * Round-robin load balancer.
 * Keeps a weighted candidate list of storage contexts; each choose operation
 * consumes one unit of the chosen candidate's dynamic credit ({@code cwt}),
 * which is refilled from the static weight ({@code wt}) when all credits are
 * exhausted.
 */
public class RoundRobinLoadBalancer implements JobLogStorageLoadBalancer, JobLogStorageContextListener {

    private static final Logger LOG = LoggerFactory.getLogger(RoundRobinLoadBalancer.class);

    /**
     * Candidate array (replaced wholesale under candidateLock on launch events)
     */
    private StorageContextInfo[] candidates = new StorageContextInfo[0];

    /**
     * Lock for candidate array
     */
    private final ReentrantLock candidateLock = new ReentrantLock();

    // Dispatch the three known event types; unknown events are ignored
    @Override
    public void onContextEvent(ContextEvent event) {
        if (event instanceof ContextLaunchEvent){
            onLaunchContexts(((ContextLaunchEvent) event).getContextList());
        } else if (event instanceof ContextDownEvent){
            onDownContext(((ContextDownEvent) event).getContextId());
        } else if (event instanceof ContextRefreshAllEvent){
            onRefreshAllContext();
        }
    }

    // NOTE(review): Integer.MAX_VALUE priority — confirm the intended position
    // of this balancer relative to SimpleLoadBalancer in the balancer chain
    @Override
    public int priority() {
        return Integer.MAX_VALUE;
    }

    @Override
    public void init() {

    }

    /**
     * Choose a context by weighted round-robin; decrements the chosen
     * candidate's credit. Returns null when no candidate is selectable.
     */
    @Override
    public JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config) {
        updateCandidateContextWeight();
        candidateLock.lock();
        try {
            int index = selectContext(candidates);
            if (index >= 0){
                StorageContextInfo info = this.candidates[index];
                info.cwt = info.cwt -1;
                LOG.info("Round-Robin chosen context: {} for bucket: {}", info.context.getStorePath(), bucketName);
                return info.context;
            }
        }finally {
            candidateLock.unlock();
        }
        return null;
    }

    /**
     * Candidate bookkeeping: the context plus its online flag, static weight
     * (floor of the context's store weight) and remaining dynamic credit.
     */
    private static class StorageContextInfo{

        /**
         * Storage context
         */
        final JobLogStorageContext context;

        /**
         * If the context is working
         */
        boolean online = true;

        /**
         * Weight value (static, derived from the context's store weight)
         */
        int wt;

        /**
         * Dynamic weight (remaining credit; never exceeds wt)
         */
        int cwt;

        public StorageContextInfo(JobLogStorageContext context){
            this.context = context;
            this.wt = (int)Math.floor(context.getStoreWeight());
            this.cwt = wt;
        }

        // Re-derive the static weight and clamp the remaining credit to it
        public void refreshWeight(){
            this.wt = (int)Math.floor(context.getStoreWeight());
            if (this.cwt > this.wt){
                this.cwt = this.wt;
            }
        }
    }

    /**
     * Select the next candidate index.
     * Scans online candidates with remaining credit, comparing adjacent
     * eligible pairs: candidate u is returned early when its credit ratio
     * against the next candidate exceeds its static-weight ratio (scaled by
     * 1000 for integer arithmetic). If no candidate is eligible, every credit
     * is refilled from its static weight exactly once and the scan repeats;
     * -1 means nothing is selectable even after the refill.
     * @param infoArray info array
     * @return index, or -1 when no candidate can be chosen
     */
    private int selectContext(StorageContextInfo[] infoArray){
        int u = 0;
        int reset = -1;
        while (true){
            for (int i = 0; i < infoArray.length; i ++){
                if (!infoArray[i].online || infoArray[i].cwt <= 0){
                    continue;
                }
                u = i;
                while (i < infoArray.length - 1){
                    i ++;
                    if (!infoArray[i].online || infoArray[i].cwt <= 0){
                        continue;
                    }
                    if ((infoArray[u].wt * 1000 / infoArray[i].wt <
                            infoArray[u].cwt * 1000 / infoArray[i].cwt)){
                        return u;
                    }
                    u = i;
                }
                return u;
            }
            // Only one refill pass is allowed per selection
            if (++reset > 0){
                return -1;
            }
            for (StorageContextInfo info : infoArray){
                info.cwt = info.wt;
            }
        }

    }
    /**
     * Enlarge the candidate array of context info
     * @param contexts context list
     */
    private void onLaunchContexts(List<JobLogStorageContext> contexts){
        if (contexts.size() > 0){
            candidateLock.lock();
            try{
                // Copy-and-swap: build a new array so concurrent readers under the
                // lock never see a partially filled array
                StorageContextInfo[] source = candidates;
                int newSize = source.length + contexts.size();
                StorageContextInfo[] dest = new StorageContextInfo[newSize];
                System.arraycopy(source, 0, dest, 0, source.length);
                int offset = source.length;
                for(JobLogStorageContext context : contexts){
                    dest[offset++] = new StorageContextInfo(context);
                }
                this.candidates = dest;
            }finally {
                candidateLock.unlock();
            }
        }
    }

    /**
     * Mark the context has been downed (it stays in the array but is skipped
     * by selectContext)
     * @param contextId context id
     */
    private void onDownContext(String contextId){
        // Need to lock the array ?
        candidateLock.lock();
        try{
            for (StorageContextInfo info : candidates) {
                if (contextId.equals(info.context.getId())) {
                    info.online = false;
                    return;
                }
            }
        } finally {
            candidateLock.unlock();
        }
    }

    /**
     * Refresh all the context weights from their storage contexts
     */
    private void onRefreshAllContext(){
        candidateLock.lock();
        try{
            // Update the dynamic weight
            for (StorageContextInfo info : candidates) {
                info.refreshWeight();
            }
        } finally {
            candidateLock.unlock();
        }
    }

    // Intentionally empty: weight updates are driven by ContextRefreshAllEvent
    private void updateCandidateContextWeight(){
        // Empty method
    }

}
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.ContextLaunchEvent; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext; +import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContextListener; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.security.SecureRandom; +import java.util.ArrayList; +import java.util.List; + +/** + * Simple load balancer + */ +public class SimpleLoadBalancer implements JobLogStorageLoadBalancer, JobLogStorageContextListener { + + private static final Logger LOG = LoggerFactory.getLogger(SimpleLoadBalancer.class); + + private final List contexts = new ArrayList<>(); + + private final SecureRandom random = new SecureRandom(); + @Override + public void onContextEvent(ContextEvent event) { + if (event instanceof ContextLaunchEvent){ + contexts.addAll(((ContextLaunchEvent) event).getContextList()); + } else if (event instanceof ContextDownEvent){ + contexts.removeIf(context -> context.getId().equals(((ContextDownEvent) event).getContextId())); + } + } + + @Override + public void init() { + + } + + @Override + public JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config) { + JobLogStorageContext context = randomSelectContext(this.contexts); + if (null != context){ + LOG.info("Random chosen context: {} for bucket: {}", context.getStorePath(), bucketName); + } + return context; + } + + private JobLogStorageContext randomSelectContext(List candidates){ + return candidates.get(random.nextInt(candidates.size())); + } +} diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java new file mode 100644 index 
package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils;

import java.util.HashMap;
import java.util.Map;

/**
 * Mem utils: conversions between storage size units (B/KB/MB/GB/TB).
 * All conversions are integer (truncating) and up-conversions saturate at
 * Long.MAX_VALUE on overflow.
 */
public class MemUtils {

    /**
     * Unit suffix (upper-cased) to storage unit
     */
    private static final Map<String, StoreUnit> UNIT_MAP = new HashMap<>();
    static{
        UNIT_MAP.put("G", StoreUnit.GB);
        UNIT_MAP.put("GB", StoreUnit.GB);
        UNIT_MAP.put("B", StoreUnit.B);
        UNIT_MAP.put("M", StoreUnit.MB);
        UNIT_MAP.put("MB", StoreUnit.MB);
        UNIT_MAP.put("K", StoreUnit.KB);
        UNIT_MAP.put("KB", StoreUnit.KB);
    }

    /**
     * Convert a size with the given unit suffix to gigabytes.
     * @param size size value (negative returns -1)
     * @param unitFlag unit suffix, e.g. "MB"; blank or unknown returns -1
     * @return size in GB, or -1 when the input is invalid
     */
    public static long convertToGB(long size, String unitFlag){
        if(size < 0){
            return -1L;
        }
        if(isNotBlank(unitFlag)){
            StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase());
            if(null != storeUnit){
                return storeUnit.toGB(size);
            }
        }
        return -1L;
    }

    /**
     * Convert a size with the given unit suffix to megabytes.
     * @return size in MB, or -1 when the input is invalid
     */
    public static long convertToMB(long size, String unitFlag){
        if(size < 0){
            return -1L;
        }
        if(isNotBlank(unitFlag)){
            StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase());
            if(null != storeUnit){
                return storeUnit.toMB(size);
            }
        }
        return -1L;
    }

    /**
     * Convert a size with the given unit suffix to bytes.
     * @return size in bytes, or -1 when the input is invalid
     */
    public static long convertToByte(long size, String unitFlag){
        if(size < 0){
            return -1L;
        }
        if(isNotBlank(unitFlag)){
            StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase());
            if(null != storeUnit){
                return storeUnit.toB(size);
            }
        }
        return -1L;
    }

    /**
     * Stdlib replacement for commons-lang StringUtils.isNotBlank (drops the
     * third-party dependency).
     */
    private static boolean isNotBlank(String value){
        return value != null && !value.trim().isEmpty();
    }

    public enum StoreUnit {
        /**
         * byte
         */
        B {
            @Override
            public long toB(long s){
                return s;
            }

            @Override
            public long toKB(long s){
                return s/(C1/C0);
            }

            @Override
            public long toMB(long s) {
                return s/(C2/C0);
            }

            @Override
            public long toGB(long s) {
                return s/(C3/C0);
            }

            @Override
            public long toTB(long s) {
                return s/(C4/C0);
            }
        },
        /**
         * kb
         */
        KB{
            @Override
            public long toB(long s){
                return x(s, C1/C0, Long.MAX_VALUE/(C1/C0));
            }

            @Override
            public long toKB(long s){
                return s;
            }

            @Override
            public long toMB(long s) {
                return s/(C2/C1);
            }

            @Override
            public long toGB(long s) {
                return s/(C3/C1);
            }

            @Override
            public long toTB(long s) {
                // BUG FIX: was s/(C4/C0), the bytes->TB divisor; KB->TB must divide by C4/C1
                return s/(C4/C1);
            }
        },
        MB{
            @Override
            public long toB(long s){
                return x(s, C2/C0, Long.MAX_VALUE/(C2/C0));
            }

            @Override
            public long toKB(long s){
                return x(s, C2/C1, Long.MAX_VALUE/(C2/C1));
            }

            @Override
            public long toMB(long s) {
                return s;
            }

            @Override
            public long toGB(long s) {
                return s/(C3/C2);
            }

            @Override
            public long toTB(long s) {
                return s/(C4/C2);
            }
        },
        GB{
            @Override
            public long toB(long s){
                return x(s, C3/C0, Long.MAX_VALUE/(C3/C0));
            }

            @Override
            public long toKB(long s){
                return x(s, C3/C1, Long.MAX_VALUE/(C3/C1));
            }

            @Override
            public long toMB(long s) {
                return x(s, C3/C2, Long.MAX_VALUE/(C3/C2));
            }

            @Override
            public long toGB(long s) {
                return s;
            }

            @Override
            public long toTB(long s) {
                return s/(C4/C3);
            }
        },
        TB{
            @Override
            public long toB(long s){
                return x(s, C4/C0, Long.MAX_VALUE/(C4/C0));
            }

            @Override
            public long toKB(long s){
                return x(s, C4/C1, Long.MAX_VALUE/(C4/C1));
            }

            @Override
            public long toMB(long s) {
                return x(s, C4/C2, Long.MAX_VALUE/(C4/C2));
            }

            @Override
            public long toGB(long s) {
                return x(s, C4/C3, Long.MAX_VALUE/(C4/C3));
            }

            @Override
            public long toTB(long s) {
                return s;
            }
        };

        // Abstract instead of AbstractMethodError-throwing bodies: the compiler
        // now enforces that every constant implements all conversions
        public abstract long toB(long s);

        public abstract long toKB(long s);

        public abstract long toMB(long s);

        public abstract long toGB(long s);

        public abstract long toTB(long s);
    }

    /**
     * Multiply d by m, saturating at Long.MAX_VALUE / Long.MIN_VALUE when |d|
     * exceeds the overflow bound.
     */
    static long x(long d, long m, long over){
        if(d > over){
            return Long.MAX_VALUE;
        }
        if(d < -over){
            return Long.MIN_VALUE;
        }
        return d * m;
    }

    // Byte counts for B, KB, MB, GB, TB
    static final long C0 = 1L;
    static final long C1 = C0 * 1024L;
    static final long C2 = C1 * 1024L;
    static final long C3 = C2 * 1024L;
    static final long C4 = C3 * 1024L;

}
package com.webank.wedatasphere.streamis.jobmanager.log.server;

import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.StreamisJobLogStorage;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.StorageThresholdDriftPolicy;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.RoundRobinLoadBalancer;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.SimpleLoadBalancer;
import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils.MemUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.linkis.common.conf.BDPConfiguration;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URL;
import java.util.Objects;

/**
 * Tests for the job log storage: context space queries, weight calculation and
 * an end-to-end write through the storage.
 */
public class JobLogStorageTest {

    private static final Logger LOG = LoggerFactory.getLogger(JobLogStorageTest.class);

    /**
     * Build a context over the test classpath root and print its space figures.
     */
    @Test
    public void storageContext() throws IOException {
        URL url = JobLogStorageTest.class.getResource("/");
        if (null != url){
            JobLogStorageContext context = new JobLogStorageContext(url.getPath(), 1.0d);
            System.out.println("disk total(bytes): " + context.getTotalSpace());
            System.out.println("disk total(gb): " + MemUtils.convertToGB(context.getTotalSpace(), "B"));
            System.out.println("disk usable(bytes): " + context.getUsableSpace());
            System.out.println("disk usable(gb): " + MemUtils.convertToGB(context.getUsableSpace(), "B"));
        }
    }

    /**
     * Compute normalized weights in [1, maxNormalizeWt] for each candidate
     * context from its free space and score.
     */
    @Test
    public void calculateWeight() throws IOException {
        JobLogStorageContext candidate1 = new JobLogStorageContext(Objects.requireNonNull(JobLogStorage.class.getResource("/"))
                .getPath(), 1.0d);
        JobLogStorageContext candidate2 = new JobLogStorageContext(Objects.requireNonNull(JobLogStorage.class.getResource("/"))
                .getPath(), 1.0d);
        JobLogStorageContext[] contexts = new JobLogStorageContext[]{candidate1, candidate2};
        double[] weights = new double[contexts.length];
        int maxNormalizeWt = StreamJobLogConfig.STORAGE_CONTEXT_MAX_WEIGHT.getValue();
        double storageThreshold = StreamJobLogConfig.STORAGE_THRESHOLD.getValue();
        if (maxNormalizeWt < 1){
            maxNormalizeWt = 1;
        }
        double maxWeight = Double.MIN_VALUE;
        double minWeight = Double.MAX_VALUE;
        int i = 0;
        for (; i < weights.length; i++) {
            // BUG FIX: index with i — the original read contexts[0] on every
            // iteration, so only the first context was ever evaluated
            JobLogStorageContext context = contexts[i];
            long usableSpace = context.getUsableSpace();
            long totalSpace = context.getTotalSpace();
            double usage = (double)(totalSpace - usableSpace) / (double)totalSpace;
            double weight = 0d;
            if (usage >= storageThreshold){
                LOG.info("The usage of storage context:[{}] reach the threshold: {} > {}, set the weight of it to 0",
                        context.getStorePath(), usage, storageThreshold);
            } else {
                long freeSpaceInGB = MemUtils.convertToGB(usableSpace, "B");
                if (freeSpaceInGB <= 0) {
                    freeSpaceInGB = 1;
                }
                weight = context.getScore() * (double) freeSpaceInGB;
            }
            weights[i] = weight;
            if (weight > maxWeight){
                maxWeight = weight;
            }
            if (weight < minWeight){
                minWeight = weight;
            }
        }
        double sub = maxWeight - minWeight;
        i = i - 1;
        for (; i >= 0; i--){
            // BUG FIX: min-max normalization divides by the weight range; the
            // original multiplied by it, producing wildly out-of-range weights
            weights[i] = (sub > 0 ? (maxNormalizeWt - 1) * (weights[i] - minWeight) / sub : 0) + 1;
        }
        System.out.println(StringUtils.join(weights, '|'));
    }

    /**
     * End-to-end: init the storage, write one line into a bucket, then destroy.
     */
    @Test
    public void startLogStorage() throws Exception {
        BDPConfiguration.set("wds.stream.job.log.storage.context.paths", Objects.requireNonNull(JobLogStorage.class.getResource("/"))
                .getPath());
        JobLogStorage storage = createJobLogStorage();
        storage.init();
        JobLogBucket bucket = storage.getOrCreateBucket("hadoop", "test-app", new JobLogBucketConfig());
        bucket.getBucketStorageWriter().write("Hello world");
        // Give the asynchronous writer time to flush before tearing down
        Thread.sleep(1000);
        storage.destroy();
    }

    /**
     * Assemble a storage with both balancers and the threshold drift policy.
     */
    private JobLogStorage createJobLogStorage(){
        StreamisJobLogStorage jobLogStorage = new StreamisJobLogStorage();
        jobLogStorage.addLoadBalancer(new RoundRobinLoadBalancer());
        jobLogStorage.addLoadBalancer(new SimpleLoadBalancer());
        jobLogStorage.setBucketDriftPolicy(new StorageThresholdDriftPolicy());
        return jobLogStorage;
    }
}
org.apache.linkis + linkis-common + + + commons-io + commons-io + 2.7 + compile + + + commons-lang + commons-lang + + + jakarta.servlet + jakarta.servlet-api + 4.0.4 + compile + + + com.webank.wedatasphere.streamis + streamis-job-launcher-linkis + ${streamis.version} + compile + + + javax.validation + validation-api + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamAlertMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamAlertMapper.java new file mode 100644 index 000000000..9b2d56fe4 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamAlertMapper.java @@ -0,0 +1,29 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.dao; + + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamAlertRecord; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface StreamAlertMapper { + + void insert(StreamAlertRecord streamAlertRecord); + + List getAlertByJobIdAndVersion(@Param("username") String username, @Param("jobId") Long jobId, @Param("versionId") Long versionId); +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java new file mode 100644 index 000000000..5ef2f1d15 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java @@ -0,0 +1,78 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.dao; + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.*; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.QueryJobListVo; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.VersionDetailVo; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + + +public interface StreamJobMapper { + + List getJobLists(@Param("projectName") String projectName, @Param("userName") String userName, @Param("name") String name, + @Param("status") Integer status, @Param("createBy") String createBy); + + List getJobVersionDetails(@Param("jobId") Long jobId); + + StreamJob getJobById(@Param("jobId") Long jobId); + + List getJobByName(@Param("jobName") String jobName); + + List getJobVersions(@Param("jobId") Long jobId); + + /** + * Get the latest job version + * @param jobId job id + * @return job version + */ + StreamJobVersion getLatestJobVersion(@Param("jobId") Long jobId); + + StreamJobVersion getJobVersionById(@Param("jobId") Long jobId, @Param("version") String version); + + void insertJob(StreamJob streamJob); + + void insertJobVersion(StreamJobVersion streamJobVersion); + + void updateJob(StreamJob streamJob); + + List getJobListsByProjectName(String projectName); + + VersionDetailVo getVersionDetail(@Param("jobId") Long jobId, @Param("version") String version); + + void insertJobVersionFiles(StreamJobVersionFiles jobVersionFiles); + + List getStreamJobVersionFiles(@Param("jobId") Long jobId, @Param("jobVersionId") Long jobVersionId); + + StreamJob getCurrentJob(@Param("projectName")String projectName, @Param("jobName")String jobName); + + /** + * Query and lock current job + * @param projectName project name + * @param jobName job name + * @return stream job + */ + StreamJob queryAndLockJobInCondition(@Param("projectName")String projectName, @Param("jobName")String jobName); + + /** + * Query and lock by job id + * @param jobId job id + 
* @return stream job + */ + StreamJob queryAndLockJobById(@Param("jobId")Long jobId); +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamTaskMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamTaskMapper.java new file mode 100755 index 000000000..2eae1e954 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamTaskMapper.java @@ -0,0 +1,85 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.dao; + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + + +public interface StreamTaskMapper { + + void insertTask(StreamTask streamTask); + + void updateTask(StreamTask streamTask); + + /** + * Update task which in specific status + * @param streamTask stream task + * @param status status + */ + int updateTaskInStatus(@Param("task")StreamTask streamTask, @Param("status")Integer status); + + List getByJobVersionId(@Param("jobVersionId") Long jobVersionId, @Param("version") String version); + + /** + * Get latest task by job version id + * @param jobVersionId job version id + * @param version version number + * @return stream task + */ + StreamTask getLatestByJobVersionId(@Param("jobVersionId") Long jobVersionId, @Param("version") String version); + + /** + * Get the latest task by job id + * @param jobId job id + * @return stream task + */ + StreamTask getLatestByJobId(@Param("jobId") Long jobId); + + /** + * Get the latest task(launched) by job id + * @param jobId job id + * @return stream task + */ + StreamTask getLatestLaunchedById(@Param("jobId") Long jobId); + /** + * Get earlier task list by job id + * @param jobId job id + * @param count the max number of task + * @return + */ + List getEarlierByJobId(@Param("jobId") Long jobId, @Param("count") Integer count); + + StreamTask getRunningTaskByJobId(@Param("jobId") Long jobId); + + StreamTask getTaskById(@Param("id") Long id); + + List getTasksByJobIdAndJobVersionId(@Param("jobId") Long jobId, @Param("jobVersionId") Long jobVersionId); + + List getTasksByStatus(List status); + + String getTask(@Param("jobId") Long jobId, @Param("version") String version); + + /** + * Get status info of tasks by job ids + * @param jobIds job ids + * @return list + */ + List getStatusInfoByJobIds(@Param("jobIds")List jobIds); + +} diff --git 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamAlertMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamAlertMapper.xml new file mode 100644 index 000000000..9dfb1c2d9 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamAlertMapper.xml @@ -0,0 +1,45 @@ + + + + + + + + + + + + + + + + + + + + + + + insert into linkis_stream_alert_record (alert_level,alert_user,alert_msg,job_id,job_version_id,task_id,create_time,status,error_msg) values + (#{alertLevel},#{alertUser},#{alertMsg},#{jobId},#{jobVersionId},#{taskId},#{createTime},#{status},#{errorMsg}) + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml new file mode 100644 index 000000000..dc7d7e5f7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml @@ -0,0 +1,230 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + `id`,`project_name`, `workspace_name`,`name`,`create_by`, `create_time`,`label`,`description`,`submit_user`,`job_type`, `current_version` + + + + `id`,`job_id`,`version`,`source`,`job_content`,`comment`,`create_time`,`create_by` + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + INSERT INTO 
linkis_stream_job(`project_name`,`name`,`create_by`,`label`,`description`,`job_type`,`submit_user`, `current_version`, `create_time`) + VALUES(#{projectName},#{name},#{createBy},#{label},#{description},#{jobType},#{submitUser},#{currentVersion, jdbcType=VARCHAR},#{createTime}) + + + + INSERT INTO linkis_stream_job_version(`job_id`,`version`,`source`,`job_content`,`create_time`,`create_by`) + VALUES(#{jobId},#{version},#{source},#{jobContent},#{createTime},#{createBy}) + + + + + INSERT INTO linkis_stream_job_version_files(`job_id`,`job_version_id`,`file_name`,`version`,`store_path`,`store_type`,`create_time`,`create_by`) + VALUES + (#{jobId},#{jobVersionId},#{fileName},#{version},#{storePath},#{storeType},#{createTime},#{createBy}) + + + + + + UPDATE linkis_stream_job + + + label=#{label}, + + + description=#{description}, + + + current_version=#{currentVersion} + + + WHERE id=#{id} + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml new file mode 100644 index 000000000..c3c39bc06 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml @@ -0,0 +1,195 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + `id`,`job_version_id`,`job_id`,`status`,`start_time`,`last_update_time`,`err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info` + + + + INSERT INTO linkis_stream_task(`job_version_id`,`job_id`,`version`,`status`,`start_time`,`last_update_time`,`err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`) + 
VALUES(#{jobVersionId},#{jobId},#{version},#{status},#{startTime},#{lastUpdateTime},#{errDesc},#{submitUser},#{linkisJobId},#{linkisJobInfo}) + + + + UPDATE linkis_stream_task + + + status=#{status}, + + + last_update_time=#{lastUpdateTime}, + + + linkis_job_id=#{linkisJobId}, + + + linkis_job_info=#{linkisJobInfo}, + + + err_desc=#{errDesc}, + + + WHERE id=#{id} + + + + UPDATE linkis_stream_task + + + status=#{task.status}, + + + last_update_time=#{task.lastUpdateTime}, + + + linkis_job_id=#{task.linkisJobId}, + + + linkis_job_info=#{task.linkisJobInfo}, + + + err_desc=#{task.errDesc}, + + + WHERE id=#{task.id} AND status = #{status}; + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java new file mode 100644 index 000000000..41032c6c8 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java @@ -0,0 +1,147 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + +import javax.validation.constraints.NotBlank; +import java.util.HashMap; +import java.util.Map; + +/** + * Created by v_wbyynie on 2021/9/16. + */ +public class MetaJsonInfo { + + private String workspaceName; + + /** + * 项目名 + */ + @NotBlank(message = "projectName is null") + private String projectName; + + /** + * 作业名 + */ + @NotBlank(message = "jobName is null") + private String jobName; + + /** + * 目前只支持flink.sql、flink.jar + */ + @NotBlank(message = "jobType is null") + private String jobType; + + private String comment; + + /** + * 应用标签 + */ + private String tags; + + /** + * 作业描述 + */ + private String description; + + + private Map jobContent; + + /** + * Job configuration + */ + private Map jobConfig; + private String metaInfo; + + public String getMetaInfo() { + return metaInfo; + } + + public void setMetaInfo(String metaInfo) { + this.metaInfo = metaInfo; + } + + public String getWorkspaceName() { + return workspaceName; + } + + public void setWorkspaceName(String workspaceName) { + this.workspaceName = workspaceName; + } + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getJobType() { + return jobType; + } + + public void setJobType(String jobType) { + this.jobType = jobType; + } + + public String getTags() { + return tags; + } + + public void setTags(String tags) { + this.tags = tags; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public Map getJobContent() { + return jobContent; + } + + public void setJobContent(Map jobContent) { + this.jobContent = jobContent; + } + + public String getComment() { + return comment; 
+ } + + public void setComment(String comment) { + this.comment = comment; + } + + public Map getJobConfig() { + return jobConfig; + } + + public void setJobConfig(Map jobConfig) { + this.jobConfig = jobConfig; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamAlertRecord.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamAlertRecord.java new file mode 100644 index 000000000..c379217a8 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamAlertRecord.java @@ -0,0 +1,107 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + +import java.util.Date; + + +public class StreamAlertRecord { + + private Long id; + + private Long jobId; + + private String alertLevel; + + private String alertUser; + + private String alertMsg; + + private Long jobVersionId; + + private Long taskId; + + private Date createTime; + + private int status; + + private String errorMsg; + + public String getErrorMsg() { + return errorMsg; + } + + public void setErrorMsg(String errorMsg) { + this.errorMsg = errorMsg; + } + + public Long getJobVersionId() { + return jobVersionId; + } + + public void setJobVersionId(Long jobVersionId) { + this.jobVersionId = jobVersionId; + } + + public Long getTaskId() { + return taskId; + } + + public void setTaskId(Long taskId) { + this.taskId = taskId; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public int getStatus() { + return status; + } + + public void setStatus(int status) { + this.status = status; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + 
} + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getAlertLevel() { + return alertLevel; + } + + public void setAlertLevel(String alertLevel) { + this.alertLevel = alertLevel; + } + + public String getAlertUser() { + return alertUser; + } + + public void setAlertUser(String alertUser) { + this.alertUser = alertUser; + } + + public String getAlertMsg() { + return alertMsg; + } + + public void setAlertMsg(String alertMsg) { + this.alertMsg = alertMsg; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java new file mode 100644 index 000000000..a86a612ca --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java @@ -0,0 +1,125 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + +import java.util.Date; + + +public class StreamJob { + private Long id; + private Long workspaceName; + private String name; + private String projectName; + private String jobType; + private String createBy; + private String label; + private String description; + private String submitUser; + private Date createTime; + /** + * Current version tab in used + */ + private String currentVersion; + + public Long getWorkspaceName() { + return workspaceName; + } + + public void setWorkspaceName(Long workspaceName) { + this.workspaceName = workspaceName; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public String getLabel() { + return label; + } + + public void setLabel(String label) { + this.label = label; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getSubmitUser() { + return submitUser; + } + + public void setSubmitUser(String submitUser) { + this.submitUser = submitUser; + } + + public String getJobType() { + return jobType; + } + + public void setJobType(String jobType) { + this.jobType = jobType; + } + + public String getCurrentVersion() { + return currentVersion; + } + + public void setCurrentVersion(String currentVersion) { + this.currentVersion = currentVersion; + 
} +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java new file mode 100755 index 000000000..5e08bc78f --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java @@ -0,0 +1,94 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + + +import java.util.Date; + +public class StreamJobVersion { + private Long id; + private Long jobId; + private String version; + private String source; + private String jobContent; + private String comment; + private Date createTime; + private String createBy; + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getJobContent() { + return jobContent; + } + + public void setJobContent(String jobContent) { + this.jobContent = jobContent; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java new file mode 100644 index 000000000..3f0f83187 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java @@ -0,0 +1,121 @@ +/* + * Copyright 2021 WeBank + 
* Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + +import java.util.Date; + +/** + * Created by v_wbyynie on 2021/9/16. + */ +public class StreamJobVersionFiles implements StreamisFile { + + private Long id; + private Long jobId; + private Long jobVersionId; + private String fileName; + /** + * 文件版本号,由用户上传时指定的 + */ + private String version; + /** + * '如:{"resource":"22edar22", "version": "v0001"}', + */ + private String storePath; + /** + * '存储类型,一般就是bml', + */ + private String storeType = StreamisFile.BML_STORE_TYPE; + private Date createTime; + private String createBy; + + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Long getJobVersionId() { + return jobVersionId; + } + + public void setJobVersionId(Long jobVersionId) { + this.jobVersionId = jobVersionId; + } + + @Override + public String getFileName() { + return fileName; + } + + public void setFileName(String fileName) { + this.fileName = fileName; + } + + @Override + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + @Override + public String getStorePath() { + return storePath; + } + + public void setStorePath(String storePath) { + this.storePath = storePath; + } + + @Override + 
public String getStoreType() { + return storeType; + } + + public void setStoreType(String storeType) { + this.storeType = storeType; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + @Override + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamProject.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamProject.java new file mode 100755 index 000000000..4105bf936 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamProject.java @@ -0,0 +1,56 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + + +public class StreamProject { + private Long id; + private Long workspaceId; + private String name; + private String createBy; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + public void setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java new file mode 100755 index 000000000..ac4064e3c --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java @@ -0,0 +1,135 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + +import java.util.Calendar; +import java.util.Date; + + +public class StreamTask { + private Long id; + private Long jobVersionId; + private Long jobId; + private String submitUser; + private Date startTime; + private Date lastUpdateTime; + private String linkisJobId; + private String linkisJobInfo; + private String errDesc; + private String version; + private Integer status; + + public StreamTask(){ + Calendar calendar = Calendar.getInstance(); + this.lastUpdateTime = calendar.getTime(); + this.startTime = calendar.getTime(); + } + + public StreamTask(Long jobId, Long jobVersionId, String version, String submitUser){ + this(); + this.jobId = jobId; + this.jobVersionId = jobVersionId; + this.version = version; + this.submitUser = submitUser; + } + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getLastUpdateTime() { + return lastUpdateTime; + } + + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public String getErrDesc() { + return errDesc; + } + + public void setErrDesc(String errDesc) { + this.errDesc = errDesc; + } + + public Long getJobVersionId() { + return jobVersionId; + } + + public void setJobVersionId(Long jobVersionId) { + this.jobVersionId = jobVersionId; + } + + public Integer getStatus() { + return status; + } + + public void setStatus(Integer status) { + this.status = status; + } + + public String getSubmitUser() { + return submitUser; + } + + public void setSubmitUser(String submitUser) 
{ + this.submitUser = submitUser; + } + + public String getLinkisJobId() { + return linkisJobId; + } + + public void setLinkisJobId(String linkisJobId) { + this.linkisJobId = linkisJobId; + } + + public String getLinkisJobInfo() { + return linkisJobInfo; + } + + public void setLinkisJobInfo(String linkisJobInfo) { + this.linkisJobInfo = linkisJobInfo; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamisFile.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamisFile.java new file mode 100644 index 000000000..9f85630ea --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamisFile.java @@ -0,0 +1,35 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity; + +/** + * Created by enjoyyin on 2021/9/23. 
+ */ +public interface StreamisFile { + + String BML_STORE_TYPE = "bml"; + + String getFileName(); + + String getVersion(); + + String getStorePath(); + + String getStoreType(); + + String getCreateBy(); + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java new file mode 100644 index 000000000..1e101a8a4 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java @@ -0,0 +1,56 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +import com.fasterxml.jackson.annotation.JsonInclude; + +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class ExecResultVo extends ScheduleResultVo { + + /** + * Job id + */ + private Long jobId; + + /** + * Task id + */ + private Long taskId; + + public ExecResultVo(){ + + } + + public ExecResultVo(Long jobId, Long taskId){ + this.jobId = jobId; + this.taskId = taskId; + } + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Long getTaskId() { + return taskId; + } + + public void setTaskId(Long taskId) { + this.taskId = taskId; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java new file mode 100644 index 000000000..f01f301f3 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java @@ -0,0 +1,205 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo; + +import java.util.List; + +public class JobDetailsVo { + + private List realTimeTraffic; + private List dataNumber; + private List loadCondition; + private LinkisJobInfo linkisJobInfo; + + public LinkisJobInfo getLinkisJobInfo() { + return linkisJobInfo; + } + + public void setLinkisJobInfo(LinkisJobInfo linkisJobInfo) { + this.linkisJobInfo = linkisJobInfo; + } + + public List getRealTimeTraffic() { + return realTimeTraffic; + } + + public void setRealTimeTraffic(List realTimeTraffic) { + this.realTimeTraffic = realTimeTraffic; + } + + public List getDataNumber() { + return dataNumber; + } + + public void setDataNumber(List dataNumber) { + this.dataNumber = dataNumber; + } + + public List getLoadCondition() { + return loadCondition; + } + + public void setLoadCondition(List loadCondition) { + this.loadCondition = loadCondition; + } + + public static class RealTimeTrafficDTO { + private String sourceKey; + private String sourceSpeed; + private String transformKey; + private String transformSpeed; + private String sinkKey; + private String sinkSpeed; + + public String getSourceKey() { + return sourceKey; + } + + public void setSourceKey(String sourceKey) { + this.sourceKey = sourceKey; + } + + public String getSourceSpeed() { + return sourceSpeed; + } + + public void setSourceSpeed(String sourceSpeed) { + this.sourceSpeed = sourceSpeed; + } + + public String getTransformKey() { + return transformKey; + } + + public void setTransformKey(String transformKey) { + this.transformKey = transformKey; + } + + public String getTransformSpeed() { + return transformSpeed; + } + + public void setTransformSpeed(String transformSpeed) { + this.transformSpeed = transformSpeed; + } + + public String getSinkKey() { + return sinkKey; + } + + public void setSinkKey(String sinkKey) { + this.sinkKey = sinkKey; + } + + 
public String getSinkSpeed() { + return sinkSpeed; + } + + public void setSinkSpeed(String sinkSpeed) { + this.sinkSpeed = sinkSpeed; + } + } + + public static class DataNumberDTO { + private String dataName; + private Integer dataNumber; + + public String getDataName() { + return dataName; + } + + public void setDataName(String dataName) { + this.dataName = dataName; + } + + public Integer getDataNumber() { + return dataNumber; + } + + public void setDataNumber(Integer dataNumber) { + this.dataNumber = dataNumber; + } + } + + public static class LoadConditionDTO { + private String type; + private String host; + private String memory; + private String totalMemory; + private String gcTotalTime; + private String gcLastTime; + private String gcLastConsume; + + public String getType() { + return type; + } + + public void setType(String type) { + this.type = type; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + + public String getGcTotalTime() { + return gcTotalTime; + } + + public void setGcTotalTime(String gcTotalTime) { + this.gcTotalTime = gcTotalTime; + } + + public String getGcLastTime() { + return gcLastTime; + } + + public void setGcLastTime(String gcLastTime) { + this.gcLastTime = gcLastTime; + } + + public String getGcLastConsume() { + return gcLastConsume; + } + + public void setGcLastConsume(String gcLastConsume) { + this.gcLastConsume = gcLastConsume; + } + + public String getMemory() { + return memory; + } + + public void setMemory(String memory) { + this.memory = memory; + } + + public String getTotalMemory() { + return totalMemory; + } + + public void setTotalMemory(String totalMemory) { + this.totalMemory = totalMemory; + } + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java new file mode 100644 index 000000000..d8b611f6c --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java @@ -0,0 +1,19 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; + +/** + * Job inspect vo + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public interface JobInspectVo { + + enum Types{ + VERSION, SNAPSHOT, STATUS + } + @JsonIgnore + String getInspectName(); + + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobProgressVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobProgressVo.java new file mode 100644 index 000000000..973c06a64 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobProgressVo.java @@ -0,0 +1,37 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +public class JobProgressVo { + private Long taskId; + private Integer progress; + + public Long getTaskId() { + return taskId; + } + + public void setTaskId(Long taskId) { + this.taskId = taskId; + } + + public Integer getProgress() { + return progress; + } + + public void setProgress(Integer progress) { + this.progress = progress; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java new file mode 100644 index 000000000..59f89b0c1 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +import java.util.Locale; + +public class JobSnapshotInspectVo implements JobInspectVo{ + /** + * Path + */ + private String path; + + @Override + public String getInspectName() { + return Types.SNAPSHOT.name().toLowerCase(Locale.ROOT); + } + + public String getPath() { + return path; + } + + public void setPath(String path) { + this.path = path; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobStatusVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobStatusVo.java new file mode 100644 index 000000000..0ecca29b4 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobStatusVo.java @@ -0,0 +1,72 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +import com.fasterxml.jackson.annotation.JsonInclude; + +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class JobStatusVo { + /** + * Job id + */ + private Long jobId; + + /** + * Status name + */ + private String status; + + /** + * Status code + */ + private int statusCode; + + /** + * Message + */ + private String message; + + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public int getStatusCode() { + return statusCode; + } + + public void setStatusCode(int statusCode) { + this.statusCode = statusCode; + } + + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java new file mode 100644 index 000000000..2f06703dc --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java @@ -0,0 +1,43 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobVersion; + +import java.util.Locale; + +/** + * Version inspect + */ +public class JobVersionInspectVo implements JobInspectVo{ + + /** + * Current version + */ + private StreamJobVersion now; + + /** + * Last version + */ + private StreamJobVersion last; + + @Override + public String getInspectName() { + return Types.VERSION.name().toLowerCase(Locale.ROOT); + } + + + public StreamJobVersion getNow() { + return now; + } + + public void setNow(StreamJobVersion now) { + this.now = now; + } + + public StreamJobVersion getLast() { + return last; + } + + public void setLast(StreamJobVersion last) { + this.last = last; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java new file mode 100644 index 000000000..ac41b5d0b --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java @@ -0,0 +1,66 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +import com.fasterxml.jackson.annotation.JsonInclude; + +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class PauseResultVo extends ScheduleResultVo { + /** + * Job id + */ + private Long jobId; + + /** + * Task id + */ + private Long taskId; + + /** + * Snapshot path + */ + private String snapshotPath; + + public PauseResultVo(){ + } + + public PauseResultVo(Long jobId, Long taskId){ + this.jobId = jobId; + this.taskId = taskId; + } + public Long getJobId() { + return jobId; + } + + public void setJobId(Long jobId) { + this.jobId = jobId; + } + + public Long getTaskId() { + return taskId; + } + + public void setTaskId(Long taskId) { + this.taskId = taskId; + } + + public String getSnapshotPath() { + return snapshotPath; + } + + public void setSnapshotPath(String snapshotPath) { + this.snapshotPath = snapshotPath; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PublishRequestVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PublishRequestVo.java new file mode 100644 index 000000000..6969d1736 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PublishRequestVo.java @@ -0,0 +1,231 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache 
/**
 * Request VO used to publish a job to the Streamis job manager. Carries the
 * target project, job identity/version, execution code and the artifact's
 * storage coordinates.
 */
public class PublishRequestVo {

    private Long projectId;

    /** Execution-language type, e.g. sql or scala. */
    private String type;

    private String jobType;

    private String fileName;

    private String storePath;

    private String storeType;

    /** Execution code submitted to the job manager. */
    private String executionCode;

    /** Name of the Streamis job. */
    private String streamisJobName;

    /** Creator of the job. */
    private String createBy;

    /** Last modifier of the job. */
    private String updateBy;

    /** Description of the job. */
    private String description;

    private String source;

    private String metaInfo;

    private String bmlVersion;

    /** Label tags. */
    private String tags;

    /** User performing the publish. */
    private String publishUser;

    /**
     * Version label used to decide whether a new job-manager job must be
     * created; values look like "v0001", "v0002".
     */
    private String version;

    /** Project name; required, must match the target Streamis job manager project. */
    private String projectName;

    public Long getProjectId() {
        return projectId;
    }

    public void setProjectId(Long projectId) {
        this.projectId = projectId;
    }

    public String getType() {
        return type;
    }

    public void setType(String type) {
        this.type = type;
    }

    public String getJobType() {
        return jobType;
    }

    public void setJobType(String jobType) {
        this.jobType = jobType;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getStorePath() {
        return storePath;
    }

    public void setStorePath(String storePath) {
        this.storePath = storePath;
    }

    public String getStoreType() {
        return storeType;
    }

    public void setStoreType(String storeType) {
        this.storeType = storeType;
    }

    public String getExecutionCode() {
        return executionCode;
    }

    public void setExecutionCode(String executionCode) {
        this.executionCode = executionCode;
    }

    public String getStreamisJobName() {
        return streamisJobName;
    }

    public void setStreamisJobName(String streamisJobName) {
        this.streamisJobName = streamisJobName;
    }

    public String getCreateBy() {
        return createBy;
    }

    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }

    public String getUpdateBy() {
        return updateBy;
    }

    public void setUpdateBy(String updateBy) {
        this.updateBy = updateBy;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getSource() {
        return source;
    }

    public void setSource(String source) {
        this.source = source;
    }

    public String getMetaInfo() {
        return metaInfo;
    }

    public void setMetaInfo(String metaInfo) {
        this.metaInfo = metaInfo;
    }

    public String getBmlVersion() {
        return bmlVersion;
    }

    public void setBmlVersion(String bmlVersion) {
        this.bmlVersion = bmlVersion;
    }

    public String getTags() {
        return tags;
    }

    public void setTags(String tags) {
        this.tags = tags;
    }

    public String getPublishUser() {
        return publishUser;
    }

    public void setPublishUser(String publishUser) {
        this.publishUser = publishUser;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public String getProjectName() {
        return projectName;
    }

    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }
}
/**
 * Row VO for the job list query: identity, project/workspace context, labels,
 * creation info, status and version history summary.
 */
public class QueryJobListVo {

    private Long id;

    private String name;

    // NOTE(review): typed Long despite the "name" suffix — looks like it holds a
    // workspace id; confirm against the mapper before changing.
    private Long workspaceName;

    private String projectId;

    private String projectName;

    private String jobType;

    private String label;

    private String createBy;

    private Date createTime;

    private Integer status;

    private String version;

    /** Previous (last) version label. */
    private String lastVersion;

    private Date lastVersionTime;

    /** Number of versions ahead of the last one. */
    private Integer versionForwards;

    private String description;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Long getWorkspaceName() {
        return workspaceName;
    }

    public void setWorkspaceName(Long workspaceName) {
        this.workspaceName = workspaceName;
    }

    public String getProjectId() {
        return projectId;
    }

    public void setProjectId(String projectId) {
        this.projectId = projectId;
    }

    public String getProjectName() {
        return projectName;
    }

    public void setProjectName(String projectName) {
        this.projectName = projectName;
    }

    public String getJobType() {
        return jobType;
    }

    public void setJobType(String jobType) {
        this.jobType = jobType;
    }

    public String getLabel() {
        return label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public String getCreateBy() {
        return createBy;
    }

    public void setCreateBy(String createBy) {
        this.createBy = createBy;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public Integer getStatus() {
        return status;
    }

    public void setStatus(Integer status) {
        this.status = status;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public String getLastVersion() {
        return lastVersion;
    }

    public void setLastVersion(String lastVersion) {
        this.lastVersion = lastVersion;
    }

    public Date getLastVersionTime() {
        return lastVersionTime;
    }

    public void setLastVersionTime(Date lastVersionTime) {
        this.lastVersionTime = lastVersionTime;
    }

    public Integer getVersionForwards() {
        return versionForwards;
    }

    public void setVersionForwards(Integer versionForwards) {
        this.versionForwards = versionForwards;
    }

    public String getDescription() {
        return description;
    }

    public void setDescription(String description) {
        this.description = description;
    }
}
package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;

import java.util.HashMap;
import java.util.Map;

/**
 * Schedule result: identifier, state, error message, progress and metrics
 * of a scheduled run.
 */
public class ScheduleResultVo {

    /**
     * Schedule id
     */
    protected String scheduleId;

    /**
     * Schedule status
     */
    protected String scheduleState;

    /**
     * Error message
     */
    protected String message;

    /**
     * Progress, defaults to 0.
     */
    private double progress = 0d;

    /**
     * Metric map. Generified from the raw {@code Map} to avoid unchecked access;
     * values are arbitrary metric payloads.
     */
    private Map<String, Object> metric = new HashMap<>();

    public String getScheduleId() {
        return scheduleId;
    }

    public void setScheduleId(String scheduleId) {
        this.scheduleId = scheduleId;
    }

    public String getScheduleState() {
        return scheduleState;
    }

    public void setScheduleState(String scheduleState) {
        this.scheduleState = scheduleState;
    }

    public double getProgress() {
        return progress;
    }

    public void setProgress(double progress) {
        this.progress = progress;
    }

    public Map<String, Object> getMetric() {
        return metric;
    }

    public void setMetric(Map<String, Object> metric) {
        this.metric = metric;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + + +public class StreamTaskListVo { + private Long taskId; + private Long jobVersionId; + private String jobName; + private String creator; + private String version; + private String status; + private String startTime; + private String endTime; + private String runTime; + private String stopCause; + + private String versionContent; + + public Long getTaskId() { + return taskId; + } + + public void setTaskId(Long taskId) { + this.taskId = taskId; + } + + public Long getJobVersionId() { + return jobVersionId; + } + + public void setJobVersionId(Long jobVersionId) { + this.jobVersionId = jobVersionId; + } + + public String getJobName() { + return jobName; + } + + public void setJobName(String jobName) { + this.jobName = jobName; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getStartTime() { + return startTime; + } + + public void setStartTime(String startTime) { + this.startTime = startTime; + } + + public String getEndTime() { + return endTime; + } + + public void setEndTime(String endTime) { + this.endTime = endTime; + } + + public String getRunTime() { + return runTime; + } + + public void setRunTime(String runTime) 
{ + this.runTime = runTime; + } + + public String getStopCause() { + return stopCause; + } + + public void setStopCause(String stopCause) { + this.stopCause = stopCause; + } + + public String getVersionContent() { + return versionContent; + } + + public void setVersionContent(String versionContent) { + this.versionContent = versionContent; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/TaskCoreNumVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/TaskCoreNumVo.java new file mode 100644 index 000000000..7bb738289 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/TaskCoreNumVo.java @@ -0,0 +1,110 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +/** + * job核心指标 + */ +public class TaskCoreNumVo { + private Long projectId; + private String projectName; + //失败任务数目 + private Integer failureNum = 0; + //运行数目 + private Integer runningNum = 0; + //慢任务数目 + private Integer slowTaskNum = 0; + //告警任务 + private Integer alertNum = 0; + //等待重启数目 + private Integer waitRestartNum = 0; + //已完成数目 + private Integer successNum = 0; + //已停止数目 + private Integer stoppedNum = 0; + + public Integer getStoppedNum() { + return stoppedNum; + } + + public void setStoppedNum(Integer stoppedNum) { + this.stoppedNum = stoppedNum; + } + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public Integer getFailureNum() { + return failureNum; + } + + public void setFailureNum(Integer failureNum) { + this.failureNum = failureNum; + } + + public Integer getRunningNum() { + return runningNum; + } + + public void setRunningNum(Integer runningNum) { + this.runningNum = runningNum; + } + + public Integer getSlowTaskNum() { + return slowTaskNum; + } + + public void setSlowTaskNum(Integer slowTaskNum) { + this.slowTaskNum = slowTaskNum; + } + + public Integer getAlertNum() { + return alertNum; + } + + public void setAlertNum(Integer alertNum) { + this.alertNum = alertNum; + } + + public Integer getWaitRestartNum() { + return waitRestartNum; + } + + public void setWaitRestartNum(Integer waitRestartNum) { + this.waitRestartNum = waitRestartNum; + } + + public Integer getSuccessNum() { + return successNum; + } + + public void setSuccessNum(Integer successNum) { + this.successNum = successNum; + } + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } +} diff --git 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/VersionDetailVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/VersionDetailVo.java new file mode 100644 index 000000000..ca46845f9 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/VersionDetailVo.java @@ -0,0 +1,85 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo; + +/** + * Created by v_wbyynie on 2021/6/18. 
public class VersionDetailVo {

    private Long id;
    private String version;
    private String description;
    private String releaseTime;
    private String createBy;
    private Long projectId;
    private String projectName;

    /** @return database id of this version record */
    public Long getId() { return id; }

    public void setId(Long id) { this.id = id; }

    /** @return version label, e.g. "v00001" elsewhere in this module */
    public String getVersion() { return version; }

    public void setVersion(String version) { this.version = version; }

    public String getDescription() { return description; }

    public void setDescription(String description) { this.description = description; }

    /** @return release time, preformatted as a string */
    public String getReleaseTime() { return releaseTime; }

    public void setReleaseTime(String releaseTime) { this.releaseTime = releaseTime; }

    public String getCreateBy() { return createBy; }

    public void setCreateBy(String createBy) { this.createBy = createBy; }

    public Long getProjectId() { return projectId; }

    public void setProjectId(Long projectId) { this.projectId = projectId; }

    public String getProjectName() { return projectName; }

    public void setProjectName(String projectName) { this.projectName = projectName; }
}
package com.webank.wedatasphere.streamis.jobmanager.manager.exception;

import org.apache.linkis.common.exception.ErrorException;

/**
 * Checked exception for file-related failures in the job manager.
 * Error codes in the 30600-30700 range are defined in {@code FileExceptionManager};
 * {@code listFiles} in {@code ReaderUtils} also raises code 30034 directly.
 */
public class FileException extends ErrorException {
    /**
     * @param errCode numeric error code
     * @param desc human-readable description
     */
    public FileException(int errCode, String desc) {
        super(errCode, desc);
    }

    /**
     * Variant carrying the origin service location.
     *
     * @param errCode numeric error code
     * @param desc human-readable description
     * @param ip host of the service raising the error
     * @param port port of the service raising the error
     * @param serviceKind kind of service raising the error
     */
    public FileException(int errCode, String desc, String ip, int port, String serviceKind) {
        super(errCode, desc, ip, port, serviceKind);
    }
}
package com.webank.wedatasphere.streamis.jobmanager.manager.exception;


import java.util.HashMap;
import java.util.Map;

/**
 * Factory for {@link FileException} instances with message templates keyed by
 * error code (range 30600-30700).
 */
public class FileExceptionManager {
    //30600-30700
    // Generified from a raw HashMap; keys are string-encoded error codes,
    // values are java.util.Formatter templates.
    private static final Map<String, String> DESC = new HashMap<>(32);
    static {
        DESC.put("30600", "%s length exceeds limit(长度超出限制),Please limit input within %d characters");
        DESC.put("30601", "%s should only contains numeric/English characters and '-'(仅允许包含数字,英文和中划线)");
        DESC.put("30602", "JobType %s is not supported(不支持的任务类型)");
        DESC.put("30603", "File %s does not exist(文件不存在)");
    }

    /**
     * Builds a FileException for the given code, formatting the registered
     * template with {@code format}.
     *
     * <p>Previously an unregistered code led to {@code String.format(null, ...)}
     * and a NullPointerException; now a generic message is produced instead.
     *
     * @param errorCode one of the codes registered above
     * @param format arguments for the template's placeholders
     * @return the constructed exception (never thrown here; declared for caller compatibility)
     */
    public static FileException createException(int errorCode, Object... format) throws FileException {
        String template = DESC.get(String.valueOf(errorCode));
        if (template == null) {
            return new FileException(errorCode, "Unknown file error, code: " + errorCode);
        }
        return new FileException(errorCode, String.format(template, format));
    }
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.util; + +import javax.servlet.http.Cookie; +import javax.servlet.http.HttpServletRequest; + +public class CookieUtils { + + private static final String COOKIE_WORKSPACE_ID = "workspaceId"; + + static String getCookieValue(HttpServletRequest request, String name) { + Cookie c = getCookie(request, name); + return c == null ? null : c.getValue(); + } + + static Cookie getCookie(HttpServletRequest request, String name) { + Cookie cookies[] = request.getCookies(); + if (cookies != null) { + for (Cookie cookie : cookies) { + if (name.equals(cookie.getName())) { + return cookie; + } + } + } + return null; + } + + public static String getCookieWorkspaceId(HttpServletRequest request) { + return getCookieValue(request, COOKIE_WORKSPACE_ID); + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/DateUtils.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/DateUtils.scala new file mode 100644 index 000000000..47794eee4 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/DateUtils.scala @@ -0,0 +1,69 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
package com.webank.wedatasphere.streamis.jobmanager.manager.util

import org.slf4j.LoggerFactory

import java.text.SimpleDateFormat
import java.util.Date

/**
 * Date formatting helpers shared across the job manager.
 */
object DateUtils extends Serializable {

  private val logger = LoggerFactory.getLogger(DateUtils.getClass)
  private val serialVersionUID = 1L
  /**
   * Default short date format(默认短日期格式)
   * yyyy-MM-dd
   */
  val DATE_DEFAULT_FORMAT = "yyyy-MM-dd"

  /**
   * Order-number prefix(订单号前缀) yyyyMMddHHmmss
   */
  val DATETIME_ORDER_FORMAT = "yyyyMMddHHmmss"
  /**
   * Default datetime format(默认日期时间格式)
   * yyyy-MM-dd HH:mm:ss
   */
  val DATETIME_DEFAULT_FORMAT = "yyyy-MM-dd HH:mm:ss"
  /**
   * Default time format(默认时间格式)
   * HH:mm:ss
   */
  val TIME_DEFAULT_FORMAT = "HH:mm:ss"
  /**
   * Default date short format(默认日期短格式)
   * yyyyMMdd
   */
  val DATE_DEFAULT_SHORT_FORMAT = "yyyyMMdd"
  /**
   * Default Datetime format(默认日期时间格式化)
   *
   * WARNING: SimpleDateFormat is NOT thread-safe. This shared instance is kept
   * only for source compatibility with existing callers; formatDate below no
   * longer uses it. External callers should synchronize or create their own.
   */
  val dateTimeFormat = new SimpleDateFormat(DATETIME_DEFAULT_FORMAT)

  /**
   * Whole-minute interval between two dates, rendered as "<n>分钟".
   * Returns "" when either endpoint is null.
   */
  def intervals(start: Date, end: Date): String = {
    if (start == null || end == null) return ""
    val millisPerMinute = 1000 * 60
    val diff = end.getTime - start.getTime
    (diff / millisPerMinute) + "分钟"
  }

  /**
   * Formats a date as "yyyy-MM-dd HH:mm:ss"; returns "" for null input.
   */
  def formatDate(dateTime: Date): String = {
    if (dateTime == null) return ""
    // A fresh formatter per call: the shared `dateTimeFormat` instance is not
    // thread-safe and this object is used from concurrent request handlers.
    new SimpleDateFormat(DATETIME_DEFAULT_FORMAT).format(dateTime)
  }

}
package com.webank.wedatasphere.streamis.jobmanager.manager.util;

import org.apache.linkis.common.conf.CommonVars;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.FileUtils;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;


/**
 * Filesystem helpers for building import/export working paths and opening
 * streams under the configured base directory (wds.streamis.zip.dir).
 */
public class IoUtils {
    private static Logger logger = LoggerFactory.getLogger(IoUtils.class);
    private static final String dateFormat_day = "yyyyMMdd";
    private static final String dateFormat_time = "HHmmss";
    private static final String IOUrl = CommonVars.apply("wds.streamis.zip.dir", "/tmp").getValue();

    /**
     * Builds a working path {@code <base>/<project>/<yyyyMMdd>/<user>/<name>_<HHmmss>/<subDir>}.
     *
     * @param subDir file name whose extension (if any) is stripped for the
     *               intermediate directory name
     */
    public static String generateIOPath(String userName, String projectName, String subDir) {
        String baseIOUrl = IOUrl;
        // Guard against names without an extension: lastIndexOf returning -1
        // previously caused StringIndexOutOfBoundsException in substring().
        int dotIndex = subDir.lastIndexOf('.');
        String file = dotIndex >= 0 ? subDir.substring(0, dotIndex) : subDir;
        String dayStr = new SimpleDateFormat(dateFormat_day).format(new Date());
        String timeStr = new SimpleDateFormat(dateFormat_time).format(new Date());
        return addFileSeparator(baseIOUrl, projectName, dayStr, userName, file + "_" + timeStr, subDir);
    }

    /** Joins path segments with the platform file separator. */
    private static String addFileSeparator(String... str) {
        return Arrays.stream(str).reduce((a, b) -> a + File.separator + b).orElse("");
    }

    /**
     * Opens an output stream at {@code path}, replacing any existing file and
     * creating missing parent directories.
     *
     * @throws IOException when the old file cannot be deleted, the parent
     *                     directories cannot be created, or the file cannot be created
     */
    public static OutputStream generateExportOutputStream(String path) throws IOException {
        File file = new File(path);
        if (file.exists()) {
            logger.warn(String.format("%s is exist,delete it", path));
            boolean success = file.delete();
            if (!success) {
                throw new IOException("Failed to delete existing file: \"" + file.getAbsolutePath() + "\"");
            }
        }
        // mkdirs() returns false both on failure and when the directory already
        // exists, so only fail when the parent is still missing afterwards.
        File parentDir = file.getParentFile();
        if (parentDir != null && !parentDir.mkdirs() && !parentDir.isDirectory()) {
            throw new IOException("Failed to create parent directories for: \"" + file.getAbsolutePath() + "\"");
        }
        boolean success = file.createNewFile();
        if (!success) {
            throw new IOException("Failed to create file: \"" + file.getAbsolutePath() + "\"");
        }
        return FileUtils.openOutputStream(file, true);
    }

    /** Opens {@code path} for reading. */
    public static InputStream generateInputInputStream(String path) throws IOException {
        return new FileInputStream(path);
    }
}
package com.webank.wedatasphere.streamis.jobmanager.manager.util;


import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.MetaJsonInfo;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.PublishRequestVo;
import com.webank.wedatasphere.streamis.jobmanager.manager.exception.FileException;
import com.webank.wedatasphere.streamis.jobmanager.manager.exception.FileExceptionManager;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.util.*;

/**
 * Parses unpacked job packages: reads the meta.txt / meta.json descriptor,
 * validates names, and assembles a {@link PublishRequestVo} / {@link MetaJsonInfo}.
 *
 * <p>Stateful: {@link #parseFile(String)} and {@link #parseJson(String)} record
 * the package base path and zip name on this instance before reading.
 */
public class ReaderUtils {
    private static final String metaFileName = "meta.txt";
    private static final String metaFileJsonName = "meta.json";
    private static final String type = "type";
    private static final String fileName = "filename";
    private static final String projectName = "projectname";
    private static final String jobName = "jobname";
    private static final String tags = "tags";
    private static final String description = "description";
    private static final String defaultTagList = "prod,streamis";
    private static final String version = "v00001";
    private static final String regex = "^[a-z0-9A-Z_-]+$";
    private static final String jarRegex = "^[a-z0-9A-Z._-]+$";
    private static final int defaultLength = 64;
    private static final int descriptionLength = 128;
    private static final List<String> supportFileType = new ArrayList<>(Arrays.asList("sql"));
    private String basePath;
    private String zipName;
    private boolean hasTags = false;
    private boolean hasProjectName = false;


    private static final Logger LOG = LoggerFactory.getLogger(ReaderUtils.class);

    /**
     * Parses the meta.txt descriptor found under {@code dirPath}'s parent.
     */
    public PublishRequestVo parseFile(String dirPath) throws IOException, FileException {
        getBasePath(dirPath);
        // try-with-resources is sufficient; the previous catch-and-rethrow was a no-op.
        try (InputStream inputStream = generateInputStream(basePath)) {
            return read(inputStream);
        }
    }

    /** Null-safe toString. */
    public String setName(Object name) {
        return name == null ? null : name.toString();
    }

    /**
     * Validates a file name (extension excluded): jars may additionally contain
     * dots; other files may use letters, digits, '_' and '-'.
     */
    public boolean checkName(String fileName) {
        // Guard: names without '.' previously caused substring(0, -1) to throw.
        int dotIndex = fileName.lastIndexOf(".");
        String name = dotIndex >= 0 ? fileName.substring(0, dotIndex) : fileName;
        if (fileName.endsWith(".jar")) {
            return name.matches(jarRegex);
        }
        return name.matches(regex);
    }

    /**
     * Lists the absolute paths of package content files (excluding meta.json
     * and zip archives), validating each name.
     *
     * @throws FileException 30034 when the directory does not exist,
     *                       30601 when a file name is invalid
     */
    public List<String> listFiles(String path) throws FileException {
        String[] strArr = path.split("\\/");
        String shortPath = strArr[strArr.length - 1];
        String workPath = path.substring(0, path.length() - shortPath.length() - 1);
        File file = new File(workPath);
        if (!file.exists()) {
            throw new FileException(30034, "the file is not exists");
        }
        List<String> list = new ArrayList<>();
        File[] files = file.listFiles();
        if (files != null) {
            for (File filePath : files) {
                if (!metaFileJsonName.equals(filePath.getName()) && !filePath.getName().endsWith(".zip")) {
                    if (!checkName(filePath.getName())) {
                        throw FileExceptionManager.createException(30601, filePath.getName());
                    }
                    list.add(filePath.getAbsolutePath());
                }
            }
        }
        return list;
    }

    /** Last path segment of a '/'-separated path; blank input is returned as-is. */
    public String getFileName(String path) {
        if (StringUtils.isBlank(path)) {
            return path;
        }
        String[] strArr = path.split("\\/");
        return strArr[strArr.length - 1];
    }

    /** Serializes the job content of {@code metaJsonInfo} as compact JSON. */
    public String readAsJson(MetaJsonInfo metaJsonInfo) throws JsonProcessingException {
        ObjectMapper objectMapper = new ObjectMapper();
        return objectMapper.writeValueAsString(metaJsonInfo.getJobContent()).replaceAll("[\\t\\n\\r]", "");
    }

    /** Serializes a {version, resourceId} pair as JSON. */
    public String readAsJson(String version, String resourceId) throws JsonProcessingException {
        ObjectMapper objectMapper = new ObjectMapper();
        Map<String, String> map = new HashMap<>();
        map.put("version", version);
        map.put("resourceId", resourceId);
        return objectMapper.writeValueAsString(map);
    }


    private MetaJsonInfo readJson(BufferedReader reader) throws IOException, FileException {
        String line = null;
        StringBuilder sb = new StringBuilder();
        while ((line = reader.readLine()) != null) {
            sb.append(line);
        }
        ObjectMapper objectMapper = new ObjectMapper();
        MetaJsonInfo metaJsonInfo = objectMapper.readValue(sb.toString(), MetaJsonInfo.class);
        metaJsonInfo.setMetaInfo(objectMapper.writeValueAsString(metaJsonInfo.getJobContent()).replaceAll("[\\t\\n\\r]", ""));
        return metaJsonInfo;
    }


    /**
     * Parses the meta.json descriptor found under {@code dirPath}'s parent.
     */
    public MetaJsonInfo parseJson(String dirPath) throws IOException, FileException {
        getBasePath(dirPath);
        try (InputStream inputStream = generateInputStream(basePath);
             InputStreamReader streamReader = new InputStreamReader(inputStream);
             BufferedReader reader = new BufferedReader(streamReader)) {
            return readJson(reader);
        }
    }

    private InputStream generateInputStream(String basePath) throws IOException, FileException {
        File metaFile = new File(basePath + File.separator + metaFileJsonName);
        if (!metaFile.exists()) {
            throw new FileException(30603, metaFileJsonName);
        }
        return IoUtils.generateInputInputStream(basePath + File.separator + metaFileJsonName);
    }

    private PublishRequestVo read(InputStream inputStream) throws IOException, FileException {
        try (InputStreamReader streamReader = new InputStreamReader(inputStream);
             BufferedReader reader = new BufferedReader(streamReader)) {
            return readFile(reader);
        }
    }

    /**
     * Reads key:value lines from meta.txt into a PublishRequestVo, applying
     * defaults (tags, project name from the zip name) and extracting the SQL
     * execution code for sql-type jobs.
     */
    private PublishRequestVo readFile(BufferedReader reader) throws IOException, FileException {
        String line = null;
        PublishRequestVo publishRequestVO = new PublishRequestVo();
        while ((line = reader.readLine()) != null) {
            // Split only on the first ':' so values containing colons are kept intact
            // (the previous unbounded split truncated them at the second colon).
            String[] lineArray = line.split(":", 2);
            if (lineArray.length <= 1) {
                LOG.error("Illegal file format(文件输入格式不正确): meta.txt, {} does not have a corresponding value(没有写对应的值)", lineArray[0]);
                break;
            }
            String key = lineArray[0].trim().toLowerCase();
            String value = lineArray[1].trim();
            if (key.equals(type)) {
                if (!supportFileType.contains(value)) {
                    throw FileExceptionManager.createException(30602, value);
                }
                publishRequestVO.setType(value);
                LOG.info("Successfully set Project type(完成设置项目的类型):{}", value);
                continue;
            }
            if (key.equals(jobName)) {
                validateItem(key, value, defaultLength);
                publishRequestVO.setStreamisJobName(value);
                LOG.info("Successfully set Porject Name(完成设置项目的任务名):{}", value);
                continue;
            }
            if (key.equals(tags)) {
                hasTags = true;
                // BUGFIX: previously stored the constant key "tags" instead of the
                // value parsed from the file.
                publishRequestVO.setTags(value);
                continue;
            }
            if (key.equals(projectName)) {
                hasProjectName = true;
                validateItem(key, value, defaultLength);
                setProjectName(value, publishRequestVO);
                continue;
            }
            if (key.equals(description)) {
                if (value.length() > descriptionLength) {
                    throw FileExceptionManager.createException(30600, key, descriptionLength);
                }
                publishRequestVO.setDescription(value);
                LOG.info("Successfully set Porject description(完成项目描述的设置)");
            }
        }

        if (!hasTags) {
            publishRequestVO.setTags(defaultTagList);
        }
        if (!hasProjectName) {
            validateItem(projectName, zipName, defaultLength);
            setProjectName(zipName, publishRequestVO);
        }
        // Null-safe comparison: meta.txt may omit the "type" line entirely,
        // in which case getType() is null.
        if ("sql".equals(publishRequestVO.getType())) {
            setExecutionCode(publishRequestVO);
            LOG.info("Successfully extracted execution-code from sql file(sql文件待执行内容提取完成)");
        }
        publishRequestVO.setVersion(version);
        LOG.info("Successfully parsed meta.txt(meta.txt文件解析完成).");
        return publishRequestVO;
    }

    /** @return true when a file exists at {@code path} */
    public boolean checkFile(String path) {
        return new File(path).exists();
    }

    private void setProjectName(String name, PublishRequestVo publishRequestVO) {
        publishRequestVO.setProjectName(name);
        LOG.info("Successfully set Project name(完成设置项目的名称):{}", name);
    }

    /**
     * Loads the first .sql file under the package into the request's execution code.
     *
     * @throws FileException 30603 when no .sql file is present
     */
    private void setExecutionCode(PublishRequestVo publishRequestVO) throws IOException, FileException {
        String sqlFilePath = getSqlFileAbsolutePath(basePath);
        if (StringUtils.isBlank(sqlFilePath)) {
            throw FileExceptionManager.createException(30603, "sql");
        }
        try (InputStream inputStream = IoUtils.generateInputInputStream(sqlFilePath);
             InputStreamReader inputStreamReader = new InputStreamReader(inputStream);
             BufferedReader reader = new BufferedReader(inputStreamReader)) {
            StringBuilder sqlsb = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                sqlsb.append(line);
            }
            publishRequestVO.setExecutionCode(sqlsb.toString());
        }
    }

    /** @return absolute path of the first .sql file under {@code filePath}, or "" */
    private String getSqlFileAbsolutePath(String filePath) {
        File[] fileList = new File(filePath).listFiles();
        if (fileList != null) {
            for (File file : fileList) {
                if (file.isFile() && file.getName().endsWith(".sql")) {
                    return file.getAbsolutePath();
                }
            }
        }
        return "";
    }

    /**
     * Validates a descriptor item: length bound and allowed character set.
     *
     * @param length maximum allowed length for this item (previously the check
     *               always used defaultLength regardless of this parameter)
     */
    private void validateItem(String key, String value, int length) throws FileException {
        if (value.length() > length) {
            LOG.error("{} length exceeds limit(长度超过限制)", key);
            throw FileExceptionManager.createException(30600, key, length);
        }
        if (!isLetterDigitOrChinese(value)) {
            LOG.error("{} should only contains numeric/Chinese/English characters and '_'(仅允许包含数字,中文,英文和下划线)", key);
            throw FileExceptionManager.createException(30601, key);
        }
    }

    /** Records the zip name and its parent directory from a '/'-separated path. */
    private void getBasePath(String dirPath) {
        String[] strArr = dirPath.split("\\/");
        this.zipName = strArr[strArr.length - 1];
        this.basePath = dirPath.substring(0, dirPath.length() - zipName.length() - 1);
    }

    /** @return true when {@code str} contains only letters, digits, '_' or CJK characters */
    public static boolean isLetterDigitOrChinese(String str) {
        String regex = "^[a-zA-Z0-9_\u4e00-\u9fa5]+$";
        return str.matches(regex);
    }

}
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ZipHelper.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.util; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.BufferedReader; +import java.io.File; +import java.io.InputStreamReader; +import java.util.ArrayList; +import java.util.List; + + +public class ZipHelper { + private static final Logger logger = LoggerFactory.getLogger(ZipHelper.class); + private static final String ZIP_CMD = "zip"; + private static final String UN_ZIP_CMD = "unzip"; + private static final String RECURSIVE = "-r"; + private static final String ZIP_TYPE = ".zip"; + + public static String unzip(String dirPath)throws Exception { //"D:\\tmp\\streamis\\20210922\\johnnwang\\ab_175950\\ab.zip" + File file = new File(dirPath); + if(!file.exists()){ + logger.error("{} does not exist, can not unzip", dirPath); + throw new Exception(dirPath + " does not exist, can not unzip"); + } + //First use simple approach, call new process to zip(先用简单的方法,调用新进程进行压缩) + String[] strArr = dirPath.split(File.separator); + String shortPath = strArr[strArr.length - 1]; + String workPath = dirPath.substring(0, dirPath.length() - shortPath.length() 
- 1); + List list = new ArrayList<>(); + list.add(UN_ZIP_CMD); + String longZipFilePath = dirPath.replace(ZIP_TYPE,""); + list.add(shortPath); + ProcessBuilder processBuilder = new ProcessBuilder(list); + processBuilder.redirectErrorStream(true); + processBuilder.directory(new File(workPath)); + BufferedReader infoReader = null; + BufferedReader errorReader = null; + try{ + Process process = processBuilder.start(); + infoReader = new BufferedReader(new InputStreamReader(process.getInputStream())); + errorReader = new BufferedReader(new InputStreamReader(process.getErrorStream())); + String infoLine = null; + while((infoLine = infoReader.readLine()) != null){ + logger.info("process output: {} ", infoLine); + } + String errorLine = null; + StringBuilder errMsg = new StringBuilder(); + while((errorLine = errorReader.readLine()) != null){ + if (StringUtils.isNotEmpty(errorLine)){ + errMsg.append(errorLine).append("\n"); + } + logger.error("process error: {} ", errorLine); + } + int exitCode = process.waitFor(); + if (exitCode != 0){ + throw new Exception(errMsg.toString()); + } + }catch(final Exception e){ + logger.error("Fail to unzip file(解压缩 zip 文件失败), reason: ", e); + Exception exception = new Exception(dirPath + " to zip file failed"); + exception.initCause(e); + throw exception; + } finally { + logger.info("generate unzip catalogue{}", workPath); + IOUtils.closeQuietly(infoReader); + IOUtils.closeQuietly(errorReader); + } + return longZipFilePath; + } + + public static boolean isZip(String fileName){ + return fileName.substring(fileName.lastIndexOf('.')).equals(ZIP_TYPE); + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/pom.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/pom.xml new file mode 100755 index 000000000..f58757048 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/pom.xml @@ -0,0 +1,79 @@ + + + + + + streamis-jobmanager + 
com.webank.wedatasphere.streamis + 0.2.4 + ../../pom.xml + + 4.0.0 + + streamis-job-manager-service + + + + 8 + 8 + + + + + com.webank.wedatasphere.streamis + streamis-job-manager-base + ${jobmanager.version} + + + com.webank.wedatasphere.streamis + streamis-job-launcher-linkis + ${jobmanager.version} + + + com.webank.wedatasphere.streamis + streamis-job-launcher-service + ${jobmanager.version} + + + org.apache.linkis + linkis-module + + + org.apache.linkis + linkis-scheduler + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/JobManagerAutoConfiguration.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/JobManagerAutoConfiguration.java new file mode 100644 index 000000000..98f0267c9 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/JobManagerAutoConfiguration.java @@ -0,0 +1,52 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager; + +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.FutureScheduler; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.StreamisScheduler; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.StreamisSchedulerExecutorManager; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.TenancyConsumerManager; +import org.apache.linkis.scheduler.Scheduler; +import org.apache.linkis.scheduler.executer.ExecutorManager; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Contains the scheduler bean configuration + */ +@Configuration +public class JobManagerAutoConfiguration { + + @Bean + @ConditionalOnMissingBean(ExecutorManager.class) + public ExecutorManager executorManager(){ + return new StreamisSchedulerExecutorManager(); + } + + @Bean + @ConditionalOnMissingBean(ConsumerManager.class) + public ConsumerManager consumerManager(){ + return new TenancyConsumerManager(); + } + + @Bean(initMethod = "init") + @ConditionalOnMissingBean(FutureScheduler.class) + public FutureScheduler scheduler(ExecutorManager executorManager, ConsumerManager consumerManager){ + return new StreamisScheduler(executorManager, consumerManager); + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/SpringContextHolder.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/SpringContextHolder.java new file mode 100644 index 000000000..8e2480ecf --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/SpringContextHolder.java @@ -0,0 +1,53 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager; + +import org.springframework.beans.factory.DisposableBean; +import org.springframework.context.ApplicationContext; +import org.springframework.context.ApplicationContextAware; +import org.springframework.stereotype.Component; + +import java.util.Objects; + +@Component +public class SpringContextHolder implements ApplicationContextAware, DisposableBean { + + private static ApplicationContext applicationContext = null; + + public static ApplicationContext getApplicationContext() { + return applicationContext; + } + + public static T getBean(Class requiredType) { + if (Objects.nonNull(applicationContext)) { + return applicationContext.getBean(requiredType); + } + return null; + } + + @Override + public void setApplicationContext(ApplicationContext applicationContext) { + if (SpringContextHolder.applicationContext == null) { + SpringContextHolder.applicationContext = applicationContext; + } + } + + @Override + public void destroy() throws Exception { + applicationContext = null; + } + +} diff --git 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertConf.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertConf.java new file mode 100644 index 000000000..e0361e7c4 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertConf.java @@ -0,0 +1,33 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.alert; + + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; +import org.apache.linkis.common.conf.CommonVars; + + +public interface AlertConf { + + CommonVars ALERT_IP = CommonVars.apply("wds.streamis.alert.streamis.ip", "127.0.0.1"); + + CommonVars ALERT_SUB_SYS_ID = CommonVars.apply("wds.streamis.alert.streamis.systemid", "7495"); + + Gson COMMON_GSON = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create(); + + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertEntity.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertEntity.java new file mode 100644 index 000000000..b86d8e72d --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertEntity.java @@ -0,0 +1,150 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.alert; + +import com.google.gson.annotations.SerializedName; + +import java.util.List; + + +public class AlertEntity { + + @SerializedName("alert_info") + private String alertInfo; + + @SerializedName("alert_ip") + private String alertIp; + + @SerializedName("alert_level") + private int alertLevel; + + @SerializedName("alert_obj") + private String alertObj; + + @SerializedName("alert_title") + private String alertTitle = "Streamis application alert(Streamis流式应用系统告警)"; + + @SerializedName("remark_info") + private String remarkInfo; + + @SerializedName("sub_system_id") + private String subSystemId; + + @SerializedName("use_umg_policy") + private int useUmgPolicy = 1; + + @SerializedName("alert_reciver") + private String alertReceiver; + + @SerializedName("alert_way") + private String alertWay = "2,3"; + + public static AlertEntity newInstance(String message, List alertUsers, int alertLevel) { + AlertEntity alertEntity = new AlertEntity(); + alertEntity.setAlertInfo(message); + alertEntity.setAlertReceiver(String.join(",", alertUsers)); + alertEntity.setAlertLevel(alertLevel); + alertEntity.setAlertIp(AlertConf.ALERT_IP.getValue()); + alertEntity.setSubSystemId(AlertConf.ALERT_SUB_SYS_ID.getValue()); + return alertEntity; + } + + + public String getAlertInfo() { + return alertInfo; + } + + public void setAlertInfo(String alertInfo) { + this.alertInfo = alertInfo; + } + + public String getAlertIp() { + return alertIp; + } + + public void setAlertIp(String alertIp) { + this.alertIp = alertIp; + } + + public int getAlertLevel() { + return alertLevel; + } + + public void setAlertLevel(int alertLevel) { + this.alertLevel = alertLevel; + } + + public String getAlertObj() { + return alertObj; + } + + public void setAlertObj(String alertObj) { + this.alertObj = alertObj; + } + + public String getAlertTitle() { + return alertTitle; + } + + public void setAlertTitle(String alertTitle) { + this.alertTitle = 
alertTitle; + } + + public String getRemarkInfo() { + return remarkInfo; + } + + public void setRemarkInfo(String remarkInfo) { + this.remarkInfo = remarkInfo; + } + + public String getSubSystemId() { + return subSystemId; + } + + public void setSubSystemId(String subSystemId) { + this.subSystemId = subSystemId; + } + + public int getUseUmgPolicy() { + return useUmgPolicy; + } + + public void setUseUmgPolicy(int useUmgPolicy) { + this.useUmgPolicy = useUmgPolicy; + } + + public String getAlertReceiver() { + return alertReceiver; + } + + public void setAlertReceiver(String alertReceiver) { + this.alertReceiver = alertReceiver; + } + + public String getAlertWay() { + return alertWay; + } + + public void setAlertWay(String alertWay) { + this.alertWay = alertWay; + } + + @Override + public String toString() { + return AlertConf.COMMON_GSON.toJson(this); + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertLevel.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertLevel.java new file mode 100644 index 000000000..680e84c55 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertLevel.java @@ -0,0 +1,42 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.alert; + + +public enum AlertLevel { + + /** + * Alert level(告警的级别) + */ + CRITICAL(1, "critical"), + MAJOR(2, "major"), + MINOR(3, "minor"), + WARNING(4, "warning"), + INFO(5, "info"); + + private AlertLevel(int level, String description){ + this.level = level; + this.description = description; + } + + private int level; + private String description; + + public int getLevel(){ + return this.level; + } + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/Alerter.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/Alerter.java new file mode 100644 index 000000000..0b232dbcc --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/Alerter.java @@ -0,0 +1,27 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.alert; + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask; + +import java.util.List; + + +public interface Alerter { + + void alert(AlertLevel alertLevel, String alertMessage, List alertUsers, StreamTask streamTask); + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/DefaultAlerter.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/DefaultAlerter.java new file mode 100644 index 000000000..85f820417 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/DefaultAlerter.java @@ -0,0 +1,56 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.alert; + +import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamAlertMapper; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamAlertRecord; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +import java.util.Date; +import java.util.List; + +@Component +public class DefaultAlerter implements Alerter{ + + private static final Logger LOG = LoggerFactory.getLogger(DefaultAlerter.class); + + @Autowired + private StreamAlertMapper streamAlertMapper; + + @Override + public void alert(AlertLevel alertLevel, String alertMessage, List alertUsers, StreamTask streamTask) { + LOG.info("Alert info: Level={}, alertMessage={}, alertUsers={}", alertLevel.name(), alertMessage, + StringUtils.join(alertUsers, ",")); + for (String alertUser : alertUsers) { + StreamAlertRecord streamAlertRecord = new StreamAlertRecord(); + streamAlertRecord.setAlertLevel(alertLevel.name()); + streamAlertRecord.setAlertMsg(alertMessage); + streamAlertRecord.setAlertUser(alertUser); + streamAlertRecord.setJobVersionId(streamTask.getJobVersionId()); + streamAlertRecord.setJobId(streamTask.getJobId()); + streamAlertRecord.setCreateTime(new Date()); + streamAlertRecord.setStatus(0); + streamAlertRecord.setTaskId(streamTask.getId()); + streamAlertMapper.insert(streamAlertRecord); + } + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/ProjectPrivilegeService.java 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/ProjectPrivilegeService.java new file mode 100644 index 000000000..ef188bd25 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/ProjectPrivilegeService.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.project.service; + +import javax.servlet.http.HttpServletRequest; +import java.util.List; + +public interface ProjectPrivilegeService { + + Boolean hasReleasePrivilege(HttpServletRequest req, String projectName); + + Boolean hasEditPrivilege(HttpServletRequest req, String projectName); + + Boolean hasAccessPrivilege(HttpServletRequest req, String projectName); + + Boolean hasReleasePrivilege(HttpServletRequest req, List projectNames); + + Boolean hasEditPrivilege(HttpServletRequest req, List projectNames); + + Boolean hasAccessPrivilege(HttpServletRequest req, List projectNames); + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/impl/ProjectPrivilegeServiceImpl.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/impl/ProjectPrivilegeServiceImpl.java new file mode 100644 index 000000000..21e5d50ca --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/impl/ProjectPrivilegeServiceImpl.java @@ -0,0 +1,90 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.project.service.impl; + +import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; +import 
org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.server.conf.ServerConfiguration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.*; +import org.springframework.stereotype.Service; +import org.springframework.web.client.RestTemplate; + +import javax.servlet.http.HttpServletRequest; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +@Service("projectManagerPrivilegeServiceImpl") +public class ProjectPrivilegeServiceImpl implements ProjectPrivilegeService { + + private static final Logger LOG = LoggerFactory.getLogger(ProjectPrivilegeServiceImpl.class); + + @Autowired + RestTemplate restTemplate; + + private String url_prefix = Configuration.getGateWayURL()+ ServerConfiguration.BDP_SERVER_RESTFUL_URI().getValue()+ "/streamis/project/projectPrivilege"; + + @Override + public Boolean hasReleasePrivilege(HttpServletRequest req, String projectName) { + if(StringUtils.isBlank(projectName)) return false; + Map responseData = getResponseData("/hasReleasePrivilege?projectName="+projectName, req); + return (Boolean)Optional.ofNullable(responseData).orElse(new HashMap<>()).getOrDefault("releasePrivilege",false); + } + + @Override + public Boolean hasEditPrivilege(HttpServletRequest req, String projectName) { + if(StringUtils.isBlank(projectName)) return false; + Map responseData = getResponseData("/hasEditPrivilege?projectName="+projectName, req); + return (Boolean)Optional.ofNullable(responseData).orElse(new HashMap<>()).getOrDefault("editPrivilege",false); + } + + @Override + public Boolean hasAccessPrivilege(HttpServletRequest req, String projectName) { + if(StringUtils.isBlank(projectName)) return false; + Map responseData = getResponseData("/hasAccessPrivilege?projectName="+projectName, req); 
+ return (Boolean)Optional.ofNullable(responseData).orElse(new HashMap<>()).getOrDefault("accessPrivilege",false); + } + + @Override + public Boolean hasReleasePrivilege(HttpServletRequest req, List projectNames) { + if(CollectionUtils.isEmpty(projectNames)) return false; + Map responseData = getResponseData("/bulk/hasReleasePrivilege?projectNames="+projectNames, req); + return (Boolean)Optional.ofNullable(responseData).orElse(new HashMap<>()).getOrDefault("releasePrivilege",false); + } + + @Override + public Boolean hasEditPrivilege(HttpServletRequest req, List projectNames) { + if(CollectionUtils.isEmpty(projectNames)) return false; + Map responseData = getResponseData("/bulk/hasEditPrivilege?projectNames="+projectNames, req); + return (Boolean)Optional.ofNullable(responseData).orElse(new HashMap<>()).getOrDefault("editPrivilege",false); + } + + @Override + public Boolean hasAccessPrivilege(HttpServletRequest req, List projectNames) { + if(CollectionUtils.isEmpty(projectNames)) return false; + Map responseData = getResponseData("/bulk/hasAccessPrivilege?projectNames="+projectNames, req); + return (Boolean)Optional.ofNullable(responseData).orElse(new HashMap<>()).getOrDefault("accessPrivilege",false); + } + + private Map getResponseData(String reqPath, HttpServletRequest req){ + String url = url_prefix + reqPath; + HttpHeaders headers = new HttpHeaders(); + headers.add("Cookie",req.getHeader("Cookie")); + headers.setContentType(MediaType.APPLICATION_JSON); + HttpEntity requestEntity = new HttpEntity<>(headers); + LOG.info("obtain the operation privilege of the user,request url {}", url); + ResponseEntity responseEntity = restTemplate.exchange(url, HttpMethod.GET, requestEntity, Map.class); + LOG.info("obtain the operation privilege of the user,return response body:{}", responseEntity.getBody()); + if(responseEntity.getBody()!=null && ((int)(responseEntity.getBody().get("status")))==0){ + return (Map) responseEntity.getBody().get("data"); + }else{ + LOG.error("user 
failed to obtain the privilege information"); + return null; + } + } + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/FutureScheduler.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/FutureScheduler.java new file mode 100644 index 000000000..a2367e932 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/FutureScheduler.java @@ -0,0 +1,36 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler; + +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException; +import org.apache.linkis.scheduler.queue.SchedulerEvent; + +import java.util.concurrent.Future; +import java.util.function.Function; + +/** + * Include the method submit(SchedulerEvent, Function): Future + */ +public interface FutureScheduler { + + /** + * Submit the scheduler event and return the Future value + * @param event scheduler event + * @param resultMapping result mapping + * @return future task + */ + Future submit(SchedulerEvent event, Function resultMapping) throws StreamisScheduleException; + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisScheduler.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisScheduler.java new file mode 100644 index 000000000..feadc2bab --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisScheduler.java @@ -0,0 +1,129 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler; + +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.scheduler.AbstractScheduler; +import org.apache.linkis.scheduler.SchedulerContext; +import org.apache.linkis.scheduler.executer.ErrorExecuteResponse; +import org.apache.linkis.scheduler.executer.ExecutorManager; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.apache.linkis.scheduler.queue.Job; +import org.apache.linkis.scheduler.queue.JobInfo; +import org.apache.linkis.scheduler.queue.SchedulerEvent; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.Collections; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Future; +import java.util.function.Function; + +/** + * Generic scheduler for streamis + */ +public class StreamisScheduler extends AbstractScheduler implements FutureScheduler { + + private static final Logger LOG = LoggerFactory.getLogger(StreamisScheduler.class); + + public static class Constraints{ + + private static final CommonVars TENANCY_PATTERN = CommonVars.apply("wds.streamis.job.scheduler.consumer.tenancies", "hadoop"); + + private static final CommonVars GROUP_INIT_CAPACITY = CommonVars.apply("wds.streamis.job.scheduler.group.min.capacity", 1000); + + private static final CommonVars GROUP_MAX_CAPACITY = CommonVars.apply("wds.streamis.job.scheduler.group.max.capacity", 5000); + + private static final CommonVars GROUP_MAX_RUNNING_JOBS = CommonVars.apply("wds.streamis.job.scheduler.group.max.running-jobs", 30); + } + /** + * Scheduler context + */ + private SchedulerContext schedulerContext; + + /** + * Executor manager + */ + private ExecutorManager executorManager; + + /** + * Consumer manager + */ + private ConsumerManager consumerManager; 
+ + public StreamisScheduler(){ + + } + + public StreamisScheduler(ExecutorManager executorManager, ConsumerManager consumerManager){ + this.executorManager = executorManager; + this.consumerManager = consumerManager; + } + + @Override + public void init() { + TenancyGroupFactory groupFactory = new TenancyGroupFactory(); + String tenancies = Constraints.TENANCY_PATTERN.getValue(); + groupFactory.setTenancies(StringUtils.isNotBlank(tenancies)? Arrays.asList(tenancies.split(",")) : Collections.emptyList()); + groupFactory.setDefaultInitCapacity(Constraints.GROUP_INIT_CAPACITY.getValue()); + groupFactory.setDefaultMaxCapacity(Constraints.GROUP_MAX_CAPACITY.getValue()); + groupFactory.setDefaultMaxRunningJobs(Constraints.GROUP_MAX_RUNNING_JOBS.getValue()); + this.schedulerContext = new StreamisSchedulerContext(groupFactory, consumerManager, executorManager); + } + + @Override + public String getName() { + return "Streamis-Tenancy-Scheduler"; + } + + @Override + public SchedulerContext getSchedulerContext() { + return schedulerContext; + } + + @Override + public void submit(SchedulerEvent event) { + super.submit(event); + } + + @Override + public Future submit(SchedulerEvent event, Function resultMapping) throws StreamisScheduleException { + // Empty future + CompletableFuture completableFuture = new CompletableFuture<>(); + if (event instanceof StreamisSchedulerEvent){ + StreamisSchedulerEvent schedulerEvent = (StreamisSchedulerEvent)event; + // Set the completed future + schedulerEvent.setCompleteFuture(completableFuture); + if (event instanceof Job) { + // Invoke the prepare method + JobInfo jobInfo = ((Job) event).getJobInfo(); + try { + schedulerEvent.prepare(jobInfo); + } catch (Exception e) { + ((Job) event).transitionCompleted(new ErrorExecuteResponse(e.getMessage(), e)); + } + } + } else{ + LOG.warn("Scheduler event type {} is not support future return", event.getClass().getCanonicalName()); + completableFuture.complete(event); + } + submit(event); + return 
completableFuture.thenApply(resultMapping); + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerContext.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerContext.java new file mode 100644 index 000000000..357ceb0e7 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerContext.java @@ -0,0 +1,73 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Streamis scheduler context: holds and exposes the three collaborators the
 * Linkis scheduler framework needs (group factory, consumer manager, executor
 * manager). All three are fixed at construction time.
 */
public class StreamisSchedulerContext implements SchedulerContext {

    /**
     * Group factory
     */
    private final GroupFactory groupFactory;

    /**
     * Consumer manager
     */
    private final ConsumerManager consumerManager;

    /**
     * Executor manager
     */
    private final ExecutorManager executorManager;

    public StreamisSchedulerContext(GroupFactory groupFactory, ConsumerManager consumerManager,
                                    ExecutorManager executorManager) {
        this.groupFactory = groupFactory;
        this.consumerManager = consumerManager;
        // Wire the back-reference so the consumer manager can resolve groups/executors
        this.consumerManager.setSchedulerContext(this);
        this.executorManager = executorManager;
    }

    @Override
    public GroupFactory getOrCreateGroupFactory() {
        return groupFactory;
    }

    @Override
    public ConsumerManager getOrCreateConsumerManager() {
        return consumerManager;
    }

    @Override
    public ExecutorManager getOrCreateExecutorManager() {
        return executorManager;
    }

    @Override
    public ListenerEventBus getOrCreateSchedulerListenerBus() {
        // NOTE(review): no listener bus is provided — callers must tolerate null; confirm intended
        return null;
    }
}
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerEvent.java @@ -0,0 +1,67 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler; + +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException; +import org.apache.linkis.scheduler.queue.JobInfo; +import org.apache.linkis.scheduler.queue.SchedulerEvent; + +import java.util.concurrent.CompletableFuture; + +/** + * Scheduler event + */ +public interface StreamisSchedulerEvent extends SchedulerEvent { + + /** + * Set the complete future to listen the end of scheduler + * @param completeFuture complete future + * @param + */ + void setCompleteFuture(CompletableFuture completeFuture); + + /** + * Tenancy in event + * @param tenancy tenancy + */ + void setTenancy(String tenancy); + + String getTenancy(); + + /** + * Prepare method + * @param scheduleJob job info + */ + void prepare(JobInfo scheduleJob) throws StreamisScheduleException; + + /** + * Schedule method + * @param scheduleJob job info + */ + void schedule(JobInfo scheduleJob) throws StreamisScheduleException; + + /** + * Handle error + * @param scheduleJob job info + * @param t error stack + */ + void errorHandle(JobInfo scheduleJob, Throwable t); + /** + * Post handle + * @param scheduleJob job 
info + */ + void postHandle(JobInfo scheduleJob) throws StreamisScheduleException; +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerExecutorManager.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerExecutorManager.java new file mode 100644 index 000000000..602d0d82f --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerExecutorManager.java @@ -0,0 +1,120 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler; + +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.AbstractStreamisSchedulerEvent; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleRetryException; +import org.apache.linkis.protocol.engine.EngineState; +import org.apache.linkis.scheduler.exception.LinkisJobRetryException; +import org.apache.linkis.scheduler.executer.*; +import org.apache.linkis.scheduler.listener.ExecutorListener; +import org.apache.linkis.scheduler.queue.SchedulerEvent; +import scala.Option; +import scala.Some; +import scala.concurrent.duration.Duration; + +import java.io.IOException; +import java.util.Objects; + +/** + * Executor manager + */ +public class StreamisSchedulerExecutorManager extends ExecutorManager { + + /** + * Just hold a singleton executor + */ + private Executor singletonExecutor; + + @Override + public void setExecutorListener(ExecutorListener engineListener) { + // Empty + } + + @Override + public Executor createExecutor(SchedulerEvent event) { + return getOrCreateExecutor(); + } + + @Override + public 
Option askExecutor(SchedulerEvent event) { + return Some.apply(getOrCreateExecutor()); + } + + @Override + public Option askExecutor(SchedulerEvent event, Duration wait) { + return askExecutor(event); + } + + @Override + public Option getById(long id) { + return null; + } + + @Override + public Executor[] getByGroup(String groupName) { + return new Executor[0]; + } + + @Override + public void delete(Executor executor) { + + } + + @Override + public void shutdown() { + + } + + private Executor getOrCreateExecutor(){ + if (Objects.isNull(this.singletonExecutor)){ + synchronized (this){ + if (Objects.isNull(this.singletonExecutor)){ + this.singletonExecutor = new LocalExecutor(); + } + } + } + return this.singletonExecutor; + } + public static class LocalExecutor implements Executor{ + + @Override + public long getId() { + return 0; + } + + @Override + public ExecuteResponse execute(ExecuteRequest executeRequest) { + if (executeRequest instanceof AbstractStreamisSchedulerEvent.LocalExecuteRequest){ + try { + ((AbstractStreamisSchedulerEvent.LocalExecuteRequest) executeRequest).localExecute(); + return new SuccessExecuteResponse(); + } catch (StreamisScheduleException e) { + if (e instanceof StreamisScheduleRetryException){ + e.setErrCode(LinkisJobRetryException.JOB_RETRY_ERROR_CODE()); + } + return new ErrorExecuteResponse("Scheduling exception, scheduled job will fail or retry on the next time, message: [" + + e.getMessage() + "]", e); + } catch (Exception e){ + return new ErrorExecuteResponse("Scheduling with unknown exception, message: [" + e.getMessage() + "]", e); + } + }else{ + return new ErrorExecuteResponse("Unsupported execute request: code: [" + executeRequest.code() + "]", null); + } + } + + @Override + public EngineState state() { + return null; + } + + @Override + public ExecutorInfo getExecutorInfo() { + return null; + } + + @Override + public void close() throws IOException { + + } + } +} diff --git 
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyConsumerManager.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyConsumerManager.java new file mode 100644 index 000000000..896d1d465 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyConsumerManager.java @@ -0,0 +1,146 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler; + +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.scheduler.listener.ConsumerListener; +import org.apache.linkis.scheduler.queue.*; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroup; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutorService; + +/** + * Tenancy consumer manager + */ +public class TenancyConsumerManager extends ConsumerManager { + + private static final Logger LOG = LoggerFactory.getLogger(TenancyConsumerManager.class); + + private ConsumerListener consumerListener; + + private final Map tenancyExecutorServices = new ConcurrentHashMap<>(); + + private final Map consumerGroupMap = new ConcurrentHashMap<>(); + + /** + * Default executor service + */ + private ExecutorService defaultExecutorService; + + @Override + public void setConsumerListener(ConsumerListener consumerListener) { + this.consumerListener = consumerListener; + } + + @Override + public synchronized ExecutorService getOrCreateExecutorService() { + if (Objects.isNull(defaultExecutorService)){ + Group group = getSchedulerContext().getOrCreateGroupFactory().getOrCreateGroup(null); + if (group instanceof FIFOGroup){ + defaultExecutorService = Utils.newCachedThreadPool(((FIFOGroup) group).getMaxRunningJobs() + + + 1, + TenancyGroupFactory.GROUP_NAME_PREFIX + TenancyGroupFactory.DEFAULT_TENANCY + "-Executor-", true); + // Put the default executor into tenancy executor map + tenancyExecutorServices.put(TenancyGroupFactory.DEFAULT_TENANCY, defaultExecutorService); + } else { + throw new 
StreamisScheduleException.Runtime("Cannot construct the executor service " + + "using the default group: [" + group.getClass().getCanonicalName() + "]", null); + } + } + return this.defaultExecutorService; + } + + @Override + public Consumer getOrCreateConsumer(String groupName) { + Consumer resultConsumer = consumerGroupMap.computeIfAbsent(groupName, groupName0 -> { + Consumer consumer = createConsumer(groupName); + Group group = getSchedulerContext().getOrCreateGroupFactory().getGroup(groupName); + consumer.setGroup(group); + consumer.setConsumeQueue(new LoopArrayQueue(group)); + int maxRunningJobs = -1; + if (group instanceof FIFOGroup){ + maxRunningJobs = ((FIFOGroup)group).getMaxRunningJobs(); + } + LOG.info("Create a new consumer for group: [name: {}, maxRunningJobs: {}, initCapacity: {}, maxCapacity: {}]", + groupName, maxRunningJobs, group.getInitCapacity(), group.getMaximumCapacity()); + Optional.ofNullable(consumerListener).ifPresent(listener -> listener.onConsumerCreated(consumer)); + consumer.start(); + return consumer; + }); + if (resultConsumer instanceof FIFOUserConsumer){ + ((FIFOUserConsumer)resultConsumer).setLastTime(System.currentTimeMillis()); + } + return resultConsumer; + } + + @Override + public Consumer createConsumer(String groupName) { + Group group = getSchedulerContext().getOrCreateGroupFactory().getGroup(groupName); + return new FIFOUserConsumer(getSchedulerContext(), getOrCreateExecutorService(groupName), group); + } + + @Override + public void destroyConsumer(String groupName) { + Optional.ofNullable(this.consumerGroupMap.get(groupName)).ifPresent(consumer -> { + LOG.warn("Start to shutdown the consumer of group: [{}]", groupName); + consumerGroupMap.remove(groupName); + consumer.shutdown(); + Optional.ofNullable(consumerListener).ifPresent(listener -> listener.onConsumerDestroyed(consumer)); + LOG.warn("End to shutdown the consumer for group: [{}]", groupName); + }); + } + + @Override + public void shutdown() { + LOG.warn("Shutdown 
all the consumers which is working"); + consumerGroupMap.forEach((group, consumer) -> { + LOG.info("Shutdown consumer for group: {}, running events: {}", group, consumer.getRunningEvents()); + consumer.shutdown(); + }); + tenancyExecutorServices.forEach((tenancy, executorService) -> executorService.shutdownNow()); + } + + @Override + public Consumer[] listConsumers() { + return consumerGroupMap.values().toArray(new Consumer[0]); + } + + protected ExecutorService getOrCreateExecutorService(String groupName){ + GroupFactory groupFactory = getSchedulerContext().getOrCreateGroupFactory(); + if (groupFactory instanceof TenancyGroupFactory){ + TenancyGroupFactory tenancyGroupFactory = (TenancyGroupFactory)groupFactory; + String tenancy = tenancyGroupFactory.getTenancyByGroupName(groupName); + groupFactory.getGroup(groupName); + if (StringUtils.isNotBlank(tenancy)){ + return tenancyExecutorServices.computeIfAbsent(tenancy, tenancyName -> { + // Use the default value of max running jobs + return Utils.newCachedThreadPool(tenancyGroupFactory.getDefaultMaxRunningJobs() + 1, + TenancyGroupFactory.GROUP_NAME_PREFIX + tenancy + "-Executor-", true); + }); + } + } + return getOrCreateExecutorService(); + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyGroupFactory.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyGroupFactory.java new file mode 100644 index 000000000..2066fa141 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyGroupFactory.java @@ -0,0 +1,75 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler; + +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.scheduler.queue.AbstractGroup; +import org.apache.linkis.scheduler.queue.SchedulerEvent; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroup; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOGroupFactory; + +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Tenancy group factory e + */ +public class TenancyGroupFactory extends FIFOGroupFactory { + + public static final String GROUP_NAME_PREFIX = "Tenancy-Group-"; + + public static final String DEFAULT_TENANCY = "default"; + + private static final Pattern TENANCY_IN_GROUP_PATTERN = Pattern.compile("^" + GROUP_NAME_PREFIX + "([-_\\w\\W]+)$"); + + private List tenancies = new ArrayList<>(); + + public List getTenancies() { + return tenancies; + } + + public void setTenancies(List tenancies) { + this.tenancies = tenancies; + } + + @Override + public AbstractGroup createGroup(String groupName) { + return new FIFOGroup(groupName, getInitCapacity(groupName), getMaxCapacity(groupName)); + } + + @Override + public String getGroupNameByEvent(SchedulerEvent event) { + String tenancy = DEFAULT_TENANCY; + if (Objects.nonNull(event) && event instanceof StreamisSchedulerEvent){ + tenancy = ((StreamisSchedulerEvent) event).getTenancy(); + } + return StringUtils.isNotBlank(tenancy)?GROUP_NAME_PREFIX + tenancy : GROUP_NAME_PREFIX + 
DEFAULT_TENANCY; + } + + public String getTenancyByGroupName(String groupName){ + String tenancy = DEFAULT_TENANCY; + if (StringUtils.isNotBlank(groupName)){ + Matcher matcher = TENANCY_IN_GROUP_PATTERN.matcher(groupName); + if (matcher.find()){ + tenancy = matcher.group(1); + } + } + return tenancy; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/AbstractStreamisSchedulerEvent.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/AbstractStreamisSchedulerEvent.java new file mode 100644 index 000000000..383dba8b6 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/AbstractStreamisSchedulerEvent.java @@ -0,0 +1,263 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events; + +import com.ctc.wstx.util.StringUtil; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.StreamisSchedulerEvent; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException; +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleRetryException; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.scheduler.executer.CompletedExecuteResponse; +import org.apache.linkis.scheduler.executer.ErrorExecuteResponse; +import org.apache.linkis.scheduler.executer.ExecuteRequest; +import org.apache.linkis.scheduler.listener.JobListener; +import org.apache.linkis.scheduler.queue.Job; +import org.apache.linkis.scheduler.queue.JobInfo; +import org.apache.linkis.scheduler.queue.SchedulerEvent; +import org.apache.linkis.scheduler.queue.SchedulerEventState; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.cglib.proxy.UndeclaredThrowableException; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Abstract implement + */ +public abstract class AbstractStreamisSchedulerEvent extends Job implements StreamisSchedulerEvent { + + private static final Logger LOG = LoggerFactory.getLogger(AbstractStreamisSchedulerEvent.class); + + public static final int MAX_RETRY_NUM = 3; + + private CompletableFuture completeFuture; + + private int maxRetryNum = MAX_RETRY_NUM; + + private final AtomicBoolean initialized = new AtomicBoolean(false); + /** + * Extra message (output) + */ + private String extraMessage; + + /** + * Use schedule id instead of queue id + */ + protected String scheduleId; + + /** + * Tenancy name + */ + private String tenancy; + + /** + * ResultSet + */ + protected Map 
resultSet = new HashMap<>(); + + public AbstractStreamisSchedulerEvent(){ + setJobListener(new JobListener() { + @Override + public void onJobInited(Job job) { + // Ignore init + } + + @Override + public void onJobRunning(Job job) { + // Ignore job running + if (!initialized.get()){ + try { + prepare(getJobInfo()); + } catch (StreamisScheduleException e) { + // Convert to runtime exception + throw new StreamisScheduleException.Runtime(e.getMessage(), e.getCause()); + } + } + } + + @Override + public void onJobScheduled(Job job) { + // Ignore job scheduled + } + + @Override + public void onJobCompleted(Job job) { + setProgress(1.0f); + if (getState() == SchedulerEventState.Failed()){ + ErrorExecuteResponse response = getErrorResponse(); + try { + Throwable t = null; + if (Objects.nonNull(response)){ + t = response.t(); + extraMessage = response.message(); + if (t instanceof UndeclaredThrowableException){ + t = ((UndeclaredThrowableException)t).getUndeclaredThrowable(); + if (StringUtils.isBlank(extraMessage)){ + extraMessage = t.getMessage(); + } else { + extraMessage += t.getMessage(); + } + } + } + errorHandle(getJobInfo(), t); + }catch(Exception e){ + LOG.warn("Unable to process the error handler for scheduler event: [{}]", getName(), e); + // Ignore + } + }else { + // Empty the message + extraMessage = null; + } + if (Objects.nonNull(completeFuture)){ + completeFuture.complete(job); + } + try { + postHandle(getJobInfo()); + } catch (StreamisScheduleException e) { + // Convert to runtime exception + throw new StreamisScheduleException.Runtime(e.getMessage(), e.getCause()); + } + } + + @Override + public void onJobWaitForRetry(Job job) { + // Ignore job wait for retry + extraMessage = "Wait for the scheduler retry to schedule"; + } + }); + } + + @Override + public String getId() { + if (StringUtils.isNotBlank(this.scheduleId)){ + return scheduleId; + } + return super.getId(); + } + + @Override + public void init() throws Exception { + prepare(getJobInfo()); + } + 
+ @Override + public ExecuteRequest jobToExecuteRequest() throws Exception { + return new LocalExecuteRequest(); + } + + @Override + public String getName() { + return "streamis-schedule-event-" + getId(); + } + + @Override + public JobInfo getJobInfo() { + return new StreamisEventInfo(this); + } + + @Override + public void setCompleteFuture(CompletableFuture completeFuture) { + this.completeFuture = completeFuture; + } + + @Override + public void setTenancy(String tenancy) { + this.tenancy = tenancy; + } + + @Override + public String getTenancy() { + return tenancy; + } + + @Override + public int getMaxRetryNum() { + return maxRetryNum; + } + + public void setMaxRetryNum(int maxRetryNum){ + this.maxRetryNum = maxRetryNum; + } + + @Override + public synchronized void prepare(JobInfo jobInfo) throws StreamisScheduleException{ + if (!initialized.get()){ + prepareHandle(jobInfo); + initialized.set(true); + } + } + + @Override + public void close() throws IOException { + + } + + /** + * Prepare handle + * @param jobInfo job info + */ + protected abstract void prepareHandle(JobInfo jobInfo) throws StreamisScheduleException; + + /** + * Request to execute in local thread + */ + public class LocalExecuteRequest implements ExecuteRequest{ + + @Override + public String code() { + return null; + } + + public void localExecute() throws StreamisScheduleException, StreamisScheduleRetryException{ + try{ + schedule(getJobInfo()); + } catch (StreamisScheduleRetryException e){ + if (e.getRetryNum() > 0){ + setMaxRetryNum(e.getRetryNum()); + } + throw e; + } + } + } + + @Override + public void transitionCompleted(CompletedExecuteResponse executeCompleted) { + super.transitionCompleted(executeCompleted); + } + + /** + * Extend jobInfo + */ + public static class StreamisEventInfo extends JobInfo{ + + private AbstractStreamisSchedulerEvent event; + + public StreamisEventInfo(AbstractStreamisSchedulerEvent event) { + super(event.getId(), event.extraMessage, event.getState().toString(), 
event.getProgress(), ""); + this.event = event; + } + + public Map getResultSet() { + return event.resultSet; + } + + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/StreamisPhaseInSchedulerEvent.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/StreamisPhaseInSchedulerEvent.java new file mode 100644 index 000000000..03c38fce6 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/StreamisPhaseInSchedulerEvent.java @@ -0,0 +1,117 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events; + +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException; +import org.apache.linkis.scheduler.queue.JobInfo; + +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; + +/** + * Streamis scheduler event with different phases + */ +public class StreamisPhaseInSchedulerEvent extends AbstractStreamisSchedulerEvent{ + + + private final StateContext context; + + private final ScheduleCommand scheduleCommand; + + public StreamisPhaseInSchedulerEvent(String scheduleId, ScheduleCommand command){ + this.scheduleId = scheduleId; + this.scheduleCommand = command; + if (Objects.isNull(this.scheduleCommand)){ + throw new IllegalArgumentException("SchedulerCommand cannot be empty, please define it first"); + } + // New context + this.context = new StateContext(); + } + + @Override + public void schedule(JobInfo jobInfo) throws StreamisScheduleException { + Map resultSet = this.scheduleCommand.schedule(this.context, jobInfo); + Optional.ofNullable(resultSet).ifPresent( result -> super.resultSet.putAll(result)); + } + + @Override + public void errorHandle(JobInfo jobInfo, Throwable t) { + this.scheduleCommand.onErrorHandle(this.context, jobInfo, t); + } + + @Override + public void postHandle(JobInfo jobInfo) throws StreamisScheduleException { + this.scheduleCommand.onPostHandle(this.context, jobInfo); + } + + @Override + protected void prepareHandle(JobInfo jobInfo) throws StreamisScheduleException { + this.scheduleCommand.onPrepare(this.context, jobInfo); + } + + + /** + * Schedule command + */ + public interface ScheduleCommand { + + default void onPrepare(StateContext context, JobInfo scheduleJob) throws StreamisScheduleException{ + // No operation + } + + /** + * Schedule and return the resultSet + * @param context context + * @param jobInfo job info + * @return resultSet 
+ * @throws StreamisScheduleException + */ + Map schedule(StateContext context, JobInfo jobInfo) throws StreamisScheduleException; + + default void onErrorHandle(StateContext context, JobInfo scheduleJob, Throwable t){ + // No operation + } + + default void onPostHandle(StateContext context, JobInfo scheduleJob) throws StreamisScheduleException{ + // No operation + } + + } + + /** + * Context contains variables + */ + public static class StateContext{ + private final Map variables = new ConcurrentHashMap<>(); + + public void addVar(String name, Object value){ + variables.put(name, value); + } + + public Object getVar(String name){ + return variables.get(name); + } + + @SuppressWarnings("unchecked") + public T getVar(String name, Class type){ + Object var = variables.get(name); + return Objects.nonNull(var)? (T)var : null; + } + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/exception/StreamisScheduleException.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/exception/StreamisScheduleException.java new file mode 100644 index 000000000..67a21b03a --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/exception/StreamisScheduleException.java @@ -0,0 +1,43 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; +import org.apache.linkis.common.exception.LinkisRuntimeException; + +/** + * Schedule exception + */ +public class StreamisScheduleException extends ErrorException { + + public StreamisScheduleException(String desc, Throwable t){ + super(-1, desc); + super.initCause(t); + } + + public static class Runtime extends LinkisRuntimeException{ + public Runtime(String desc, Throwable t) { + super(-1, desc); + super.initCause(t); + } + + @Override + public ExceptionLevel getLevel() { + return ExceptionLevel.ERROR; + } + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/exception/StreamisScheduleRetryException.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/exception/StreamisScheduleRetryException.java new file mode 100644 index 000000000..2f9ac50ed --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/exception/StreamisScheduleRetryException.java @@ -0,0 +1,38 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception; + +/** + * Schedule retry exception + * + */ +public class StreamisScheduleRetryException extends StreamisScheduleException{ + + private int retryNum = 0; + + public int getRetryNum() { + return retryNum; + } + + public void setRetryNum(int retryNum) { + this.retryNum = retryNum; + } + + public StreamisScheduleRetryException(String desc, Throwable t) { + super(desc, t); + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJarTransformJobContent.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJarTransformJobContent.java new file mode 100644 index 000000000..476705968 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJarTransformJobContent.java @@ -0,0 +1,81 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity;

import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile;
import java.util.List;

/**
 * Transform-job content for a JAR-based Streamis job: the main jar and entry
 * class, the program arguments and the extra jars/resources it depends on.
 * Created by enjoyyin on 2021/9/23.
 */
public class StreamisJarTransformJobContent implements StreamisTransformJobContent {

    /** Jar file containing the main class. */
    private StreamisFile mainClassJar;

    public StreamisFile getMainClassJar() {
        return mainClassJar;
    }

    public void setMainClassJar(StreamisFile mainClassJar) {
        this.mainClassJar = mainClassJar;
    }

    /** Name of the entry class inside {@code mainClassJar}. */
    private String mainClass;

    public String getMainClass() {
        return mainClass;
    }

    public void setMainClass(String mainClass) {
        this.mainClass = mainClass;
    }

    // Program arguments. NOTE(review): raw List — element type not declared
    // here (presumably String); confirm against callers before parameterizing.
    private List args;

    public List getArgs() {
        return args;
    }

    public void setArgs(List args) {
        this.args = args;
    }

    // Dependency jar descriptors (raw List; element type unverified here).
    private List dependencyJars;

    public List getDependencyJars() {
        return dependencyJars;
    }

    public void setDependencyJars(List dependencyJars) {
        this.dependencyJars = dependencyJars;
    }

    // Jars already located on HDFS (raw List; element type unverified here).
    private List hdfsJars;

    public List getHdfsJars() {
        return hdfsJars;
    }

    public void setHdfsJars(List hdfsJars) {
        this.hdfsJars = hdfsJars;
    }

    // Additional resource files (raw List; element type unverified here).
    private List resources;

    public List getResources() {
        return resources;
    }

    public void setResources(List resources) {
        this.resources = resources;
    }
}
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisWorkflowTransformJobContent.java @@ -0,0 +1,41 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity; + +/** + * Created by enjoyyin on 2021/9/23. + */ +public class StreamisWorkflowTransformJobContent extends StreamisSqlTransformJobContent { + + private Long workflowId; + private String workflowName; + + public Long getWorkflowId() { + return workflowId; + } + + public void setWorkflowId(Long workflowId) { + this.workflowId = workflowId; + } + + public String getWorkflowName() { + return workflowName; + } + + public void setWorkflowName(String workflowName) { + this.workflowName = workflowName; + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala new file mode 100644 index 000000000..41b91e348 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala @@ -0,0 +1,100 @@ +/* + * Copyright 2021 WeBank + 
package com.webank.wedatasphere.streamis.jobmanager.manager.conf

import org.apache.linkis.common.conf.{CommonVars, Configuration, TimeType}
import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException

/**
 * Job-related configuration keys and job status definitions.
 */
object JobConf {

  val STREAMIS_DEVELOPER: CommonVars[String] = CommonVars("wds.streamis.developer", "enjoyyin,davidhua")

  val STREAMIS_DEFAULT_TENANT: CommonVars[String] = CommonVars("wds.streamis.job.tenant.default", "")

  val STREAMIS_JOB_MONITOR_ENABLE: CommonVars[Boolean] = CommonVars("wds.streamis.job.monitor.enable", true)

  val STREAMIS_JOB_PARAM_BLANK_PLACEHOLDER: CommonVars[String] = CommonVars("wds.streamis.job.param.blank.placeholder", "\u0001")

  /**
   * Gateway for stream job log module
   */
  val STREAMIS_JOB_LOG_GATEWAY: CommonVars[String] = CommonVars("wds.streamis.job.log.gateway", Configuration.getGateWayURL())

  /**
   * Path for collecting stream job log
   */
  val STREAMIS_JOB_LOG_COLLECT_PATH: CommonVars[String] = CommonVars("wds.streamis.job.log.collect.path", "/api/rest_j/v1/streamis/streamJobManager/log/collect/events")

  // Job status codes. NOTE: the numeric values are configurable, so code must
  // compare through `getValue` instead of hard-coding the defaults.
  val FLINK_JOB_STATUS_NOT_STARTED: CommonVars[Int] = CommonVars("wds.streamis.job.status.not-started", 0, "Not Started")

  val FLINK_JOB_STATUS_COMPLETED: CommonVars[Int] = CommonVars("wds.streamis.job.status.completed", 1, "Completed")

  val FLINK_JOB_STATUS_WAIT_RESTART: CommonVars[Int] = CommonVars("wds.streamis.job.status.wait-restart", 2, "Wait for restart")

  val FLINK_JOB_STATUS_ALERT_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.alert-running", 3, "Alert running")

  val FLINK_JOB_STATUS_SLOW_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.slow-running", 4, "Slow running")

  val FLINK_JOB_STATUS_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.running", 5, "running")

  val FLINK_JOB_STATUS_FAILED: CommonVars[Int] = CommonVars("wds.streamis.job.status.failed", 6, "Failed")

  val FLINK_JOB_STATUS_STOPPED: CommonVars[Int] = CommonVars("wds.streamis.job.status.stopped", 7, "Stopped")

  /**
   * Starting (middle status, before scheduling)
   */
  val FLINK_JOB_STATUS_STARTING: CommonVars[Int] = CommonVars("wds.streamis.job.status.starting", 8, "Starting")

  /**
   * Stopping (middle status, before scheduling)
   */
  val FLINK_JOB_STATUS_STOPPING: CommonVars[Int] = CommonVars("wds.streamis.job.status.stopping", 9, "Stopping")

  val STATUS_ARRAY: Array[CommonVars[Int]] = Array(FLINK_JOB_STATUS_COMPLETED, FLINK_JOB_STATUS_WAIT_RESTART, FLINK_JOB_STATUS_ALERT_RUNNING,
    FLINK_JOB_STATUS_SLOW_RUNNING, FLINK_JOB_STATUS_RUNNING, FLINK_JOB_STATUS_FAILED, FLINK_JOB_STATUS_STOPPED, FLINK_JOB_STATUS_STARTING, FLINK_JOB_STATUS_STOPPING)

  /** Statuses of a job that is still alive (not terminal). */
  val NOT_COMPLETED_STATUS_ARRAY: Array[CommonVars[Int]] = Array(FLINK_JOB_STATUS_WAIT_RESTART, FLINK_JOB_STATUS_ALERT_RUNNING, FLINK_JOB_STATUS_SLOW_RUNNING,
    FLINK_JOB_STATUS_RUNNING)

  /** Terminal statuses (Completed / Failed / Stopped). */
  private val COMPLETED_STATUS_ARRAY: Array[CommonVars[Int]] = Array(FLINK_JOB_STATUS_COMPLETED, FLINK_JOB_STATUS_FAILED, FLINK_JOB_STATUS_STOPPED)

  /**
   * Whether the status is terminal. Previously matched the hard-coded
   * defaults (1|6|7), which silently broke if the status codes were
   * reconfigured; now compares the configured values.
   */
  def isCompleted(status: Int): Boolean = COMPLETED_STATUS_ARRAY.exists(_.getValue == status)

  /**
   * Whether the status means the job is alive in some running form.
   * Previously matched the hard-coded defaults (2|3|4|5).
   */
  def isRunning(status: Int): Boolean = NOT_COMPLETED_STATUS_ARRAY.exists(_.getValue == status)

  /**
   * Map a Linkis status string to a Streamis job status code.
   * @throws JobExecuteErrorException for an unrecognized status
   *         (previously this surfaced as a raw scala.MatchError)
   */
  def linkisStatusToStreamisStatus(status: String): Int = status.toLowerCase match {
    case "starting" | "unlock" | "locked" | "idle" | "busy" | "running" => FLINK_JOB_STATUS_RUNNING.getValue
    case "success" => FLINK_JOB_STATUS_COMPLETED.getValue
    case "failed" | "shuttingdown" => FLINK_JOB_STATUS_FAILED.getValue
    case other => throw new JobExecuteErrorException(30350, s"Cannot recognize Linkis status $other.")
  }

  /** Human-readable description for a status code, or throws when unknown. */
  def getStatusString(status: Int): String = STATUS_ARRAY.find(_.getValue == status).map(_.description)
    .getOrElse(throw new JobExecuteErrorException(30351, s"Unknown status $status."))

  val TASK_MONITOR_INTERVAL: CommonVars[TimeType] = CommonVars("wds.streamis.task.monitor.interval", new TimeType("1m"))

  val TASK_SUBMIT_TIME_MAX: CommonVars[TimeType] = CommonVars("wds.streamis.task.submit.time.max", new TimeType("5m"))

  val SUPPORTED_JOB_TYPES: CommonVars[String] = CommonVars("wds.streamis.supported.job.types", "flink.jar,flink.sql,spark.jar")

  val SUPPORTED_MANAGEMENT_JOB_TYPES: CommonVars[String] = CommonVars("wds.streamis.management.supported.job.types", "flink.jar,flink.sql")
}
package com.webank.wedatasphere.streamis.jobmanager.manager.exception

import org.apache.linkis.common.exception.ErrorException

/**
 * Common job exception
 * @param errorCode error code
 * @param errorMsg error message
 */
class JobErrorException(errorCode: Int, errorMsg: String) extends ErrorException(errorCode, errorMsg)

/**
 * Task exception
 * @param errorCode error code
 * @param errorMsg error message
 */
class JobTaskErrorException(errorCode: Int, errorMsg: String) extends ErrorException(errorCode, errorMsg)

// Raised for failures during job execution (e.g. JobConf throws this for an
// unknown status code).
class JobExecuteErrorException(errorCode: Int, errorMsg: String) extends ErrorException(errorCode, errorMsg)

// Raised for failures while pausing a job — inferred from the name;
// confirm against call sites.
class JobPauseErrorException(errorCode: Int, errorMsg: String) extends ErrorException(errorCode, errorMsg)

// Raised for failures while fetching job information — inferred from the
// name; confirm against call sites.
class JobFetchErrorException(errorCode: Int, errorMsg: String) extends ErrorException(errorCode, errorMsg)

// Raised for failures while creating a job or its materials (e.g. BMLService
// throws this when a resource download fails).
class JobCreateErrorException(errorCode: Int, errorMsg: String) extends ErrorException(errorCode, errorMsg)
package com.webank.wedatasphere.streamis.jobmanager.manager.service

import java.io.{ByteArrayInputStream, File, FileInputStream, InputStream}
import java.util

import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory}
import org.apache.linkis.bml.protocol.{BmlUpdateResponse, BmlUploadResponse}
import org.apache.linkis.common.exception.ErrorException
import org.apache.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobCreateErrorException
import javax.annotation.PreDestroy
import org.apache.commons.lang.StringUtils
import org.springframework.stereotype.Component

import scala.collection.JavaConversions._

/**
 * Wrapper around the Linkis BML (material library) client used to
 * upload, update and download job resources.
 * created by cooperyang on 2021/7/12
 */
@Component("projectServerBMLService")
class BMLService extends Logging {

  // Fallback user applied when the caller passes an empty user name
  private val defaultBmlUser = "hadoop"
  private val bmlClient: BmlClient = BmlClientFactory.createBmlClient()

  /** Build the { "resourceId", "version" } result map returned by upload/update. */
  private def resourceResult(resourceId: Object, version: Object): util.Map[String, Object] = {
    val map = new util.HashMap[String, Object]
    map.put("resourceId", resourceId.asInstanceOf[Object].toString)
    map.put("version", version)
    map
  }

  /**
   * Upload in-memory content as a new BML resource.
   * @return map containing "resourceId" and "version"
   * @throws ErrorException if the upload fails
   */
  def upload(userName: String, content: String, fileName: String): util.Map[String, Object] = {
    val inputStream = new ByteArrayInputStream(content.getBytes("utf-8"))
    // Close the stream on every path (previously it leaked when the upload failed)
    Utils.tryFinally {
      val realUploadUser = if (StringUtils.isEmpty(userName)) defaultBmlUser else userName
      val resource: BmlUploadResponse = bmlClient.uploadResource(realUploadUser, fileName, inputStream)
      if (!resource.isSuccess) {
        error(s"Failed to upload ${content} to bml")
        throw new ErrorException(911113, "上传失败")
      }
      resourceResult(resource.resourceId, resource.version)
    }(Utils.tryQuietly(inputStream.close()))
  }

  /**
   * Upload a local file as a new BML resource.
   * @throws ErrorException if the upload fails
   */
  def upload(userName: String, fileName: String): util.Map[String, Object] = {
    // Previously this FileInputStream was never closed (resource leak)
    val inputStream = new FileInputStream(new File(fileName))
    Utils.tryFinally {
      val resource = bmlClient.uploadResource(userName, fileName, inputStream)
      if (!resource.isSuccess) {
        error(s"Failed to upload $fileName to bml")
        throw new ErrorException(911113, "上传失败")
      }
      resourceResult(resource.resourceId, resource.version)
    }(Utils.tryQuietly(inputStream.close()))
  }

  /**
   * Update an existing BML resource with new content.
   * @throws ErrorException if the update fails
   */
  def update(userName: String, resourceId: String, content: String, fileName: String): util.Map[String, Object] = {
    val inputStream = new ByteArrayInputStream(content.getBytes("utf-8"))
    Utils.tryFinally {
      val realUploadUser = if (StringUtils.isEmpty(userName)) defaultBmlUser else userName
      val resource: BmlUpdateResponse = bmlClient.updateResource(realUploadUser, resourceId, fileName, inputStream)
      if (!resource.isSuccess) {
        error(s"Failed to upload ${content} to bml")
        throw new ErrorException(911114, "更新失败")
      }
      resourceResult(resource.resourceId, resource.version)
    }(Utils.tryQuietly(inputStream.close()))
  }

  /**
   * Download a shared resource; the latest version when `version` is null.
   * @throws JobCreateErrorException if the download fails
   */
  def get(userName: String, resourceId: String, version: String): InputStream = {
    val realDownloadUser = if (StringUtils.isEmpty(userName)) defaultBmlUser else userName
    val resource =
      if (version == null) {
        bmlClient.downloadShareResource(realDownloadUser, resourceId)
      } else {
        bmlClient.downloadShareResource(realDownloadUser, resourceId, version)
      }
    if (!resource.isSuccess) {
      error(s"failed to download resourceId $resourceId version $version.")
      throw new JobCreateErrorException(91115, "下载失败")
    }
    resource.inputStream
  }

  /** Release the underlying BML client when the Spring context shuts down. */
  @PreDestroy
  def destroy(): Unit = bmlClient.close()

}
package com.webank.wedatasphere.streamis.jobmanager.manager.service
import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper}
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion}
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{JobInspectVo, JobSnapshotInspectVo, JobVersionInspectVo}
import org.apache.linkis.common.exception.ErrorException
import org.apache.linkis.common.utils.Logging
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import org.springframework.transaction.annotation.Transactional

import java.net.URI
import java.util

// Inspects a stream job before an operation: reports version drift (latest
// launched task version vs. latest job version) and the snapshot path that
// would be used for a restore.
@Service
class DefaultStreamJobInspectService extends StreamJobInspectService with Logging{

  @Autowired
  private var streamTaskService: StreamTaskService = _

  @Autowired
  private var streamJobMapper: StreamJobMapper = _

  @Autowired
  private var streamTaskMapper: StreamTaskMapper = _

  @Autowired
  private var streamJobConfMapper: StreamJobConfMapper = _
  /**
   * Inspect method
   *
   * @param jobId job id
   * @param types type list for inspecting
   * @return list of inspect results; empty when the job does not exist or
   *         none of the requested inspections produced a finding
   */
  @throws(classOf[ErrorException])
  @Transactional(rollbackFor = Array(classOf[Exception]))
  override def inspect(jobId: Long, types: Array[JobInspectVo.Types]): util.List[JobInspectVo] = {
    val inspectVos: util.List[JobInspectVo] = new util.ArrayList[JobInspectVo]
    // Lock the stream job
    // (queryAndLockJobById runs inside this @Transactional scope; a null
    // result means the job was not found and falls through to the empty list)
    Option(this.streamJobMapper.queryAndLockJobById(jobId)) match {
      case Some(streamJob) =>
        // Each inspector returns null for "nothing to report"; only non-null
        // findings are collected
        types.foreach {
          case JobInspectVo.Types.VERSION =>
            Option(versionInspect(streamJob)).foreach(inspectVos.add(_))
          case JobInspectVo.Types.SNAPSHOT =>
            Option(snapshotInspect(streamJob)).foreach(inspectVos.add(_))
          case _ => null
          // Do nothing
        }
      case _ => //Ignore
    }

    inspectVos
  }

  /**
   * Inspect the job version
   * @param streamJob stream job
   * @return a finding when the latest task's version differs from the latest
   *         job version; null when they match or no task exists yet
   */
  private def versionInspect(streamJob: StreamJob): JobVersionInspectVo = {
    Option(streamTaskMapper.getLatestByJobId(streamJob.getId)) match {
      case Some(task) =>
        val latestJobVersion = streamJobMapper.getLatestJobVersion(streamJob.getId)
        if (!task.getVersion.equals(latestJobVersion.getVersion)) {
          val inspectVo = new JobVersionInspectVo

          // "last" = version the latest task ran with, "now" = newest version
          val lastJobVersion = streamJobMapper.getJobVersionById(streamJob.getId, task.getVersion)
          inspectVo.setLast(lastJobVersion)
          inspectVo.setNow(latestJobVersion)
          logger.info(s"Version inspect [ job: ${streamJob.getName}, id: ${streamJob.getId}," +
            s" last_version: ${task.getVersion}, now_version: ${latestJobVersion.getVersion}]")
          inspectVo
        } else null
      case _ => null
    }
  }

  /**
   * Inspect the snapshot
   * @param streamJob stream job
   * @return the snapshot path finding, or null when no savepoint path is
   *         configured and auto-restore is not switched on
   */
  private def snapshotInspect(streamJob: StreamJob): JobSnapshotInspectVo = {
    // Prefer an explicitly configured savepoint path
    Option(this.streamJobConfMapper.getRawConfValue(streamJob.getId, JobConfKeyConstants.SAVEPOINT.getValue + "path")) match {
      case Some(path) =>
        val inspectVo = new JobSnapshotInspectVo
        inspectVo.setPath(new URI(path).toString)
        inspectVo
      // Otherwise, fall back to the latest launched task's state when the
      // auto-restore switch is "ON".
      // NOTE(review): this matches the raw conf value directly — a null value
      // (key absent) is caught by the default case below
      case _ => this.streamJobConfMapper.getRawConfValue(streamJob.getId, JobConfKeyConstants.START_AUTO_RESTORE_SWITCH.getValue) match {
        case "ON" =>
          Option(this.streamTaskService.getStateInfo(streamTaskMapper
            .getLatestLaunchedById(streamJob.getId))) match {
            case Some(jobState) =>
              val inspectVo = new JobSnapshotInspectVo
              inspectVo.setPath(jobState.getLocation.toString)
              inspectVo
            case _ => null
          }
        case _ => null
      }
    }

  }

}
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.service + +import java.util +import java.util.Date +import com.github.pagehelper.PageInfo +import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants +import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService +import com.webank.wedatasphere.streamis.jobmanager.manager.alert.AlertLevel +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf +import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamAlertMapper, StreamJobMapper, StreamTaskMapper} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity._ +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{QueryJobListVo, TaskCoreNumVo, VersionDetailVo} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.{JobCreateErrorException, JobFetchErrorException} +import com.webank.wedatasphere.streamis.jobmanager.manager.service.DefaultStreamJobService.JobDeployValidateResult +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent +import com.webank.wedatasphere.streamis.jobmanager.manager.util.{ReaderUtils, ZipHelper} +import org.apache.commons.lang.StringUtils +import org.apache.commons.lang3.ObjectUtils +import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.common.utils.Logging +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Service +import org.springframework.transaction.annotation.Transactional +import scala.collection.JavaConverters._ + + +@Service +class DefaultStreamJobService extends StreamJobService with Logging { + + @Autowired + private var streamJobMapper: StreamJobMapper = _ + @Autowired + private var streamTaskMapper: StreamTaskMapper = _ + @Autowired + private var bmlService: BMLService = _ + 
@Autowired + private var jobContentParsers: Array[JobContentParser] = _ + @Autowired + private var streamJobConfService: StreamJobConfService = _ + @Autowired + private var streamAlertMapper:StreamAlertMapper = _ + + override def getJobById(jobId: Long): StreamJob = { + this.streamJobMapper.getJobById(jobId) + } + + override def getJobByName(jobName: String): util.List[StreamJob] = streamJobMapper.getJobByName(jobName) + + override def getByProList(projectName: String, userName: String, jobName: String, jobStatus: Integer, jobCreator: String): PageInfo[QueryJobListVo] = { + val streamJobList = streamJobMapper.getJobLists(projectName, userName, jobName, jobStatus, jobCreator) + if (streamJobList != null && !streamJobList.isEmpty) { + val pageInfo = new PageInfo[QueryJobListVo](streamJobList) + return pageInfo + } + new PageInfo[QueryJobListVo](new util.ArrayList[QueryJobListVo]()) + } + + /** + * Page list query of version info + * + * @param jobId job id + * @return + */ + override def getVersionList(jobId: Long): PageInfo[VersionDetailVo] = { + val jobVersions = streamJobMapper.getJobVersionDetails(jobId) + if (null == jobVersions){ + new PageInfo[VersionDetailVo](new util.ArrayList[VersionDetailVo]()) + } else new PageInfo[VersionDetailVo](jobVersions) + } + + /** + * COre indicator(核心指标) + */ + override def countByCores(projectName: String, userName: String): TaskCoreNumVo = { + val jobs = streamJobMapper.getJobLists(projectName, userName, null, null, null) + val taskNum = new TaskCoreNumVo() + taskNum.setProjectName(projectName) + if (jobs != null && !jobs.isEmpty) { + jobs.asScala.filter(_.getStatus != null).groupBy(_.getStatus).map(m => (m._1, m._2.size)).foreach(fo => { + if (fo._1.equals(JobConf.FLINK_JOB_STATUS_COMPLETED.getValue)) taskNum.setSuccessNum(fo._2) + else if (fo._1.equals(JobConf.FLINK_JOB_STATUS_WAIT_RESTART.getValue)) taskNum.setWaitRestartNum(fo._2) + else if (fo._1.equals(JobConf.FLINK_JOB_STATUS_ALERT_RUNNING.getValue)) 
taskNum.setAlertNum(fo._2) + else if (fo._1.equals(JobConf.FLINK_JOB_STATUS_SLOW_RUNNING.getValue)) taskNum.setSlowTaskNum(fo._2) + else if (fo._1.equals(JobConf.FLINK_JOB_STATUS_RUNNING.getValue)) taskNum.setRunningNum(fo._2) + else if (fo._1.equals(JobConf.FLINK_JOB_STATUS_FAILED.getValue)) taskNum.setFailureNum(fo._2) + else if (fo._1.equals(JobConf.FLINK_JOB_STATUS_STOPPED.getValue)) taskNum.setStoppedNum(fo._2) + }) + } + taskNum + } + + /** + * job version detail(任务版本详情) + * + * @param jobId + */ + override def versionDetail(jobId: Long, version: String): VersionDetailVo = { + streamJobMapper.getVersionDetail(jobId, version) + } + + + override def rollingJobVersion(preVersion: String): String = { + val newVersion = preVersion.substring(1).toInt + 1 + val codeFormat = "%05d" + "v" + String.format(codeFormat, new Integer(newVersion)) + } + + override def uploadFiles(metaJsonInfo: MetaJsonInfo, version: StreamJobVersion, path: String): Unit = { + val readerUtils = new ReaderUtils + readerUtils.listFiles(path).asScala.foreach(path => { + val response = bmlService.upload(version.getCreateBy, path) + val jobVersionFiles = new StreamJobVersionFiles + jobVersionFiles.setJobId(version.getJobId) + jobVersionFiles.setJobVersionId(version.getId) + jobVersionFiles.setCreateBy(version.getCreateBy) + jobVersionFiles.setVersion(version.getVersion) + jobVersionFiles.setFileName(readerUtils.getFileName(path)) + jobVersionFiles.setCreateTime(new Date(System.currentTimeMillis())) + jobVersionFiles.setStorePath(readerUtils.readAsJson(response.get("version").toString, response.get("resourceId").toString)) + streamJobMapper.insertJobVersionFiles(jobVersionFiles) + }) + } + + + override def deployStreamJob(streamJob: StreamJob, + metaJsonInfo: MetaJsonInfo, userName: String, updateVersion: Boolean): StreamJobVersion = { + if(StringUtils.isBlank(metaJsonInfo.getJobType)) + throw new JobCreateErrorException(30030, s"jobType is needed.") + else 
if(!JobConf.SUPPORTED_JOB_TYPES.getValue.contains(metaJsonInfo.getJobType)) { + throw new JobCreateErrorException(30030, s"jobType ${metaJsonInfo.getJobType} is not supported.") + } + if(metaJsonInfo.getJobContent == null || metaJsonInfo.getJobContent.isEmpty) + throw new JobCreateErrorException(30030, s"jobContent is needed.") + val jobVersion = new StreamJobVersion() + val newStreamJob = new StreamJob() + if (streamJob == null) { + logger.info("StreamJob is null, create a new streamJob") + jobVersion.setVersion("v00001") + newStreamJob.setCreateBy(userName) + newStreamJob.setSubmitUser(userName) + newStreamJob.setJobType(metaJsonInfo.getJobType) + newStreamJob.setDescription(metaJsonInfo.getDescription) + newStreamJob.setCurrentVersion(jobVersion.getVersion) + newStreamJob.setCreateTime(new Date()) + newStreamJob.setLabel(metaJsonInfo.getTags) + newStreamJob.setName(metaJsonInfo.getJobName) + newStreamJob.setProjectName(metaJsonInfo.getProjectName) + streamJobMapper.insertJob(newStreamJob) + } else { + val jobVersions = streamJobMapper.getJobVersions(streamJob.getId) + if (jobVersions == null || jobVersions.isEmpty) jobVersion.setVersion("v00001") + else + jobVersion.setVersion(rollingJobVersion(jobVersions.get(0).getVersion)) + if(streamJob.getJobType != metaJsonInfo.getJobType) + throw new JobCreateErrorException(30030, s"StreamJob-${streamJob.getName} has already created with jobType ${streamJob.getJobType}, you cannot change it to ${metaJsonInfo.getJobType}.") + streamJob.setId(streamJob.getId) + if (updateVersion){ + // update version + streamJob.setCurrentVersion(jobVersion.getVersion) + } + if (StringUtils.isNotEmpty(metaJsonInfo.getDescription)) + streamJob.setDescription(metaJsonInfo.getDescription) + streamJobMapper.updateJob(streamJob) + } + if (ObjectUtils.isNotEmpty(streamJob)) { + jobVersion.setJobId(streamJob.getId) + } else { + logger.info("newStreamJob is {}", newStreamJob) + jobVersion.setJobId(newStreamJob.getId) + } + 
jobVersion.setJobContent(metaJsonInfo.getMetaInfo) + jobVersion.setCreateBy(userName) + jobVersion.setCreateTime(new Date) + jobVersion.setSource("upload by user.") + if (StringUtils.isNotBlank(metaJsonInfo.getComment)) + jobVersion.setComment(metaJsonInfo.getComment) + else jobVersion.setComment("upload by user.") + // Should build unique key using job id and version number, to avoid the duplicate version + streamJobMapper.insertJobVersion(jobVersion) + jobVersion + } + + @throws(classOf[ErrorException]) + @Transactional(rollbackFor = Array(classOf[Exception])) + override def uploadJob(projectName: String, userName: String, inputZipPath: String): StreamJobVersion = { + val inputPath = ZipHelper.unzip(inputZipPath) + val readerUtils = new ReaderUtils + val metaJsonInfo = readerUtils.parseJson(inputPath) + if (StringUtils.isNotBlank(projectName) && projectName!=metaJsonInfo.getProjectName) { + throw new JobCreateErrorException(30030, s"the projectName ${metaJsonInfo.getProjectName} is not matching the project ") + } + val validateResult = validateJobDeploy(metaJsonInfo.getProjectName, metaJsonInfo.getJobName, userName) + // 生成StreamJob,根据StreamJob生成StreamJobVersion + val version = deployStreamJob(validateResult.streamJob, metaJsonInfo, userName, validateResult.updateVersion) + // Save the job configuration, lock the job again if exists + if (null != metaJsonInfo.getJobConfig){ + this.streamJobConfService.saveJobConfig(version.getJobId, metaJsonInfo.getJobConfig.asInstanceOf[util.Map[String, Any]]) + } + // 上传所有非meta.json的文件 + uploadFiles(metaJsonInfo, version, inputZipPath) + version + } + + @throws(classOf[ErrorException]) + @Transactional(rollbackFor = Array(classOf[Exception])) + override def createOrUpdate(userName: String, metaJsonInfo: MetaJsonInfo): StreamJobVersion = { + val validateResult = validateJobDeploy(metaJsonInfo.getProjectName, metaJsonInfo.getJobName, userName) + val readerUtils = new ReaderUtils + 
metaJsonInfo.setMetaInfo(readerUtils.readAsJson(metaJsonInfo)) + val version = deployStreamJob(validateResult.streamJob, metaJsonInfo, userName, validateResult.updateVersion) + // Save the job configuration, lock the job again if exists + if (null != metaJsonInfo.getJobConfig){ + this.streamJobConfService.saveJobConfig(version.getJobId, metaJsonInfo.getJobConfig.asInstanceOf[util.Map[String, Any]]) + } + version + } + + override def getJobContent(jobId: Long, version: String): StreamisTransformJobContent = { + val job = streamJobMapper.getJobById(jobId) + if(job == null) throw new JobFetchErrorException(30030, s"job is not exists.") + val jobVersion = if(StringUtils.isBlank(version)) { + streamJobMapper.getJobVersions(jobId).get(0) + } else streamJobMapper.getJobVersionById(jobId, version) + if(jobVersion == null) + throw new JobFetchErrorException(30030, s"job has no versions.") + jobContentParsers.find(_.canParse(job, jobVersion)).map(_.parseTo(job, jobVersion)) + .getOrElse(throw new JobFetchErrorException(30030, s"Cannot find a JobContentParser to parse jobContent.")) + } + + + override def hasPermission(jobId: Long, username: String): Boolean = { + hasPermission(this.streamJobMapper.getJobById(jobId), username) + } + + override def hasPermission(job: StreamJob, username: String): Boolean = { + Option(job) match { + case Some(job: StreamJob) => + if (!username.equals(job.getCreateBy)){ + Option(this.streamJobConfService.getJobConfValue(job.getId, + JobConfKeyConstants.AUTHORITY_AUTHOR_VISIBLE.getValue)) match { + case Some(authors) => + authors.split(",").toList.contains(username) + case _ => false + } + } else true + case None => false + } + } + + override def getAlertUsers(job: StreamJob): util.List[String] = { + val alertUsers = this.streamJobConfService.getJobConfValue(job.getId, JobConfKeyConstants.ALERT_USER.getValue) + if (StringUtils.isBlank(alertUsers)) return null + alertUsers.split(",").toList.asJava + } + + override def getAlertLevel(job: 
StreamJob): AlertLevel = { + val level = this.streamJobConfService.getJobConfValue(job.getId, JobConfKeyConstants.ALERT_LEVEL.getValue) + if (StringUtils.isBlank(level)) return AlertLevel.MINOR + AlertLevel.valueOf(level) + } + + override def isCreator(jobId: Long, username: String): Boolean = { + val job = streamJobMapper.getJobById(jobId) + if (job == null) return false + username.equals(job.getCreateBy) + } + + override def getAlert(username: String, jobId: Long, version: String): util.List[StreamAlertRecord] = { + val job = streamJobMapper.getJobVersionById(jobId, version) + if (job == null) return null + streamAlertMapper.getAlertByJobIdAndVersion(username,jobId,job.getId) + } + + private def validateJobDeploy(projectName: String, jobName: String, userName: String): JobDeployValidateResult = { + if(StringUtils.isBlank(jobName)) throw new JobCreateErrorException(30030, s"jobName is needed.") + if(StringUtils.isBlank(projectName)) throw new JobCreateErrorException(30030, s"projectName is needed.") + // Try to lock the stream job to create version + Option(streamJobMapper.queryAndLockJobInCondition(projectName, jobName)) match { + case Some(streamJob) => + var updateVersion = true + // Use the project privilege at restful api + // if (streamJob.getCreateBy != userName) + // throw new JobCreateErrorException(30030, s"You have no permission to update StreamJob-$jobName.") + // Get the latest task directly + val task = streamTaskMapper.getLatestByJobId(streamJob.getId) + if (task != null && !JobConf.isCompleted(task.getStatus)) { + logger.warn(s"StreamJob-$jobName is in status ${task.getStatus}, your deployment will not update the version in job") + updateVersion = false + // throw new JobCreateErrorException(30030, s"StreamJob-$jobName is in status ${tasks.get(0).getStatus}, you cannot upload the zip.") + } + JobDeployValidateResult(streamJob, updateVersion) + case _ => + JobDeployValidateResult(null, updateVersion = true) + } + + } + + +} + +object 
DefaultStreamJobService{ + /** + * Deploy validate result + * @param updateVersion should update version + */ + case class JobDeployValidateResult(streamJob: StreamJob, updateVersion: Boolean) +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala new file mode 100644 index 000000000..d7a91c775 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala @@ -0,0 +1,744 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.service + +import java.util +import java.util.concurrent.Future +import java.util.{Calendar, function} + +import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants +import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobInfo, LaunchJob} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, Savepoint} +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo, LinkisJobInfo} +import com.webank.wedatasphere.streamis.jobmanager.manager.SpringContextHolder +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf.FLINK_JOB_STATUS_FAILED +import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo._ +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.{JobExecuteErrorException, JobFetchErrorException, JobPauseErrorException, JobTaskErrorException} +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.FutureScheduler +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.AbstractStreamisSchedulerEvent.StreamisEventInfo +import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.StreamisPhaseInSchedulerEvent +import 
com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.StreamisPhaseInSchedulerEvent.ScheduleCommand +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.exception.TransformFailedErrorException +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.{StreamisTransformJobBuilder, TaskMetricsParser, Transform} +import com.webank.wedatasphere.streamis.jobmanager.manager.util.DateUtils +import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils +import javax.annotation.Resource +import org.apache.commons.lang.StringUtils +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.scheduler.queue +import org.apache.linkis.scheduler.queue.{Job, SchedulerEvent} +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Service +import org.springframework.transaction.annotation.Transactional + +import scala.collection.JavaConverters._ + + +@Service +class DefaultStreamTaskService extends StreamTaskService with Logging{ + + @Autowired private var streamTaskMapper:StreamTaskMapper=_ + @Autowired private var streamJobMapper:StreamJobMapper=_ + @Autowired private var streamisTransformJobBuilders: Array[StreamisTransformJobBuilder] = _ + @Autowired private var taskMetricsParser: Array[TaskMetricsParser] = _ + + @Resource + private var jobLaunchManager: JobLaunchManager[_ <: JobInfo] = _ + + @Resource + private var streamJobConfMapper: StreamJobConfMapper = _ + /** + * Scheduler + */ + @Resource + private var scheduler: FutureScheduler = _ + + + /** + * Sync to execute job(task) + * 1) create a new task + * 2) launch the new task + * + * @param jobId job id + * @param taskId task id + * @param execUser user name + * @param restore restore from job state + */ + override def execute(jobId: Long, taskId: Long, execUser: String, restore: Boolean): Unit = { + val result: Future[String] = 
asyncExecute(jobId, taskId, execUser, restore) + val errorMessage = result.get() + if (StringUtils.isNotBlank(errorMessage)){ + throw new JobExecuteErrorException(-1, s"Fail to execute StreamJob(Task), message output: $errorMessage"); + } + } + + override def execute(jobId: Long, taskId: Long, execUser: String): Unit = { + val actualJobId = if(jobId <= 0) getTaskInfo(taskId)._1 else jobId + val restore = this.streamJobConfMapper.getRawConfValue(actualJobId, JobConfKeyConstants.START_AUTO_RESTORE_SWITCH.getValue) match { + case "ON" => true + case _ => false + } + execute(actualJobId, 0, execUser, restore) + } + + override def asyncExecute(jobId: Long, taskId: Long, execUser: String, restore: Boolean): Future[String] = { + execute(jobId, taskId, execUser, restore, new function.Function[SchedulerEvent, String] { + override def apply(event: SchedulerEvent): String = { + event match { + case job: Job => + job.getJobInfo.getOutput + case _ => null + } + } + })._2 + } + + override def asyncExecute(jobId: Long, taskId: Long, execUser: String): Future[String] = { + val actualJobId = if(jobId <= 0) getTaskInfo(taskId)._1 else jobId + val restore = this.streamJobConfMapper.getRawConfValue(actualJobId, JobConfKeyConstants.START_AUTO_RESTORE_SWITCH.getValue) match { + case "ON" => true + case _ => false + } + asyncExecute(actualJobId, 0, execUser, restore) + } + + override def bulkExecute(jobIds: util.List[Long], taskIds: util.List[Long], execUser: String): util.List[ExecResultVo] = { + bulkExecute(jobIds, taskIds, execUser, (jobId, taskId) => { + val actualJobId = if(jobId <= 0) getTaskInfo(taskId)._1 else jobId + this.streamJobConfMapper.getRawConfValue(actualJobId, JobConfKeyConstants.START_AUTO_RESTORE_SWITCH.getValue) match { + case "ON" => true + case _ => false + } + }) + } + /** + * Bulk executing + * + * @param jobIds jobIds + * @param taskIds taskIds + * @param execUser execUser + * @param restore restore from job state + */ + override def bulkExecute(jobIds: 
util.List[Long], taskIds: util.List[Long], execUser: String, restore: Boolean): util.List[ExecResultVo] = { + bulkExecute(jobIds, taskIds, execUser, (_, _) => restore) + } + + def bulkExecute(jobIds: util.List[Long], taskIds: util.List[Long], execUser: String, isRestore: (Long, Long) => Boolean): util.List[ExecResultVo] = { + val result: util.List[ExecResultVo] = new util.ArrayList[ExecResultVo]() + val counter = (jobIds.size(), taskIds.size()) + val iterateNum: Int = math.max(counter._1, counter._2) + for (i <- 0 until iterateNum){ + val jobId = if (i < counter._1) jobIds.get(i) else 0L + val taskId = if (i < counter._2) taskIds.get(i) else 0L + val event = execute(jobId, taskId, execUser, isRestore(jobId, taskId), + new function.Function[SchedulerEvent, String]{ + override def apply(event: SchedulerEvent): String = { + event match { + case job: Job => + job.getJobInfo.getOutput + case _ => null + } + } + })._1 + // Convert scheduler event to execution result + val resultVo: ExecResultVo = new ExecResultVo(jobId, taskId) + event match { + case job: queue.Job => + queueJobInfoIntoResult(job.getJobInfo, resultVo) + } + result.add(resultVo) + } + result + } + def execute[T](jobId: Long, taskId: Long, execUser: String, restore: Boolean, returnMapping: function.Function[SchedulerEvent, T]): (SchedulerEvent, Future[T]) = { + val self = SpringContextHolder.getBean(classOf[StreamTaskService]) + var finalJobId = jobId + val event = new StreamisPhaseInSchedulerEvent(if (jobId > 0) "executeJob-" + jobId else "executeTask-" + taskId, new ScheduleCommand { + + override def onPrepare(context: StreamisPhaseInSchedulerEvent.StateContext, scheduleJob: queue.JobInfo): Unit = { + if (finalJobId <= 0 ){ + finalJobId = getTaskInfo(taskId)._1 + } + // Assign the status STARTING default + val streamTask = self.createTask(finalJobId, JobConf.FLINK_JOB_STATUS_STARTING.getValue, execUser) + context.addVar("newTaskId", streamTask.getId); + } + + override def schedule(context: 
StreamisPhaseInSchedulerEvent.StateContext, jobInfo: queue.JobInfo): util.Map[String, AnyRef] = { + val newTaskId = context.getVar("newTaskId") + if (null != newTaskId){ + var jobState: JobState = null + // Means to fetch the job state from task to restore + if (restore){ + val restoreTaskId = taskId + // TODO fetch the job stage strategy + jobState = if (restoreTaskId <= 0){ +// val earlierTasks = streamTaskMapper.getEarlierByJobId(finalJobId, 2) +// if (earlierTasks.isEmpty){ +// throw new JobExecuteErrorException(-1, "Cannot find the candidate task to search state") +// } else if (earlierTasks.size() < 2){ +// warn("First time to launch the StreamJob, ignore to restore JobState") +// null +// } else { +// getStateInfo(earlierTasks.get(1)) +// } + getStateInfo(streamTaskMapper.getLatestLaunchedById(jobId)) + } else getStateInfo(restoreTaskId) + } + // Launch entrance + launch(newTaskId.asInstanceOf[Long], execUser, jobState) + } else { + // TODO cannot find the new task id + } + null + } + + override def onErrorHandle(context: StreamisPhaseInSchedulerEvent.StateContext, scheduleJob: queue.JobInfo, t: Throwable): Unit = { + // Change the task status + val newTaskId = context.getVar("newTaskId") + if (null != newTaskId) { + info(s"Error to launch StreamTask [$newTaskId], now try to persist the status and message output", t) + val finalTask = new StreamTask() + finalTask.setId(newTaskId.asInstanceOf[Long]) + finalTask.setStatus(JobConf.FLINK_JOB_STATUS_FAILED.getValue) + // Output message equals error message, you can use t.getMessage() + finalTask.setErrDesc(scheduleJob.getOutput) + if (streamTaskMapper.updateTaskInStatus(finalTask, JobConf.FLINK_JOB_STATUS_STARTING.getValue) > 0) { + info(s"Transient the StreamTask [$newTaskId]'status from STARTING to FAILED and flush the output message.") + } + } + } + }) + (event, scheduler.submit(event, returnMapping)) + } + + + /** + * Sync to pause job(task) + * + * @param jobId job id + * @param taskId task id + * @param 
operator user name + */ + override def pause(jobId: Long, taskId: Long, operator: String, snapshot: Boolean): PauseResultVo = { + val result: Future[PauseResultVo] = asyncPause(jobId, taskId, operator, snapshot) + val pauseResult = result.get() + if (StringUtils.isNotBlank(pauseResult.getMessage)){ + throw new JobExecuteErrorException(-1, s"Fail to pause StreamJob(Task), message output: ${pauseResult.getMessage}"); + } + pauseResult + } + + + override def asyncPause(jobId: Long, taskId: Long, operator: String, snapshot: Boolean): Future[PauseResultVo] = { + pause(jobId, taskId, operator, snapshot, new function.Function[SchedulerEvent, PauseResultVo] { + override def apply(event: SchedulerEvent): PauseResultVo = { + val resultVo: PauseResultVo = new PauseResultVo(jobId, taskId) + event match { + case job: queue.Job => + val jobInfo = job.getJobInfo + queueJobInfoIntoResult(jobInfo, resultVo) + jobInfo match { + case eventInfo: StreamisEventInfo => + resultVo.setSnapshotPath(String.valueOf(eventInfo + .getResultSet.asScala.getOrElse("snapshotPath", ""))) + } + case _ => + } + resultVo + } + })._2 + } + + /** + * Bulk pausing + * + * @param jobIds jobIds + * @param taskIds taskIds + * @param operator operator + * @param snapshot snapshot + * @return + */ + override def bulkPause(jobIds: util.List[Long], taskIds: util.List[Long], operator: String, snapshot: Boolean): util.List[PauseResultVo] = { + val result: util.List[Future[PauseResultVo]] = new util.ArrayList[Future[PauseResultVo]]() + val counter = (jobIds.size(), taskIds.size()) + val iterateNum: Int = math.max(counter._1, counter._2) + for (i <- 0 until iterateNum) { + val jobId = if (i < counter._1) jobIds.get(i) else 0L + val taskId = if (i < counter._2) taskIds.get(i) else 0L + result.add(asyncPause(jobId, taskId, operator, snapshot)) + } + result.asScala.map(_.get()).asJava + } + + def pause[T](jobId: Long, taskId: Long, operator: String, snapshot: Boolean, returnMapping: function.Function[SchedulerEvent, 
T]): (SchedulerEvent, Future[T]) = { + val self = SpringContextHolder.getBean(classOf[StreamTaskService]) + var finalJobId = jobId + val event = new StreamisPhaseInSchedulerEvent(if (jobId > 0) "pauseJob-" + jobId else "pauseTask-" + taskId, new ScheduleCommand { + + override def onPrepare(context: StreamisPhaseInSchedulerEvent.StateContext, scheduleJob: queue.JobInfo): Unit = { + if (finalJobId < 0){ + finalJobId = getTaskInfo(taskId)._1 + } + // Assign the status STOPPING default + val pauseTaskId = self.transitionTaskStatus(jobId, taskId, JobConf.FLINK_JOB_STATUS_STOPPING.getValue) + if (pauseTaskId > 0) context.addVar("pauseTaskId", pauseTaskId) + } + + override def onErrorHandle(context: StreamisPhaseInSchedulerEvent.StateContext, scheduleJob: queue.JobInfo, t: Throwable): Unit = { + val pauseTaskId = context.getVar("pauseTaskId") + if (null != pauseTaskId) { + info(s"Error to pause StreamTask [$pauseTaskId], now try to restore the status", t) + val finalTask = new StreamTask() + finalTask.setId(pauseTaskId.asInstanceOf[Long]) + finalTask.setStatus(JobConf.FLINK_JOB_STATUS_RUNNING.getValue) + // Not need to store the output message + if (streamTaskMapper.updateTaskInStatus(finalTask, JobConf.FLINK_JOB_STATUS_STOPPING.getValue) > 0) { + info(s"Restore the StreamTask [$pauseTaskId]'status from STOPPING return to RUNNING.") + } + } + } + + override def schedule(context: StreamisPhaseInSchedulerEvent.StateContext, jobInfo: queue.JobInfo): util.Map[String, AnyRef] = { + val pauseTaskId = context.getVar("pauseTaskId") + val resultSet = new util.HashMap[String, AnyRef]() + if (null != pauseTaskId){ + val streamTask = streamTaskMapper.getTaskById(pauseTaskId.asInstanceOf[Long]) + if (null == streamTask){ + throw new JobPauseErrorException(-1, s"Not found the StreamTask [$pauseTaskId] to pause, please examined the system runtime status!") + } + if (StringUtils.isBlank(streamTask.getLinkisJobId)){ + throw new JobPauseErrorException(-1, s"Unable to pause the StreamTask 
[$pauseTaskId}], the linkis job id is null") + } + val streamJob = streamJobMapper.getJobById(finalJobId) + logger.info(s"Try to stop StreamJob [${streamJob.getName} with task(taskId: ${streamTask.getId}, linkisJobId: ${streamTask.getLinkisJobId}).") + val jobClient = jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo) + val jobStateInfo = Utils.tryCatch(jobClient.stop(snapshot)){ + case e: Exception => + val pauseError = new JobPauseErrorException(-1, s"Fail to stop the StreamJob [${streamJob.getName}] " + + s"with task(taskId: ${streamTask.getId}, linkisJobId: ${streamTask.getLinkisJobId}), reason: ${e.getMessage}.") + pauseError.initCause(e) + throw pauseError + case pauseE: JobPauseErrorException => + throw pauseE + } + Option(jobStateInfo).foreach(stateInfo => resultSet.put("snapshotPath", stateInfo.getLocation)) + streamTask.setLastUpdateTime(Calendar.getInstance.getTime) + streamTask.setStatus(JobConf.FLINK_JOB_STATUS_STOPPED.getValue) + streamTaskMapper.updateTask(streamTask) + } + resultSet + } + }) + (event, this.scheduler.submit(event, returnMapping)) + } + + /** + * Query execute history(查询运行历史) + * @param jobId + * @param version + * @return + */ + def queryHistory(jobId: Long, version: String): util.List[StreamTaskListVo] ={ + if(StringUtils.isEmpty(version)) throw new JobFetchErrorException(30355, "version cannot be empty.") + val job = streamJobMapper.getJobById(jobId) + if(job == null) throw new JobFetchErrorException(30355, s"Unknown job $jobId.") + val jobVersion = streamJobMapper.getJobVersionById(jobId, version) + if(jobVersion == null) return new util.ArrayList[StreamTaskListVo] + val tasks = streamTaskMapper.getByJobVersionId(jobVersion.getId, version) + if(tasks == null || tasks.isEmpty) return new util.ArrayList[StreamTaskListVo] + val list = new util.ArrayList[StreamTaskListVo] + tasks.asScala.foreach{ f => + val svo = new StreamTaskListVo() + svo.setTaskId(f.getId) + 
svo.setStatus(JobConf.getStatusString(f.getStatus)) + svo.setCreator(f.getSubmitUser) + svo.setVersion(version) + svo.setJobName(job.getName) + svo.setStartTime(DateUtils.formatDate(f.getStartTime)) + svo.setEndTime(DateUtils.formatDate(f.getLastUpdateTime)) + svo.setJobVersionId(f.getJobVersionId) + //获取最新版本的代码信息 + svo.setVersionContent(jobVersion.getJobContent) + svo.setRunTime(DateUtils.intervals(f.getStartTime, f.getLastUpdateTime)) + svo.setStopCause(sub(f.getErrDesc)) + list.add(svo) + } + list + } + + def getRealtimeLog(jobId: Long, taskId: Long, operator: String, requestPayload: LogRequestPayload): util.Map[String, Any] = { + val returnMap = new util.HashMap[String, Any] + returnMap.put("logPath", "undefined") + returnMap.put("logs", util.Arrays.asList("No log content is available. Perhaps the task has not been scheduled")) + returnMap.put("endLine", 1); + val streamTask = if(taskId > 0) streamTaskMapper.getTaskById(taskId) + else streamTaskMapper.getLatestByJobId(jobId) + if (null != streamTask && StringUtils.isNotBlank(streamTask.getLinkisJobId)) { + Utils.tryCatch { + val jobClient = jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo) + jobClient match { + case client: FlinkJobClient => + requestPayload.setLogHistory(JobConf.isCompleted(streamTask.getStatus)) + val logIterator = client.fetchLogs(requestPayload) + returnMap.put("logPath", logIterator.getLogPath) + returnMap.put("logs", logIterator.getLogs) + returnMap.put("endLine", logIterator.getEndLine) + logIterator.close() + jobClient.getJobInfo match { + case linkisInfo: LinkisJobInfo => + if (StringUtils.isBlank(linkisInfo.getLogDirSuffix) && StringUtils.isNotBlank(logIterator.getLogDirSuffix)){ + Utils.tryAndWarn { + // Update the linkis job info and store into database + linkisInfo.setLogDirSuffix(logIterator.getLogDirSuffix) + streamTask.setLinkisJobInfo(DWSHttpClient.jacksonJson.writeValueAsString(linkisInfo)); + streamTaskMapper.updateTask(streamTask) + } + } + 
case _ => + } + } + }{ case e: Exception => + // Just warn the exception + warn(s"Unable to fetch runtime log for StreamTask " + + s"[id: ${streamTask.getId}, jobId: ${streamTask.getJobId}, linkis_id: ${streamTask.getLinkisJobId}]", e) + } + } + returnMap + } + + /** + * Do snapshot + * + * @param jobId job id + * @param taskId task id + * @param operator operator + */ + override def snapshot(jobId: Long, taskId: Long, operator: String): String = { + val streamTask = if (taskId > 0) streamTaskMapper.getTaskById(taskId) + else streamTaskMapper.getLatestByJobId(jobId) + if (null != streamTask && StringUtils.isNotBlank(streamTask.getLinkisJobId)){ + val jobClient = this.jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo) + return jobClient match { + case flinkJobClient: FlinkJobClient => + Option(flinkJobClient.triggerSavepoint()) match { + case Some(savepoint) => + savepoint.getLocation.toString + } + } + } + null + } + /** + * @param jobId + * @return + */ + def getProgress(jobId:Long, version: String): JobProgressVo ={ + val jobVersion = streamJobMapper.getJobVersionById(jobId, version) + if(jobVersion == null) return new JobProgressVo + val tasks = streamTaskMapper.getTasksByJobIdAndJobVersionId(jobVersion.getJobId, jobVersion.getId) + if(tasks == null || tasks.isEmpty) return new JobProgressVo + val task = tasks.get(0) + val jobProgressVO = new JobProgressVo() + jobProgressVO.setTaskId(task.getId) + jobProgressVO.setProgress(task.getStatus) + jobProgressVO + } + + /** + * Fetch the status list by job id list + * + * @param jobIds job ids + */ + override def getStatusList(jobIds: util.List[Long]): util.List[JobStatusVo] = { + val streamTask: util.List[StreamTask] = this.streamTaskMapper.getStatusInfoByJobIds(jobIds.asScala.map(id => { + id.asInstanceOf[java.lang.Long] + }).asJava) + streamTask.asScala.map(task => { + val statusVo = new JobStatusVo() + statusVo.setStatusCode(task.getStatus) + 
statusVo.setStatus(JobConf.getStatusString(task.getStatus)) + statusVo.setJobId(task.getJobId) + statusVo.setMessage(task.getErrDesc) + statusVo + }).asJava + } + + def getTaskJobInfo(jobId:Long, version: String): FlinkJobInfo ={ + val str = streamTaskMapper.getTask(jobId, version) + if (StringUtils.isBlank(str)) { + return new FlinkJobInfo + } + DWSHttpClient.jacksonJson.readValue(str,classOf[FlinkJobInfo]) + } + + + /** + * Update the task status + * + * @param jobId job id + * @param status status code + * @return task id of latest task + */ + @Transactional(rollbackFor = Array(classOf[Exception])) + override def transitionTaskStatus(jobId: Long, taskId: Long, status: Int): Long = { + trace(s"Query and lock the StreamJob in [$jobId] before updating status of StreamTask") + Option(streamJobMapper.queryAndLockJobById(jobId)) match { + case None => throw new JobTaskErrorException(-1, s"Unable to update status of StreamTask, the StreamJob [$jobId] is not exists.") + case Some(job) => + val streamTask = if(taskId > 0) streamTaskMapper.getTaskById(taskId) + else streamTaskMapper.getLatestByJobId(jobId) + if (null == streamTask){ + throw new JobTaskErrorException(-1, s"Unable to find any StreamTask for job [id: ${job.getId}, name: ${job.getName}]") + } + if (JobConf.isCompleted(streamTask.getStatus)){ + warn(s"StreamTask [${streamTask.getId}] has been completed for for " + + s"job [id: ${job.getId}, name: ${job.getName}]") + // Just return 0 + 0 + }else { + streamTask.setStatus(status) + streamTask.setLastUpdateTime(Calendar.getInstance.getTime) + streamTaskMapper.updateTask(streamTask) + streamTask.getId + } + } + } + + + override def getLatestTaskByJobId(jobId: Long): StreamTask = streamTaskMapper.getLatestByJobId(jobId) + + /** + * Create new task use the latest job version + * + * @param jobId job id + * @param status init status + * @param creator creator + */ + @Transactional(rollbackFor = Array(classOf[Exception])) + override def createTask(jobId: Long, status: 
Int, creator: String): StreamTask = { + logger.trace(s"Query and lock the StreamJob in [$jobId] before creating StreamTask") + Option(streamJobMapper.queryAndLockJobById(jobId)) match { + case None => throw new JobTaskErrorException(-1, s"Unable to create StreamTask, the StreamJob [$jobId] is not exists.") + case Some(job) => + // Then to fetch latest job version + Option(streamJobMapper.getLatestJobVersion(jobId)) match { + case None => throw new JobTaskErrorException(-1, s"No versions can be found for job [id: ${job.getId}, name: ${job.getName}]") + case Some(jobVersion) => + var noticeMessage = s"Fetch the latest version: ${jobVersion.getVersion} for job [id: ${job.getId}, name: ${job.getName}]" + if (!jobVersion.getVersion.equals(job.getCurrentVersion)){ + noticeMessage += s", last version used for task is ${job.getCurrentVersion}" + // Update job current version + job.setCurrentVersion(jobVersion.getVersion) + streamJobMapper.updateJob(job) + } + logger.info(noticeMessage) + // Get the latest task by job id + val latestTask = streamTaskMapper.getLatestByJobId(jobId) + if (null == latestTask || JobConf.isCompleted(latestTask.getStatus)){ + val streamTask = new StreamTask(jobId, jobVersion.getId, jobVersion.getVersion, creator) + streamTask.setStatus(status) + logger.info(s"Produce a new StreamTask [jobId: $jobId, version: ${jobVersion.getVersion}, creator: $creator, status: ${streamTask.getStatus}]") + streamTaskMapper.insertTask(streamTask) + streamTask + } else { + throw new JobTaskErrorException(-1, s"Unable to create new task, StreamTask [${latestTask.getId}] is still " + + s"not completed for job [id: ${job.getId}, name: ${job.getName}]") + } + } + } + } + + override def updateTask(streamTask: StreamTask): Unit = streamTaskMapper.updateTask(streamTask) + + /** + * Just launch task by task id + * + * @param taskId task id + */ + override def launch(taskId: Long, execUser: String): Unit = { + launch(taskId, execUser, null) + } + + /** + * Launch with job 
state + * @param taskId task id + * @param execUser executor + * @param state state + */ + def launch(taskId: Long, execUser: String, state: JobState):Long = { + // First to query the task information + val streamTask = this.streamTaskMapper.getTaskById(taskId) + if (null == streamTask){ + throw new JobExecuteErrorException(-1, s"Not found the StreamTask [$taskId] to execute, please examined the system runtime status!") + } + // Second to query the related job information + val streamJob = streamJobMapper.getJobById(streamTask.getJobId) + if (null == streamJob){ + throw new JobExecuteErrorException(-1, s"Not found the related job info in [${streamTask.getJobId}], has been dropped it ?") + } + info(s"Start to find the transform builder to process the StreamJob [${streamJob.getName}]") + val transformJob = streamisTransformJobBuilders.find(_.canBuild(streamJob)).map(_.build(streamJob)) + .getOrElse(throw new TransformFailedErrorException(30408, s"Cannot find a TransformJobBuilder to build StreamJob ${streamJob.getName}.")) + // To avoid the permission problem, use the creator to submit job + // Use {projectName}.{jobName} as the launch job name + var launchJob = LaunchJob.builder().setJobName(s"${streamJob.getProjectName}.${streamJob.getName}.${taskId}").setSubmitUser(streamJob.getCreateBy).build() + launchJob = Transform.getTransforms.foldLeft(launchJob)((job, transform) => transform.transform(transformJob, job)) + info(s"StreamJob [${streamJob.getName}] has transformed with launchJob $launchJob, now to launch it.") + //TODO getLinkisJobManager should use jobManagerType to instance in future, since not only `simpleFlink` mode is supported in future. + val jobClient = jobLaunchManager.launch(launchJob, state) + // Refresh and store the information from JobClient + Utils.tryCatch { + // Refresh the job info(If the job shutdown immediately) + val jobInfo = jobClient.getJobInfo(true) + info(s"StreamJob [${streamJob.getName}] has launched with linkis_id ${jobInfo.getId}. 
now to examine its status") + streamTask.setLinkisJobId(jobInfo.getId) + StreamTaskUtils.refreshInfo(streamTask, jobInfo) + // First to store the launched task info + streamTaskMapper.updateTask(streamTask) + info(s"StreamJob [${streamJob.getName}] is ${jobInfo.getStatus} with $jobInfo.") + if (FLINK_JOB_STATUS_FAILED.getValue == streamTask.getStatus){ + throw new JobExecuteErrorException(-1, s"(提交流式应用状态失败, 请检查日志), errorDesc: ${streamTask.getErrDesc}") + } + // Drop the temporary configuration + Utils.tryQuietly(streamJobConfMapper.deleteTemporaryConfValue(streamTask.getJobId), { + case e: Exception => + warn(s"Fail to delete the temporary configuration for job [${streamTask.getJobId}], task [${streamTask.getId}]", e) + }) + }{case e: Exception => + val message = s"Error occurred when to refresh and store the info of StreamJob [${streamJob.getName}] with JobClient" + warn(s"$message, stop and destroy the Client connection.") + // Stop the JobClient directly + Utils.tryAndWarn(jobClient.stop()) + val errExcept = new JobExecuteErrorException(-1, s"$message, message: ${e.getMessage}") + errExcept.initCause(e) + throw errExcept + } + streamTask.getId + } + + + /** + * @param taskId taskId + * @return + */ + private def getTaskInfo(taskId: Long): (Long, StreamTask) = { + val oldStreamTask = streamTaskMapper.getTaskById(taskId) + if (Option(oldStreamTask).isEmpty){ + throw new JobTaskErrorException(-1, s"Cannot find the StreamTask in id: $taskId") + } + (oldStreamTask.getJobId, oldStreamTask) + } + + /** + * Sub function + * @param str str + * @return + */ + private def sub(str:String):String = { + if (StringUtils.isBlank(str) || str.length <= 100){ + str + }else { + if (str.contains("message")){ + val subStr = str.substring(str.indexOf("message") - 1) + if (subStr.length <= 100){ + subStr + "..." + }else { + subStr.substring(0,100) + "..." + } + }else { + str.substring(0,100) + "..." 
+ } + } + } + + /** + * Convert the queue job info into schedule result + * @param jobInfo job info + * @param scheduleResult schedule result + */ + private def queueJobInfoIntoResult(jobInfo: queue.JobInfo, scheduleResult: ScheduleResultVo): Unit = { + scheduleResult.setScheduleId(jobInfo.getId) + scheduleResult.setScheduleState(jobInfo.getState) + scheduleResult.setProgress(jobInfo.getProgress) + // TODO Set metric info + scheduleResult.setMessage(jobInfo.getOutput) + } + + override def getStateInfo(taskId: Long): JobState = { + getStateInfo(this.streamTaskMapper.getTaskById(taskId)) + } + + override def getStateInfo(streamTask: StreamTask): JobState = { + Option(streamTask) match { + case Some(task) => + if (StringUtils.isNotBlank(task.getLinkisJobId)) { + info(s"Try to restore the JobState form taskId [${task.getId}], fetch the state information.") + // Connect to get the JobInfo + val jobClient = this.jobLaunchManager.connect(task.getLinkisJobId, task.getLinkisJobInfo) + val jobInfo = jobClient.getJobInfo + // Get the JobStateManager + val jobStateManager = this.jobLaunchManager.getJobStateManager + val stateList: util.List[JobState] = new util.ArrayList[JobState]() + // First to fetch the latest Savepoint information + Option(jobStateManager.getJobState[Savepoint](classOf[Savepoint], jobInfo)).foreach(savepoint => stateList.add(savepoint)) + // Determinate if need the checkpoint information + this.streamJobConfMapper.getRawConfValue(task.getJobId, JobConfKeyConstants.CHECKPOINT_SWITCH.getValue) match { + case "ON" => + // Then to fetch the latest Checkpoint information + Option(jobStateManager.getJobState[Checkpoint](classOf[Checkpoint], jobInfo)).foreach(checkpoint => stateList.add(checkpoint)) + case _ => + } + // Fetch the job state info in jobInfo at last +// Option(jobInfo.getJobStates).foreach(states => states.foreach(state => { +// val savepoint = new Savepoint(state.getLocation) +// savepoint.setTimestamp(state.getTimestamp) +// 
stateList.add(savepoint) +// })) + if (!stateList.isEmpty){ + // Choose the newest job state + val finalState = stateList.asScala.maxBy(_.getTimestamp) + info(s"Final choose the JobState: [${finalState.getLocation}] to restore the StreamJob") + return finalState + } + } else { + + } + null + case _ => null + } + } + + override def getJobDetailsVO(streamJob: StreamJob, version: String): JobDetailsVo = { + val flinkJobInfo = getTaskJobInfo(streamJob.getId, version) + val jobStateInfos = flinkJobInfo.getJobStates + val metricsStr = if (JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES.getValue.contains(streamJob.getJobType)) null + else if(jobStateInfos == null || jobStateInfos.length == 0) null + else jobStateInfos(0).getLocation + taskMetricsParser.find(_.canParse(streamJob)).map(_.parse(metricsStr)).filter { jobDetailsVO => + jobDetailsVO.setLinkisJobInfo(flinkJobInfo) + true + }.getOrElse(throw new JobFetchErrorException(30030, s"Cannot find a TaskMetricsParser to parse job details.")) + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobInspectService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobInspectService.scala new file mode 100644 index 000000000..8be02cf6b --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobInspectService.scala @@ -0,0 +1,14 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.service +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobInspectVo + +import java.util + +trait StreamJobInspectService { + /** + * Inspect method + * @param jobId job id + * @param types type list for inspecting + * @return + */ + def inspect(jobId: Long, types: Array[JobInspectVo.Types]): 
util.List[JobInspectVo] +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala new file mode 100644 index 000000000..2b5cc8e6e --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala @@ -0,0 +1,139 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.service + +import com.github.pagehelper.PageInfo +import com.webank.wedatasphere.streamis.jobmanager.manager.alert.AlertLevel +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{MetaJsonInfo, StreamAlertRecord, StreamJob, StreamJobVersion} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{QueryJobListVo, TaskCoreNumVo, VersionDetailVo} +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent + +import java.util + +/** + * Job service + */ +trait StreamJobService { + + + def getJobById(jobId: Long): StreamJob + + def getJobByName(jobName: String): util.List[StreamJob] + + /** + * Page list query + * @param projectName project name + * @param jobName job name + * @param jobStatus job status + * @param jobCreator job creator + * @return + */ + def getByProList(projectName: String, userName: String, jobName: String, jobStatus: Integer, jobCreator: String): PageInfo[QueryJobListVo] + + /** + * Page list query of version info + * @param jobId job id + * @return + */ + def getVersionList(jobId: Long): PageInfo[VersionDetailVo] + /** + * Count core norm + * @param projectName project name + * @return + */ + def countByCores(projectName: String, userName: String): TaskCoreNumVo + + /** + * Version detail 
information + * @param jobId job id + * @param version version + */ + def versionDetail(jobId: Long, version: String): VersionDetailVo + + /** + * Rolling job version + * @param preVersion version + */ + def rollingJobVersion(preVersion: String): String + + /** + * Upload files + * @param metaJsonInfo meta json + * @param version version + * @param path path + */ + def uploadFiles(metaJsonInfo: MetaJsonInfo, version: StreamJobVersion, path: String): Unit + + /** + * Deploy stream job + * @param metaJsonInfo meta json + * @param userName username + * @param updateVersion should update version + * @return + */ + def deployStreamJob(streamJob: StreamJob, metaJsonInfo: MetaJsonInfo, userName: String, updateVersion: Boolean): StreamJobVersion + + /** + * Upload job + * @param projectName project name + * @param userName username + * @param inputZipPath input zip path + * @return + */ + def uploadJob(projectName: String, userName: String, inputZipPath: String): StreamJobVersion + + /** + * Create or update job with meta json + * @param userName username + * @param metaJsonInfo meta json + * @return + */ + def createOrUpdate(userName: String, metaJsonInfo: MetaJsonInfo): StreamJobVersion + + /** + * Get job content + * @param jobId job id + * @param version version + * @return + */ + def getJobContent(jobId: Long, version: String): StreamisTransformJobContent + + /** + * Has permission + * @param jobId job id + * @param username username + * @return + */ + def hasPermission(jobId: Long, username: String): Boolean + + def hasPermission(job: StreamJob, username: String): Boolean + + /** + * Alert user + * @param job stream job + * @return + */ + def getAlertUsers(job: StreamJob): util.List[String] + + /** + * Alert level + * @param job stream job + * @return + */ + def getAlertLevel(job: StreamJob): AlertLevel + + /** + * Is creator + * @param jobId job id + * @param username username + * @return + */ + def isCreator(jobId: Long, username: String): Boolean + + /** + * List 
alert message list + * @param username username + * @param jobId job id + * @param version version + * @return + */ + def getAlert(username: String, jobId: Long, version: String): util.List[StreamAlertRecord] +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala new file mode 100644 index 000000000..fab070a5d --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala @@ -0,0 +1,161 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.service + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.FlinkJobInfo +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{ExecResultVo, JobDetailsVo, JobProgressVo, JobStatusVo, PauseResultVo, StreamTaskListVo} +import java.util +import java.util.concurrent.Future +/** + * Include the method related by stream task (such as execute/pause) + */ +trait StreamTaskService { + + /** + * Sync to execute job(task) + * 1) create a new task + * 2) launch the new task + * @param jobId job id + * @param taskId task id + * @param execUser user name + * @param restore restore from job state + */ + def execute(jobId: Long, taskId: Long, execUser: String, restore: Boolean = false): Unit + + def execute(jobId: Long, taskId: Long, execUser: String): Unit + + /** + * Async to execute job(task) + * @param jobId job id + * @param taskId task id + * @param execUser user name + * @param restore restore from job state + * @return + */ + def asyncExecute(jobId: Long, taskId: Long, execUser: String, restore: Boolean = false): Future[String] + + def asyncExecute(jobId: Long, taskId: Long, execUser: String): Future[String] + /** + * Bulk executing + * @param jobIds jobIds + * @param taskIds taskIds + * @param execUser execUser + * @param restore restore from job state + */ + def bulkExecute(jobIds: util.List[Long], taskIds: util.List[Long], execUser: String, restore: Boolean = false): util.List[ExecResultVo] + + def bulkExecute(jobIds: util.List[Long], taskIds: util.List[Long], execUser: String): util.List[ExecResultVo] + /** + * Sync to pause job(task) + * @param jobId job id + * @param taskId task id + * @param operator 
user name + */ + def pause(jobId: Long, taskId: Long, operator: String, snapshot: Boolean): PauseResultVo + + def asyncPause(jobId: Long, taskId: Long, operator: String, snapshot: Boolean): Future[PauseResultVo] + + /** + * Bulk pausing + * @param jobIds jobIds + * @param taskIds taskIds + * @param operator operator + * @param snapshot snapshot + * @return + */ + def bulkPause(jobIds: util.List[Long], taskIds: util.List[Long], operator: String, snapshot: Boolean): util.List[PauseResultVo] + /** + * Just launch task by task id + * @param taskId task id + */ + def launch(taskId: Long, execUser: String): Unit + + def getLatestTaskByJobId(jobId: Long): StreamTask + + /** + * Create new task use the latest job version + * @param jobId job id + * @param status init status + * @param creator creator + */ + def createTask(jobId: Long, status: Int, creator: String): StreamTask + + def updateTask(streamTask: StreamTask): Unit + + /** + * Update the task status + * @param jobId job id + * @param status status code + * @return task id of latest task + */ + def transitionTaskStatus(jobId: Long, taskId: Long, status: Int) : Long + /** + * Query the task history list + * @param jobId job id + * @param version version + * @return + */ + def queryHistory(jobId: Long, version: String): util.List[StreamTaskListVo] + + /** + * Get realtime log + * @param jobId job id + * @param operator user name + * @param requestPayload request payload + * @return + */ + def getRealtimeLog(jobId: Long, taskId: Long, operator: String, requestPayload: LogRequestPayload): util.Map[String, Any] + + /** + * Do snapshot + * @param jobId job id + * @param taskId task id + * @param operator operator + * @return snapshot url + */ + def snapshot(jobId: Long, taskId: Long, operator: String): String + /** + * Fetch the progress(job progress/the progress of latest task) by job id and version + * @param jobId job id + * @param version version + * @return + */ + def getProgress(jobId: Long, version: String): 
JobProgressVo + + /** + * Fetch the status list by job id list + * @param jobIds job ids + */ + def getStatusList(jobIds: util.List[Long]): util.List[JobStatusVo] + /** + * Get latest task info by job id and version number + * @param jobId job id + * @param version version + * @return + */ + def getTaskJobInfo(jobId: Long, version: String): FlinkJobInfo + + + def getStateInfo(taskId: Long): JobState + + def getStateInfo(streamTask: StreamTask): JobState + + def getJobDetailsVO(streamJob: StreamJob, version: String): JobDetailsVo + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamiFileService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamiFileService.scala new file mode 100644 index 000000000..e2e221d81 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamiFileService.scala @@ -0,0 +1,31 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.service + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile + +import java.util + +/** + * Created by enjoyyin on 2021/9/23. 
+ */ +trait StreamiFileService { + + def getFile(projectName: String, fileName: String, version: String): StreamisFile + + def listFileVersions(projectName: String, fileName: String): util.List[_ <: StreamisFile] + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala new file mode 100644 index 000000000..ee830d5af --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala @@ -0,0 +1,186 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.service + +import java.util +import java.util.Date +import java.util.concurrent.{Future, TimeUnit} + +import com.google.common.collect.Sets +import com.webank.wedatasphere.streamis.jobmanager.launcher.JobLauncherAutoConfiguration +import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants +import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobInfo, LinkisJobInfo} +import com.webank.wedatasphere.streamis.jobmanager.manager.alert.{AlertLevel, Alerter} +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf +import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask} +import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils +import javax.annotation.{PostConstruct, PreDestroy, Resource} +import org.apache.commons.lang.exception.ExceptionUtils +import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.common.utils.{Logging, RetryHandler, Utils} +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Service + +import scala.collection.convert.WrapAsScala._ + + +@Service +class TaskMonitorService extends Logging { + + @Autowired private var streamTaskMapper:StreamTaskMapper=_ + @Autowired private var streamJobMapper:StreamJobMapper=_ + @Autowired private var jobService: StreamJobService =_ + + @Autowired private var alerters:Array[Alerter] = _ + + @Resource + private var streamTaskService: StreamTaskService = _ + + @Resource + private var streamJobConfMapper: 
StreamJobConfMapper = _ + + private var future: Future[_] = _ + + @PostConstruct + def init(): Unit = { + if (JobConf.STREAMIS_JOB_MONITOR_ENABLE.getValue) { + future = Utils.defaultScheduler.scheduleAtFixedRate(new Runnable { + override def run(): Unit = Utils.tryAndWarnMsg { + doMonitor() + }("Monitor the status of all tasks failed!") + }, JobConf.TASK_MONITOR_INTERVAL.getValue.toLong, JobConf.TASK_MONITOR_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS) + } + } + + @PreDestroy + def close(): Unit = { + Option(future).foreach(_.cancel(true)) + } + + def doMonitor(): Unit = { + info("Try to update all StreamTasks status.") + val jobLaunchManager = JobLaunchManager.getJobManager(JobLauncherAutoConfiguration.DEFAULT_JOB_LAUNCH_MANGER) + val status = util.Arrays.asList(JobConf.NOT_COMPLETED_STATUS_ARRAY.map(c => new Integer(c.getValue)) :_*) + val streamTasks = streamTaskMapper.getTasksByStatus(status) + if(streamTasks == null || streamTasks.isEmpty) { + info("No StreamTasks is running, return...") + return + } + streamTasks.filter(shouldMonitor).foreach { streamTask => + val job = streamJobMapper.getJobById(streamTask.getJobId) + if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES.getValue.contains(job.getJobType)) { + val userList = Sets.newHashSet(job.getSubmitUser, job.getCreateBy) + userList.addAll(getAlertUsers(job)) + val alertMsg = s"Spark Streaming应用[${job.getName}]已经超过 ${Utils.msDurationToString(System.currentTimeMillis - streamTask.getLastUpdateTime.getTime)} 没有更新状态, 请及时确认应用是否正常!" 
+ alert(jobService.getAlertLevel(job), alertMsg, new util.ArrayList[String](userList), streamTask) + } else { + streamTask.setLastUpdateTime(new Date) + streamTaskMapper.updateTask(streamTask) + info(s"Try to update status of StreamJob-${job.getName}.") + val retryHandler = new RetryHandler {} + retryHandler.setRetryNum(3) + retryHandler.setRetryMaxPeriod(2000) + retryHandler.addRetryException(classOf[ErrorException]) + var jobInfo:JobInfo = null + Utils.tryCatch { + jobInfo = retryHandler.retry(refresh(streamTask, jobLaunchManager), s"Task-Monitor-${job.getName}") + } { ex => + error(s"Fetch StreamJob-${job.getName} failed, maybe the Linkis cluster is wrong, please be noticed!", ex) + val errorMsg = ExceptionUtils.getRootCauseMessage(ex) + if (errorMsg != null && errorMsg.contains("Not exists EngineConn")) { + streamTask.setStatus(JobConf.FLINK_JOB_STATUS_FAILED.getValue) + streamTask.setErrDesc("Not exists EngineConn.") + } else { + // 连续三次还是出现异常,说明Linkis的Manager已经不能正常提供服务,告警并不再尝试获取状态,等待下次尝试 + val users = getAlertUsers(job) + users.add(job.getCreateBy) + alert(jobService.getAlertLevel(job), s"请求LinkisManager失败,Linkis集群出现异常,请关注!影响任务[${job.getName}]", users, streamTask) + } + } + streamTaskMapper.updateTask(streamTask) + if(streamTask.getStatus == JobConf.FLINK_JOB_STATUS_FAILED.getValue) { + warn(s"StreamJob-${job.getName} is failed, please be noticed.") + var extraMessage = "" + Option(jobInfo) match { + case Some(flinkJobInfo: FlinkJobInfo) => + extraMessage = s",${flinkJobInfo.getApplicationId}" + case _ => + } + // Need to add restart feature if user sets the restart parameters. + var alertMsg = s"Streamis 流式应用[${job.getName}${extraMessage}]已经失败, 请登陆Streamis查看应用日志." 
+ this.streamJobConfMapper.getRawConfValue(job.getId, JobConfKeyConstants.FAIL_RESTART_SWITCH.getValue) match { + case "ON" => + alertMsg = s"${alertMsg} 现将自动拉起该应用" + Utils.tryCatch{ + info(s"Start to reLaunch the StreamisJob [${job.getName}], now to submit and schedule it...") + // Use submit user to start job + val future: Future[String] = streamTaskService.asyncExecute(job.getId, 0L, job.getSubmitUser, true) + }{ + case e:Exception => + warn(s"Fail to reLaunch the StreamisJob [${job.getName}]", e) + } + case _ => + } + val userList = Sets.newHashSet(job.getSubmitUser, job.getCreateBy) + userList.addAll(getAlertUsers(job)) + alert(jobService.getAlertLevel(job), alertMsg, new util.ArrayList[String](userList), streamTask) + } + } + } + info("All StreamTasks status have updated.") + } + + /** + * Refresh streamis task + * @param streamTask stream task + * @param jobLaunchManager launch manager + */ + protected def refresh(streamTask: StreamTask, jobLaunchManager: JobLaunchManager[_ <: JobInfo]): JobInfo ={ + val jobClient = jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo) + StreamTaskUtils.refreshInfo(streamTask, jobClient.getJobInfo(true)) + jobClient.getJobInfo + } + + protected def getAlertUsers(job: StreamJob): util.List[String] = { + var users = jobService.getAlertUsers(job) + if (users == null) { + users = new util.ArrayList[String]() + } else { + users = new util.ArrayList[String](users) + } + users.addAll(util.Arrays.asList(JobConf.STREAMIS_DEVELOPER.getValue.split(","):_*)) + users + } + + protected def alert(alertLevel: AlertLevel, alertMsg: String, users: util.List[String], streamTask:StreamTask): Unit = alerters.foreach{ alerter => + Utils.tryCatch { + alerter.alert(alertLevel, alertMsg, users, streamTask) + }(t => error(s"failed to send alert message to ${alerter.getClass.getSimpleName}.", t)) + } + + protected def shouldMonitor(streamTask: StreamTask): Boolean = + System.currentTimeMillis - 
streamTask.getLastUpdateTime.getTime >= JobConf.TASK_MONITOR_INTERVAL.getValue.toLong + + protected def getStatus(jobInfo: LinkisJobInfo): Int = { + //TODO We should use jobInfo to get more accurate status, such as Alert running, Slow running + JobConf.linkisStatusToStreamisStatus(jobInfo.getStatus) + } + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/ConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/ConfigTransform.scala new file mode 100644 index 000000000..918bf2754 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/ConfigTransform.scala @@ -0,0 +1,49 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.streamis.jobmanager.manager.transform

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob
import org.apache.commons.lang3.StringUtils

import java.util

/**
 * Config transform: a [[Transform]] driven by one named group of the job's
 * configuration map.
 *
 * Dispatch rule: when [[configGroup]] returns a non-blank name, only the sub-map
 * stored under that group key is handed to the protected overload; if the group
 * key is absent or its value is not a util.Map, the job is returned unchanged.
 * When [[configGroup]] is null/blank (the default), the whole config map is passed.
 */
trait ConfigTransform extends Transform {

  override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
    val config: util.Map[String, Any] = streamisTransformJob.getConfigMap
    val group = configGroup()
    if (StringUtils.isNotBlank(group)){
      // Option(...) guards against a missing group key (null value in the map)
      Option(config.get(group)) match {
        case Some(valueSet: util.Map[String, Any]) =>
          transform(valueSet, job)
        case _ => job
      }
    } else transform(streamisTransformJob.getConfigMap, job)
  }

  /**
   * Config group name; null (the default) means "apply to the whole config map".
   * @return group key inside the job configuration map, or null
   */
  protected def configGroup(): String = null

  /** Apply this transform using the selected configuration values. */
  protected def transform(valueSet: util.Map[String, Any], job: LaunchJob): LaunchJob

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform

import org.apache.linkis.manager.label.entity.engine.RunType.RunType
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion}
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent

/**
 * Created by enjoyyin on 2021/9/22.
 *
 * Parses the persisted content of a StreamJob version into a
 * [[StreamisTransformJobContent]] understood by the transform pipeline.
 */
trait JobContentParser {

  /** Job type handled by this parser (callers compare it against the job's lower-cased type). */
  val jobType: String

  /** Engine run type the parsed content is executed with. */
  val runType: RunType

  /** @return true when this parser can handle the given job/version pair */
  def canParse(job: StreamJob, jobVersion: StreamJobVersion): Boolean

  /** Parse the version's job content into a transform-job content entity. */
  def parseTo(job: StreamJob, jobVersion: StreamJobVersion): StreamisTransformJobContent

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob

import java.util
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisTransformJob, StreamisTransformJobContent}


/**
 * Transform that converts the parsed job content into the LaunchJob's jobContent map.
 * Subclasses produce the map; this trait attaches the engine run type and rebuilds
 * the LaunchJob. A null map from the subclass leaves the job untouched.
 */
trait StreamisJobContentTransform extends Transform {

  override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
    val jobContent = transformJobContent(streamisTransformJob.getStreamisTransformJobContent)
    if(jobContent != null) {
      // Propagate the engine run type alongside the parsed content
      jobContent.put("runType", streamisTransformJob.getStreamisJobEngineConn.getRunType.toString)
      LaunchJob.builder().setLaunchJob(job).setJobContent(jobContent).build()
    } else job
  }

  /** @return the jobContent map for the launch request, or null to skip this transform */
  protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, Any]
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform

import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob


/**
 * Builds the [[StreamisTransformJob]] wrapper that carries a StreamJob through
 * the transform pipeline.
 */
trait StreamisTransformJobBuilder {

  /** @return true when this builder supports the given job */
  def canBuild(streamJob: StreamJob): Boolean

  /** Build the transform job for the given StreamJob. */
  def build(streamJob: StreamJob): StreamisTransformJob

}

// ---- TaskMetricsParser.scala ----

package com.webank.wedatasphere.streamis.jobmanager.manager.transform

import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo

/**
 * Parses a task's raw metrics string into a [[JobDetailsVo]].
 * The metrics format is engine-specific — implementations declare which jobs
 * they understand via [[canParse]].
 *
 * @date 2022-10-21
 * @author enjoyyin
 * @since 0.5.0
 */
trait TaskMetricsParser {

  /** @return true when this parser understands metrics produced by the given job */
  def canParse(streamJob: StreamJob): Boolean

  /** Parse the raw metrics payload into a details view object. */
  def parse(metrics: String): JobDetailsVo

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import org.apache.linkis.common.utils.ClassUtils
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob


/**
 * One step of the job-transform pipeline: takes the transform job (config,
 * content, engine info) and the LaunchJob built so far, returns the next LaunchJob.
 */
trait Transform {

  def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob

}

import scala.collection.convert.WrapAsScala._
object Transform {

  // Discover every concrete Transform on the classpath via linkis reflections and
  // instantiate each exactly once (requires a public no-arg constructor).
  // NOTE(review): iteration order comes from the reflection scan — presumably the
  // pipeline is order-insensitive or ordered elsewhere; confirm at the call site.
  private val transforms = ClassUtils.reflections.getSubTypesOf(classOf[Transform]).filterNot(ClassUtils.isInterfaceOrAbstract)
    .map(_.newInstance).toArray

  /** All discovered transform instances (shared, created at class-load time). */
  def getTransforms: Array[Transform] = transforms

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.builder

import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import org.apache.linkis.common.conf.CommonVars
import org.apache.linkis.manager.label.entity.engine.RunType.RunType
import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService
import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamJobMapper
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.StreamisTransformJobBuilder
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJobEngineConnImpl, StreamisTransformJob, StreamisTransformJobContent, StreamisTransformJobImpl}
import org.springframework.beans.factory.annotation.Autowired

import java.util
import scala.collection.JavaConverters.mapAsJavaMapConverter
/**
 * Created by enjoyyin on 2021/9/22.
 *
 * Base builder: loads the job's stored configuration, forces the internal config
 * group (log gateway/collect path), and attaches the latest job version and the
 * type-specific job content.
 */
abstract class AbstractStreamisTransformJobBuilder extends StreamisTransformJobBuilder {

  @Autowired private var streamJobMapper: StreamJobMapper = _
  @Autowired private var streamJobConfService: StreamJobConfService = _

  /** Factory hook: subclasses may supply a specialized transform-job instance. */
  protected def createStreamisTransformJob(): StreamisTransformJobImpl = new StreamisTransformJobImpl

  /** Build the job-content entity for the transform job (type-specific). */
  protected def createStreamisTransformJobContent(transformJob: StreamisTransformJob): StreamisTransformJobContent

  override def build(streamJob: StreamJob): StreamisTransformJob = {
    val transformJob = createStreamisTransformJob()
    transformJob.setStreamJob(streamJob)
    // Null-safe: a job with no stored configuration gets an empty map
    val jobConfig: util.Map[String, Any] = Option(streamJobConfService.getJobConfig(streamJob.getId))
      .getOrElse(new util.HashMap[String, Any]())
    // Put and overwrite internal group, users cannot customize the internal configuration
    val internalGroup = new util.HashMap[String, Any]()
    jobConfig.put(JobConfKeyConstants.GROUP_INTERNAL.getValue, internalGroup)
    internalLogConfig(internalGroup)
    transformJob.setConfigMap(jobConfig)
//    transformJob.setConfig(configurationService.getFullTree(streamJob.getId))
    val streamJobVersions = streamJobMapper.getJobVersions(streamJob.getId)
    // No need to check that streamJobVersions is non-empty: TaskService has already verified it
    transformJob.setStreamJobVersion(streamJobVersions.get(0))
    transformJob.setStreamisTransformJobContent(createStreamisTransformJobContent(transformJob))
    transformJob
  }

  /**
   * Log internal configuration
   * @param internal internal config group
   */
  private def internalLogConfig(internal: util.Map[String, Any]): Unit = {
    internal.put(JobConf.STREAMIS_JOB_LOG_GATEWAY.key, JobConf.STREAMIS_JOB_LOG_GATEWAY.getValue)
    internal.put(JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.key, JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.getValue)
  }
}

/** Adds Flink engine-conn information (engine type + run type) on top of the base builder. */
abstract class AbstractFlinkStreamisTransformJobBuilder extends AbstractStreamisTransformJobBuilder{

  // Flink version is overridable via configuration; defaults to 1.12.2
  private val flinkVersion = CommonVars("wds.streamis.flink.submit.version", "1.12.2").getValue

  /** Engine run type for this transform job (determined by subclass). */
  protected def getRunType(transformJob: StreamisTransformJob): RunType

  override def build(streamJob: StreamJob): StreamisTransformJob = super.build(streamJob) match {
    case transformJob: StreamisTransformJobImpl =>
      val engineConn = new StreamisJobEngineConnImpl
      engineConn.setEngineConnType("flink-" + flinkVersion)
      engineConn.setRunType(getRunType(transformJob))
      transformJob.setStreamisJobEngineConn(engineConn)
      transformJob
    case job => job
  }
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.builder

import org.apache.linkis.manager.label.entity.engine.RunType.RunType
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisTransformJob, StreamisTransformJobContent}
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component


/**
 * Flink transform-job builder that delegates job-content parsing to the
 * injected [[JobContentParser]]s, one per supported job type.
 */
@Component
class StreamisFlinkTransformJobBuilder extends AbstractFlinkStreamisTransformJobBuilder {

  @Autowired private var jobContentParsers: Array[JobContentParser] = _

  /** A job is buildable when some registered parser declares its (lower-cased) job type. */
  override def canBuild(streamJob: StreamJob): Boolean = jobContentParsers.map(_.jobType).contains(streamJob.getJobType.toLowerCase)

  // .get is safe only because callers invoke this after canBuild returned true for this type
  override protected def getRunType(transformJob: StreamisTransformJob): RunType =
    jobContentParsers.find(_.jobType == transformJob.getStreamJob.getJobType.toLowerCase).map(_.runType).get

  /** @throws JobExecuteErrorException (code 30350) when no parser accepts the job content */
  override protected def createStreamisTransformJobContent(transformJob: StreamisTransformJob): StreamisTransformJobContent =
    jobContentParsers.find(_.canParse(transformJob.getStreamJob, transformJob.getStreamJobVersion))
      .map(_.parseTo(transformJob.getStreamJob, transformJob.getStreamJobVersion))
      .getOrElse(throw new JobExecuteErrorException(30350, "Not support jobContent " + transformJob.getStreamJobVersion.getJobContent))
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity

import org.apache.linkis.manager.label.entity.engine.RunType._


/**
 * Engine connection description for a transform job: the engine-conn type
 * string (e.g. "flink-1.12.2") and the run type.
 */
trait StreamisJobEngineConn {

  def getRunType: RunType

  def getEngineConnType: String

}

/** Plain mutable bean implementation of [[StreamisJobEngineConn]]. */
class StreamisJobEngineConnImpl extends StreamisJobEngineConn {

  private var connType: String = _
  private var connRunType: RunType = _

  override def getEngineConnType: String = connType

  def setEngineConnType(engineConnType: String): Unit = this.connType = engineConnType

  override def getRunType: RunType = connRunType

  def setRunType(runType: RunType): Unit = this.connRunType = runType

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity

import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion}
import java.util
/**
 * Transform job: aggregates everything the transform pipeline needs — the source
 * StreamJob, the version being launched, the merged configuration map, the
 * target engine connection and the parsed job content.
 */
trait StreamisTransformJob {

  /** The StreamJob being transformed. */
  def getStreamJob: StreamJob

  /** The specific job version to launch. */
  def getStreamJobVersion: StreamJobVersion

  /** Job configuration, keyed by config-group names. */
  def getConfigMap: util.Map[String, Any]

  /** Target engine connection (engine type and run type). */
  def getStreamisJobEngineConn: StreamisJobEngineConn

  /** Parsed job content (e.g. SQL text). */
  def getStreamisTransformJobContent: StreamisTransformJobContent


}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity

/**
 * Created by enjoyyin on 2021/9/22.
 *
 * Marker trait for the parsed content of a transform job.
 */
trait StreamisTransformJobContent

/** Transform-job content holding a single streaming SQL text. */
class StreamisSqlTransformJobContent extends StreamisTransformJobContent {

  private var sqlText: String = _

  def setSql(sql: String): Unit = this.sqlText = sql

  def getSql: String = sqlText

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity

import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion}

import java.util


/** Plain mutable bean implementation of [[StreamisTransformJob]]. */
class StreamisTransformJobImpl extends StreamisTransformJob {

  private var job: StreamJob = _
  private var version: StreamJobVersion = _
  private var config: util.Map[String, Any] = _
  private var engineConn: StreamisJobEngineConn = _
  private var jobContent: StreamisTransformJobContent = _

  override def getStreamJob: StreamJob = job

  def setStreamJob(streamJob: StreamJob): Unit = this.job = streamJob

  override def getStreamJobVersion: StreamJobVersion = version

  def setStreamJobVersion(streamJobVersion: StreamJobVersion): Unit = this.version = streamJobVersion

  override def getConfigMap: util.Map[String, Any] = this.config

  def setConfigMap(mapValue: util.Map[String, Any]): Unit = this.config = mapValue

  override def getStreamisJobEngineConn: StreamisJobEngineConn = engineConn

  def setStreamisJobEngineConn(streamisJobEngineConn: StreamisJobEngineConn): Unit = this.engineConn = streamisJobEngineConn

  override def getStreamisTransformJobContent: StreamisTransformJobContent = jobContent

  def setStreamisTransformJobContent(streamisTransformJobContent: StreamisTransformJobContent): Unit =
    this.jobContent = streamisTransformJobContent

}
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/exception/TransformFailedErrorException.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/exception/TransformFailedErrorException.scala new file mode 100644 index 000000000..49379e414 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/exception/TransformFailedErrorException.scala @@ -0,0 +1,21 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.exception

import org.apache.linkis.common.exception.ErrorException


/**
 * Raised when a transform step fails while converting a StreamJob into a launchable job.
 *
 * @param errorCode numeric error code surfaced to the caller
 * @param errorMsg  human-readable failure description
 */
class TransformFailedErrorException(errorCode: Int, errorMsg: String) extends ErrorException(errorCode, errorMsg)
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.JobLauncherAutoConfiguration
import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants

import java.util
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.Checkpoint
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.FlinkCheckpointConfigTransform.CHECKPOINT_PATH_CONFIG_NAME
import org.apache.linkis.common.conf.CommonVars
import org.apache.linkis.common.utils.Logging

import scala.collection.JavaConverters._

/**
 * Use the flink inner parameters instead of the engine parameter (in linkis).
 * When the checkpoint switch in the "produce" config group is "ON", injects the
 * per-job checkpoint directory and rewrites all checkpoint-prefixed keys into
 * Flink's "execution.checkpointing.*" namespace.
 */
class FlinkCheckpointConfigTransform extends FlinkConfigTransform with Logging{


  /**
   * Config group name
   *
   * @return the "produce" config group
   */
  override protected def configGroup(): String = JobConfKeyConstants.GROUP_PRODUCE.getValue

  override protected def transform(produceConfig: util.Map[String, Any], job: LaunchJob): LaunchJob = {
    // Only the exact string "ON" enables checkpointing; any other value (incl. null) is a no-op
    produceConfig.get(JobConfKeyConstants.CHECKPOINT_SWITCH.getValue) match {
      case "ON" =>
        val checkpointConfig: util.Map[String, Any] = new util.HashMap[String, Any]()
        val jobLaunchManager = JobLaunchManager.getJobManager(JobLauncherAutoConfiguration.DEFAULT_JOB_LAUNCH_MANGER)
        // Resolve the per-job checkpoint directory from the default launch manager's state manager
        val checkpointPath = jobLaunchManager.getJobStateManager.getJobStateDir(classOf[Checkpoint], job.getJobName)
        checkpointConfig.put(FlinkConfigTransform.FLINK_CONFIG_PREFIX + CHECKPOINT_PATH_CONFIG_NAME, checkpointPath)
        info(s"Use the checkpoint dir, ${CHECKPOINT_PATH_CONFIG_NAME} => ${checkpointPath}")
        // Re-namespace every checkpoint-prefixed produce key to "execution.checkpointing.*"
        produceConfig.asScala.filter(_._1.startsWith(JobConfKeyConstants.CHECKPOINT.getValue))
          .foreach{
            case (key, value) =>
              checkpointConfig.put(FlinkConfigTransform.FLINK_CONFIG_PREFIX + key
                .replace(JobConfKeyConstants.CHECKPOINT.getValue, "execution.checkpointing."), value)
          }
        transformConfig(checkpointConfig, job)
      case _ => job
    }
  }

}

object FlinkCheckpointConfigTransform{
  // Flink config key that receives the checkpoint directory (overridable via CommonVars)
  private val CHECKPOINT_PATH_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.checkpoint-path", "state.checkpoints.dir").getValue
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.ConfigTransform
import com.webank.wedatasphere.streamis.jobmanager.manager.utils.JobUtils
import org.apache.linkis.protocol.utils.TaskUtils

import scala.collection.convert.WrapAsScala._
import java.util

/**
 * Flink common config transform: copies a config map into the LaunchJob's
 * startup params and rebuilds the job.
 */
abstract class FlinkConfigTransform extends ConfigTransform {

  /**
   * Merge the given config entries into the job's startup params.
   *
   * @param getConfig by-name config supplier — evaluated once here; a null result is tolerated
   * @param job       launch job to extend
   * @return a rebuilt LaunchJob carrying the merged params
   */
  protected def transformConfig(getConfig: => util.Map[String, Any], job: LaunchJob): LaunchJob = {
    val startupMap = new util.HashMap[String, Any]
    Option(getConfig).foreach(configSeq => configSeq.foreach{
      case (key, value) => startupMap.put(key, value)
      case _ =>
    })
    // Reuse existing params when present so earlier transforms' settings are kept
    val params = if(job.getParams == null) new util.HashMap[String, Any] else job.getParams
    // NOTE(review): JobUtils.filterParameterSpec presumably sanitizes/filters keys — confirm in JobUtils
    if(!startupMap.isEmpty) TaskUtils.addStartupMap(params, JobUtils.filterParameterSpec(startupMap))
    LaunchJob.builder().setLaunchJob(job).setParams(params).build()
  }
}
object FlinkConfigTransform{

  // Marker prefix identifying keys as raw Flink configuration entries
  val FLINK_CONFIG_PREFIX:String = "_FLINK_CONFIG_."
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob

import java.util
import scala.collection.JavaConverters._

/**
 * Flink extra configuration transform: every key in the "flink extra" config
 * group is forwarded verbatim with the raw Flink prefix
 * (key => _FLINK_CONFIG_. + key).
 */
class FlinkExtraConfigTransform extends FlinkConfigTransform {

  /** Config group holding the user-defined extra Flink settings. */
  override protected def configGroup(): String = JobConfKeyConstants.GROUP_FLINK_EXTRA.getValue

  override protected def transform(flinkExtra: util.Map[String, Any], job: LaunchJob): LaunchJob = {
    // Prefix each key so the engine side recognizes it as a raw Flink setting
    val prefixed = flinkExtra.asScala.map {
      case (key, value) => (FlinkConfigTransform.FLINK_CONFIG_PREFIX + key, value)
    }
    transformConfig(prefixed.asJava, job)
  }

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.FlinkInternalConfigTransform.INTERNAL_CONFIG_MAP
import org.apache.linkis.common.conf.CommonVars

import java.util
import scala.collection.JavaConverters.{mapAsJavaMapConverter, mapAsScalaMapConverter}

/**
 * Flink internal config transform.
 *
 * Renames internal Streamis configuration keys to the names expected by the
 * flink-streamis-log-collector and adds the raw Flink config prefix before
 * merging them into the job's startup params.
 */
class FlinkInternalConfigTransform extends FlinkConfigTransform {

  /**
   * Config group name
   *
   * @return
   */
  override protected def configGroup(): String = JobConfKeyConstants.GROUP_INTERNAL.getValue

  override protected def transform(internalConfig: util.Map[String, Any], job: LaunchJob): LaunchJob = {
    transformConfig(internalConfig.asScala.map {
      case (key, value) =>
        // Map the internal key to its collector-side name; unmapped keys pass
        // through unchanged. (Bug fix: the previous fallback used `value`
        // instead of `key`, producing keys like "_FLINK_CONFIG_.<value>".)
        val mappedKey = INTERNAL_CONFIG_MAP.getOrElse(key, key)
        (FlinkConfigTransform.FLINK_CONFIG_PREFIX + mappedKey, value)
    }.asJava, job)
  }
}

object FlinkInternalConfigTransform {
  /**
   * Defined in FlinkStreamisConfigDefine.LOG_GATEWAY_ADDRESS of 'flink-streamis-log-collector'
   */
  private val LOG_GATEWAY_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.log-gateway", "stream.log.gateway.address").getValue

  /**
   * Defined in FlinkStreamisConfigDefine.LOG_COLLECT_PATH of 'flink-streamis-log-collector'
   */
  private val LOG_COLLECT_PATH_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.log-collect-path", "stream.log.collect.path").getValue

  /** Internal key -> log-collector configuration name. */
  val INTERNAL_CONFIG_MAP = Map(
    JobConf.STREAMIS_JOB_LOG_GATEWAY.key -> LOG_GATEWAY_CONFIG_NAME,
    JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.key -> LOG_COLLECT_PATH_CONFIG_NAME
  )
}
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkJarStreamisJobContentTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkJarStreamisJobContentTransform.scala new file mode 100644 index 000000000..72206eee0 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkJarStreamisJobContentTransform.scala @@ -0,0 +1,86 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob

import java.util
import org.apache.linkis.common.utils.JsonUtils
import org.apache.linkis.protocol.utils.TaskUtils
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.{StreamisJobContentTransform, Transform}
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJarTransformJobContent, StreamisTransformJob, StreamisTransformJobContent}
import com.webank.wedatasphere.streamis.jobmanager.manager.utils.JobUtils

import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer

/**
 * Created by enjoyyin on 2021/9/23.
 */
// Builds the job-content section for a Flink jar job: the space-joined
// program arguments and the main class name.
class FlinkJarStreamisJobContentTransform extends StreamisJobContentTransform {
  override protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, Any] = transformJob match {
    case transformJobContent: StreamisJarTransformJobContent =>
      val jobContent = new util.HashMap[String, Any]
      // Program args are flattened into a single command-line string
      jobContent.put("flink.app.args", transformJobContent.getArgs.asScala.mkString(" "))
      jobContent.put("flink.app.main.class", transformJobContent.getMainClass)
      jobContent
    case _ => null // not a jar job: another transform will handle it
  }
}

// Populates startup params for a Flink jar job: main jar location (as BML
// JSON), shipped classpath files, and the optional HDFS user lib path.
class FlinkJarStreamisStartupParamsTransform extends Transform {

  override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = streamisTransformJob.getStreamisTransformJobContent match {
    case transformJobContent: StreamisJarTransformJobContent =>
      val startupMap = new util.HashMap[String, Any]
      startupMap.put("flink.app.main.class.jar", transformJobContent.getMainClassJar.getFileName)
      // BML coordinates of the main jar, serialized to JSON for the engine side
      startupMap.put("flink.app.main.class.jar.bml.json",
        JsonUtils.jackson.writeValueAsString(getStreamisFileContent(transformJobContent.getMainClassJar)))

      /**
       * Notice : "flink.app.user.class.path" equals to PipelineOptions.CLASSPATHS in Flink
       * paths must specify a protocol (e.g. file://) and be accessible on all nodes
       * so we use "flink.yarn.ship-directories" instead
       */
      // Dependency jars plus resource files together form the shipped classpath
      var classPathFiles = Option(transformJobContent.getDependencyJars) match {
        case Some(list) => list.asScala
        case _ => mutable.Buffer[StreamisFile]()
      }
      Option(transformJobContent.getResources) match {
        case Some(list) => classPathFiles = classPathFiles ++ list.asScala
        case _ => // Do nothing
      }
      startupMap.put("flink.yarn.ship-directories", classPathFiles.map(_.getFileName).mkString(","))
      if(classPathFiles.nonEmpty)
        startupMap.put("flink.app.user.class.path.bml.json",
          JsonUtils.jackson.writeValueAsString(classPathFiles.map(getStreamisFileContent).asJava))
      if(transformJobContent.getHdfsJars != null)
        startupMap.put("flink.user.lib.path", transformJobContent.getHdfsJars.asScala.mkString(","))
      // Merge into existing params (if any); spec-only parameters are filtered out
      val params = if(job.getParams == null) new util.HashMap[String, Any] else job.getParams
      if(!startupMap.isEmpty) TaskUtils.addStartupMap(params, JobUtils.filterParameterSpec(startupMap))
      LaunchJob.builder().setLaunchJob(job).setParams(params).build()
    case _ => job // non-jar jobs pass through untouched
  }

  // Parses the BML store path (a JSON document) of a StreamisFile and
  // attaches the file name under the "fileName" key.
  private def getStreamisFileContent(streamisFile: StreamisFile): util.Map[String, Object] = {
    val content = JsonUtils.jackson.readValue(streamisFile.getStorePath, classOf[util.Map[String, Object]])
    content.put("fileName", streamisFile.getFileName)
    content
  }

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob

import java.util
import scala.collection.JavaConverters._

/**
 * Flink savepoint config transform: keys under the savepoint config prefix
 * are renamed to "execution.savepoint.*" and forwarded with the raw Flink
 * prefix.
 */
class FlinkSavepointConfigTransform extends FlinkConfigTransform {

  /** Config group holding produce-time (savepoint) settings. */
  override protected def configGroup(): String = JobConfKeyConstants.GROUP_PRODUCE.getValue

  override protected def transform(valueSet: util.Map[String, Any], job: LaunchJob): LaunchJob = {
    val savepointPrefix = JobConfKeyConstants.SAVEPOINT.getValue
    // Keep only savepoint keys, rewriting them to Flink's execution.savepoint.* names
    val savepointConfig = valueSet.asScala.collect {
      case (key, value) if key.startsWith(savepointPrefix) =>
        (FlinkConfigTransform.FLINK_CONFIG_PREFIX + key.replace(savepointPrefix, "execution.savepoint."), value)
    }
    transformConfig(savepointConfig.asJava, job)
  }
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob

import java.util
import org.apache.linkis.computation.client.utils.LabelKeyUtils
import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.Transform
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob
import org.apache.commons.lang.StringUtils
import org.apache.linkis.manager.label.constant.LabelKeyConstant


/**
 * Attaches the Linkis labels (engine type, user-creator, optional tenant,
 * "once" engine-conn mode) to the launch job.
 */
class LabelsStreamisCodeTransform extends Transform {

  override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
    val labelMap = new util.HashMap[String, Any]
    labelMap.put(LabelKeyUtils.ENGINE_TYPE_LABEL_KEY, streamisTransformJob.getStreamisJobEngineConn.getEngineConnType)
    labelMap.put(LabelKeyUtils.USER_CREATOR_LABEL_KEY, s"${streamisTransformJob.getStreamJob.getSubmitUser}-Streamis")
    // Add the tenant label default
    val defaultTenant: String = JobConf.STREAMIS_DEFAULT_TENANT.getValue
    if (StringUtils.isNotBlank(defaultTenant)) {
      labelMap.put(LabelKeyConstant.TENANT_KEY, defaultTenant)
    }
    // Streamis jobs always run as "once" engine connections
    labelMap.put(LabelKeyUtils.ENGINE_CONN_MODE_LABEL_KEY, "once")
    // Labels already carried by the job win over the defaults above
    Option(job.getLabels).foreach(labelMap.putAll)
    LaunchJob.builder().setLaunchJob(job).setLabels(labelMap).build()
  }

}
a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LaunchConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LaunchConfigTransform.scala new file mode 100644 index 000000000..7c14926f0 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LaunchConfigTransform.scala @@ -0,0 +1,42 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob

import java.util
import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.Transform
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob
import org.apache.linkis.DataWorkCloudApplication

/**
 * Fills in the default launch configs (creating service instance, job
 * description, max submit time) without overriding values already present.
 *
 * @date 2022-01-08
 * @author enjoyyin
 * @since 0.1.0
 */
class LaunchConfigTransform extends Transform {

  override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
    val launchConfigs = Option(job.getLaunchConfigs).getOrElse(new util.HashMap[String, Any])
    // putIfAbsent: keep any value an earlier transform (or caller) already set
    launchConfigs.putIfAbsent(LaunchJob.LAUNCH_CONFIG_CREATE_SERVICE, DataWorkCloudApplication.getServiceInstance.toString)
    launchConfigs.putIfAbsent(LaunchJob.LAUNCH_CONFIG_DESCRIPTION, streamisTransformJob.getStreamJob.getDescription)
    launchConfigs.putIfAbsent(LaunchJob.LAUNCH_CONFIG_MAX_SUBMIT_TIME, JobConf.TASK_SUBMIT_TIME_MAX.getValue.toLong)
    LaunchJob.builder().setLaunchJob(job).setLaunchConfigs(launchConfigs).build()
  }

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants

import java.util
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.ConfigTransform
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.ResourceConfigTransform.RESOURCE_CONFIG_MAP
import com.webank.wedatasphere.streamis.jobmanager.manager.utils.JobUtils
import org.apache.linkis.protocol.utils.TaskUtils

import scala.collection.JavaConverters._


/**
 * Renames resource (memory / cpu / slots / parallelism) configuration keys to
 * the names understood by the Flink engine conn and merges them into the
 * job's startup params.
 */
class ResourceConfigTransform extends ConfigTransform {

  /** Config group holding the resource settings. */
  override protected def configGroup(): String = JobConfKeyConstants.GROUP_RESOURCE.getValue

  override protected def transform(valueSet: util.Map[String, Any], job: LaunchJob): LaunchJob = {
    // Known keys are renamed via RESOURCE_CONFIG_MAP; unknown keys pass through
    val startupMap = valueSet.asScala.map {
      case (key, value) => (RESOURCE_CONFIG_MAP.getOrElse(key, key), value)
    }.asJava
    val params = Option(job.getParams).getOrElse(new util.HashMap[String, Any])
    if (!startupMap.isEmpty) {
      TaskUtils.addStartupMap(params, JobUtils.filterParameterSpec(startupMap))
    }
    LaunchJob.builder().setLaunchJob(job).setParams(params).build()
  }
}

object ResourceConfigTransform {
  /** Streamis resource key -> Flink engine-conn key. */
  val RESOURCE_CONFIG_MAP = Map(
    "wds.linkis.flink.taskmanager.memory" -> "flink.taskmanager.memory",
    "wds.linkis.flink.jobmanager.memory" -> "flink.jobmanager.memory",
    "wds.linkis.flink.taskmanager.cpus" -> "flink.taskmanager.cpu.cores",
    "wds.linkis.flink.taskmanager.numberOfTaskSlots" -> "flink.taskmanager.numberOfTaskSlots",
    "wds.linkis.flink.app.parallelism" -> "wds.linkis.engineconn.flink.app.parallelism"
  )
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob

import java.util
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.Transform
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob


/**
 * Attaches the job's origin (project, workspace, job name) to the launch
 * job's source map.
 */
class SourceTransform extends Transform {
  override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
    val streamJob = streamisTransformJob.getStreamJob
    val source = new util.HashMap[String, Any]
    source.put("project", streamJob.getProjectName)
    source.put("workspace", streamJob.getWorkspaceName)
    source.put("job", streamJob.getName)
    LaunchJob.builder().setLaunchJob(job).setSource(source).build()
  }
}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl

import java.util

import com.webank.wedatasphere.streamis.jobmanager.manager.transform.StreamisJobContentTransform
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisSqlTransformJobContent, StreamisTransformJobContent}

/**
 * Builds the job-content section for a Flink SQL job by wrapping the job's
 * SQL text under the "code" key.
 */
class SqlStreamisJobContentTransform extends StreamisJobContentTransform {

  override protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, Any] = transformJob match {
    case sqlContent: StreamisSqlTransformJobContent =>
      val content = new util.HashMap[String, Any]
      content.put("code", sqlContent.getSql)
      content
    case _ => null // not a SQL job: another transform will handle it
  }

}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.io.InputStream +import java.util + +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.utils.{JsonUtils, Logging} +import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamJobMapper +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion, StreamisFile} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException +import com.webank.wedatasphere.streamis.jobmanager.manager.service.{BMLService, StreamiFileService} +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser +import org.apache.commons.io.IOUtils +import org.apache.commons.lang.StringUtils +import org.springframework.beans.factory.annotation.Autowired + +import scala.collection.JavaConverters._ + +/** + * Created by enjoyyin on 2021/9/23. 
abstract class AbstractJobContentParser extends JobContentParser with Logging {

  @Autowired private var streamJobMapper: StreamJobMapper = _
  @Autowired private var bmlService: BMLService = _
  @Autowired private var streamiFileService: StreamiFileService = _

  /**
   * Resolve a file from the project file store. A name matching
   * "<name>-<x.y.z>" selects that exact version, otherwise the first listed
   * (latest) version is used.
   *
   * @throws JobExecuteErrorException when no matching file exists
   */
  private def findFromProject(projectName: String, fileName: String): StreamisFile = fileName match {
    case AbstractJobContentParser.PROJECT_FILE_REGEX(name, version) =>
      val file = streamiFileService.getFile(projectName, name, version)
      if (file == null)
        throw new JobExecuteErrorException(30500, s"Not exists file $fileName.")
      file
    case _ =>
      val files = streamiFileService.listFileVersions(projectName, fileName)
      if (files == null || files.isEmpty)
        throw new JobExecuteErrorException(30500, s"Not exists file $fileName.")
      files.get(0)
  }

  /** Resolve each name via findFromProject; empty/null input yields an empty array. */
  private def findFromProject(projectName: String, fileNames: Array[String]): Array[StreamisFile] = {
    if (fileNames == null || fileNames.isEmpty) Array.empty
    else fileNames.map(findFromProject(projectName, _))
  }

  /**
   * Find a single file, preferring the files attached to the job version and
   * falling back to the project file store.
   */
  protected def findFile(job: StreamJob, jobVersion: StreamJobVersion, fileName: String): StreamisFile = {
    val files = streamJobMapper.getStreamJobVersionFiles(jobVersion.getJobId, jobVersion.getId)
    val (file, fileSource) = if (files == null || files.isEmpty)
      (findFromProject(job.getProjectName, fileName), "project")
    else files.asScala.find(_.getFileName == fileName).map((_, "jobVersion"))
      .getOrElse((findFromProject(job.getProjectName, fileName), "project"))
    info(s"Find a $fileSource file(${file.getFileName}, ${file.getVersion}) with storePath ${file.getStorePath} for StreamJob-${job.getName} with file $fileName.")
    file
  }

  /**
   * Find several files at once, preferring job-version files and completing
   * the set from the project file store.
   *
   * Bug fix: the previous implementation filtered with `fileNames.contains`
   * applied to StreamisFile instances and `files.contains` applied to
   * Strings — both compile (contains accepts Any) but never match, so
   * job-version files were effectively ignored. Matching is now done by
   * file name.
   */
  protected def findFiles(job: StreamJob, jobVersion: StreamJobVersion, fileNames: Array[String]): Array[StreamisFile] = {
    val files = streamJobMapper.getStreamJobVersionFiles(jobVersion.getJobId, jobVersion.getId)
    if (files == null || files.isEmpty) findFromProject(job.getProjectName, fileNames)
    else {
      val matched = files.asScala.filter(file => fileNames.contains(file.getFileName))
      if (matched.size == fileNames.length) matched.toArray
      else {
        val matchedNames = matched.map(_.getFileName).toSet
        (matched ++ findFromProject(job.getProjectName, fileNames.filterNot(matchedNames.contains))).toArray
      }
    }
  }

  /**
   * Locate a file and hand its BML coordinates (resourceId, version) to `op`.
   * Only BML-stored files are supported.
   */
  private def getFile[T](job: StreamJob, jobVersion: StreamJobVersion, fileName: String, op: (String, String) => T): T = {
    val streamisFile = findFile(job, jobVersion, fileName)
    streamisFile.getStoreType match {
      case StreamisFile.BML_STORE_TYPE =>
        // storePath is a JSON document holding the BML resource coordinates
        val resourceMap = JsonUtils.jackson.readValue(streamisFile.getStorePath, classOf[util.Map[String, String]])
        op(resourceMap.get("resourceId"), resourceMap.get("version"))
      case _ =>
        throw new JobExecuteErrorException(30500, s"Not supported storeType ${streamisFile.getStoreType}.")
    }
  }

  /** Read the whole file as a string using the platform BDP encoding. */
  protected def getFileContent(job: StreamJob, jobVersion: StreamJobVersion, fileName: String): String =
    getFile(job, jobVersion, fileName, readFileFromBML(jobVersion.getCreateBy, _, _))

  /** Open the file as a stream; the caller is responsible for closing it. */
  protected def readFile(job: StreamJob, jobVersion: StreamJobVersion, fileName: String): InputStream =
    getFile(job, jobVersion, fileName, readBMLFile(jobVersion.getCreateBy, _, _))

  protected def readBMLFile(userName: String, resourceId: String, version: String): InputStream = {
    if (StringUtils.isBlank(resourceId)) throw new JobExecuteErrorException(30500, "Not exists resourceId.")
    bmlService.get(userName, resourceId, version)
  }

  protected def readFileFromBML(userName: String, resourceId: String, version: String): String =
    IOUtils.toString(readBMLFile(userName, resourceId, version), Configuration.BDP_ENCODING.getValue)

  /** A parser handles a job exactly when the job types match. */
  override def canParse(job: StreamJob, jobVersion: StreamJobVersion): Boolean = jobType == job.getJobType

}
object AbstractJobContentParser {

  /** Matches "<name>-<major.minor.patch>", capturing name and version. */
  val PROJECT_FILE_REGEX = "(^[^.]+)-([\\d]+?\\.[\\d]+?\\.[\\d]+)$".r

}
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser

import java.util

import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.TaskMetricsParser
import org.apache.commons.lang3.StringUtils
import org.apache.linkis.httpclient.dws.DWSHttpClient

/**
 * Base task-metrics parser: deserializes the raw metrics JSON and delegates
 * filling of the detail DTO lists to the subclass hook.
 *
 * @date 2022-10-21
 * @author enjoyyin
 * @since 0.5.0
 */
trait AbstractTaskMetricsParser extends TaskMetricsParser {

  override def parse(metrics: String): JobDetailsVo = {
    val details = new JobDetailsVo
    val dataNumbers = new util.ArrayList[JobDetailsVo.DataNumberDTO]
    val loadConditions = new util.ArrayList[JobDetailsVo.LoadConditionDTO]
    val realTimeTraffic = new util.ArrayList[JobDetailsVo.RealTimeTrafficDTO]
    details.setDataNumber(dataNumbers)
    details.setLoadCondition(loadConditions)
    details.setRealTimeTraffic(realTimeTraffic)
    // Blank metrics: hand the subclass an empty map so it can still fill defaults
    val metricsMap: util.Map[String, Object] =
      if (StringUtils.isBlank(metrics)) new util.HashMap[String, Object](0)
      else DWSHttpClient.jacksonJson.readValue(metrics, classOf[util.Map[String, Object]])
    parse(metricsMap, dataNumbers, loadConditions, realTimeTraffic)
    details
  }

  /** Subclass hook: populate the three DTO lists from the parsed metrics map. */
  protected def parse(metricsMap: util.Map[String, Object],
                      dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO],
                      loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO],
                      realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit

}
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser +import java.util + +import org.apache.linkis.common.utils.JsonUtils +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion, StreamisFile} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJarTransformJobContent, StreamisTransformJobContent} +import org.apache.commons.lang.StringUtils +import org.springframework.stereotype.Component + +import scala.collection.JavaConverters._ + +/** + * Created by enjoyyin on 2021/9/23. + */ +@Component +class FlinkJarJobContentParser extends AbstractJobContentParser { + + override def parseTo(job: StreamJob, jobVersion: StreamJobVersion): StreamisTransformJobContent = { + val transformJobContent = new StreamisJarTransformJobContent + val jobContent = JsonUtils.jackson.readValue(jobVersion.getJobContent, classOf[util.Map[String, Object]]) + jobContent.get("main.class.jar") match { + case mainClassJar: String => + val file = dealStreamisFile(job, jobVersion, mainClassJar, "main.class.jar") + transformJobContent.setMainClassJar(file) + case _ => throw new JobExecuteErrorException(30500, "main.class.jar is needed.") + } + jobContent.get("main.class") match { + case mainClass: String => + transformJobContent.setMainClass(mainClass) + case _ => throw new JobExecuteErrorException(30500, "main.class is needed.") + } + jobContent.get("args") match { + case args: util.List[String] => + transformJobContent.setArgs(args) + case _ => + } + jobContent.get("hdfs.jars") match { + case hdfsJars: util.List[String] => + transformJobContent.setHdfsJars(hdfsJars) + case _ => + } + jobContent.get("dependency.jars") match { + case dependencyJars: util.List[String] => + 
val parsedDependencyJars = dependencyJars.asScala.filter(StringUtils.isNotBlank).map { + dependencyJar => dealStreamisFile(job, jobVersion, dependencyJar, "dependency.jar") + }.asJava + transformJobContent.setDependencyJars(parsedDependencyJars) + case _ => + } + jobContent.get("resources") match { + case resources: util.List[String] => + val parsedResources = resources.asScala.filter(StringUtils.isNotBlank).map { + resource => dealStreamisFile(job, jobVersion, resource, "resources") + }.asJava + transformJobContent.setResources(parsedResources) + case _ => + } + transformJobContent + } + + private def dealStreamisFile(job: StreamJob, jobVersion: StreamJobVersion, fileName: String, fileType: String): StreamisFile = { + info(s"Try to parse the $fileType $fileName for StreamJob-${job.getName}.") + this.findFile(job, jobVersion, fileName) + } + + override val jobType: String = FlinkJarJobContentParser.JOB_TYPE + override val runType: RunType = RunType.JAR +} + +object FlinkJarJobContentParser { + val JOB_TYPE = "flink.jar" +} \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkSQLJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkSQLJobContentParser.scala new file mode 100644 index 000000000..62d838683 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkSQLJobContentParser.scala @@ -0,0 +1,69 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import org.apache.linkis.common.utils.JsonUtils +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisSqlTransformJobContent, StreamisTransformJobContent} +import org.springframework.stereotype.Component + +/** + * Created by enjoyyin on 2021/9/23. 
+ */ +@Component +class FlinkSQLJobContentParser extends AbstractJobContentParser { + + override def parseTo(job: StreamJob, jobVersion: StreamJobVersion): StreamisTransformJobContent = { + val jobContent = JsonUtils.jackson.readValue(jobVersion.getJobContent, classOf[util.Map[String, Object]]) + val transformJobContent = new StreamisSqlTransformJobContent + val sql = jobContent.get("type") match { + case "file" => + jobContent.get("file") match { + case file: String => + getFileContent(job, jobVersion, file) + case _ => throw new JobExecuteErrorException(30500, s"No file is exists when the type is file in jobContent.") + } + case "bml" => + val resourceId = jobContent.get("resourceId") + val version = jobContent.get("version") + if(resourceId == null || version == null) + throw new JobExecuteErrorException(30500, s"No resourceId or version is exists when the type is bml in jobContent.") + readFileFromBML(jobVersion.getCreateBy, resourceId.toString, version.toString) + case "sql" => + jobContent.get("sql") match { + case sql: String => sql + case _ => throw new JobExecuteErrorException(30500, s"No sql is exists when the type is sql in jobContent.") + } + case t => throw new JobExecuteErrorException(30500, s"Not recognized type $t in jobContent.") + } + transformJobContent.setSql(sql) + transformJobContent + } + + override val jobType: String = FlinkSQLJobContentParser.JOB_TYPE + override val runType: RunType = RunType.SQL + +} +object FlinkSQLJobContentParser { + + val JOB_TYPE = "flink.sql" + +} \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala new file mode 100644 index 000000000..48eb8ad06 --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala @@ -0,0 +1,48 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo +import org.springframework.stereotype.Component + +/** + * + * @date 2022-10-21 + * @author enjoyyin + * @since 0.5.0 + */ +@Component +class FlinkTaskMetricsParser extends AbstractTaskMetricsParser { + + override def canParse(streamJob: StreamJob): Boolean = streamJob.getJobType.startsWith("flink.") + + override def parse(metricsMap: util.Map[String, Object], + dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO], + loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO], + realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit = { + // TODO This is just sample data, waiting to be completed. We have planned it for a later release; welcome all partners to join us to realize this powerful feature. 
+ val dataNumberDTO = new JobDetailsVo.DataNumberDTO + dataNumberDTO.setDataName("kafka topic") + dataNumberDTO.setDataNumber(109345) + dataNumberDTOS.add(dataNumberDTO) + + val loadConditionDTO = new JobDetailsVo.LoadConditionDTO + loadConditionDTO.setType("jobManager") + loadConditionDTO.setHost("localhost") + loadConditionDTO.setMemory("1.5") + loadConditionDTO.setTotalMemory("2.0") + loadConditionDTO.setGcLastTime("2020-08-01") + loadConditionDTO.setGcLastConsume("1") + loadConditionDTO.setGcTotalTime("2min") + loadConditionDTOs.add(loadConditionDTO) + + val realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO + realTimeTrafficDTO.setSourceKey("kafka topic") + realTimeTrafficDTO.setSourceSpeed("100 Records/S") + realTimeTrafficDTO.setTransformKey("transform") + realTimeTrafficDTO.setSinkKey("hbase key") + realTimeTrafficDTO.setSinkSpeed("10 Records/S") + realTimeTrafficDTOS.add(realTimeTrafficDTO) + } +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkWorkflowJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkWorkflowJobContentParser.scala new file mode 100644 index 000000000..933ad90b9 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkWorkflowJobContentParser.scala @@ -0,0 +1,57 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import org.apache.linkis.common.utils.JsonUtils +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion} +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisSqlTransformJobContent, StreamisTransformJobContent, StreamisWorkflowTransformJobContent} +import org.springframework.stereotype.Component + +/** + * Created by enjoyyin on 2021/9/23. + */ +@Component +class FlinkWorkflowJobContentParser extends FlinkSQLJobContentParser { + + override def parseTo(job: StreamJob, jobVersion: StreamJobVersion): StreamisTransformJobContent = super.parseTo(job, jobVersion) match { + case transformJobContent: StreamisSqlTransformJobContent => + val workflowJobContent = new StreamisWorkflowTransformJobContent + val jobContent = JsonUtils.jackson.readValue(jobVersion.getJobContent, classOf[util.Map[String, Object]]) + workflowJobContent.setWorkflowId(jobContent.get("workflowId").asInstanceOf[Long]) + workflowJobContent.setWorkflowName(jobContent.get("workflowName").asInstanceOf[String]) + workflowJobContent.setSql(transformJobContent.getSql) + workflowJobContent + } + + override val jobType: String = FlinkWorkflowJobContentParser.JOB_TYPE +} + +object FlinkWorkflowJobContentParser { + + val JOB_TYPE = "flink.workflow" + + def sqlToJobContent(workflowId: java.lang.Long, workflowName: String, sql: String): util.Map[String, Object] = { + val jobContent = new util.HashMap[String, Object] + 
jobContent.put("type", "sql") + jobContent.put("sql", sql) + jobContent.put("workflowId", workflowId) + jobContent.put("workflowName", workflowName) + jobContent + } + +} \ No newline at end of file diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala new file mode 100644 index 000000000..d730753c2 --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala @@ -0,0 +1,78 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion, StreamJobVersionFiles, StreamisFile} +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJarTransformJobContent, StreamisTransformJobContent} +import org.apache.commons.lang.StringUtils +import org.apache.linkis.common.utils.JsonUtils +import org.apache.linkis.manager.label.entity.engine.RunType +import org.apache.linkis.manager.label.entity.engine.RunType.RunType +import org.springframework.stereotype.Component + +import scala.collection.JavaConverters._ + +/** + * + * @date 2022-10-19 + * @author enjoyyin + * @since 0.5.0 + */ +@Component +class SparkJarJobContentParser extends AbstractJobContentParser { + + override val jobType: String = "spark.jar" + override val runType: RunType = RunType.JAR + + override def parseTo(job: StreamJob, jobVersion: StreamJobVersion): StreamisTransformJobContent = { + val 
createFile: String => StreamisFile = fileName => { + val file = new StreamJobVersionFiles() + file.setFileName(fileName) + file.setCreateBy(job.getCreateBy) + file.setCreateTime(job.getCreateTime) + file.setJobId(job.getId) + file.setJobVersionId(jobVersion.getId) + file.setVersion(jobVersion.getVersion) + file.setStorePath("") + file.setStoreType("") + file + } + val transformJobContent = new StreamisJarTransformJobContent + val jobContent = JsonUtils.jackson.readValue(jobVersion.getJobContent, classOf[util.Map[String, Object]]) + jobContent.get("main.class.jar") match { + case mainClassJar: String => + transformJobContent.setMainClassJar(createFile(mainClassJar)) + case _ => throw new JobExecuteErrorException(30500, "main.class.jar is needed.") + } + jobContent.get("main.class") match { + case mainClass: String => + transformJobContent.setMainClass(mainClass) + case _ => throw new JobExecuteErrorException(30500, "main.class is needed.") + } + jobContent.get("args") match { + case args: util.List[String] => + transformJobContent.setArgs(args) + case _ => + } + jobContent.get("hdfs.jars") match { + case hdfsJars: util.List[String] => + transformJobContent.setHdfsJars(hdfsJars) + case _ => + } + jobContent.get("dependency.jars") match { + case dependencyJars: util.List[String] => + val parsedDependencyJars = dependencyJars.asScala.filter(StringUtils.isNotBlank).map(createFile).asJava + transformJobContent.setDependencyJars(parsedDependencyJars) + case _ => + } + jobContent.get("resources") match { + case resources: util.List[String] => + val parsedResources = resources.asScala.filter(StringUtils.isNotBlank).map(createFile).asJava + transformJobContent.setResources(parsedResources) + case _ => + } + transformJobContent + } + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala new file mode 100644 index 000000000..c6e00d7ff --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala @@ -0,0 +1,86 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser + +import java.util + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo +import org.apache.linkis.common.utils.Utils +import org.springframework.stereotype.Component + +import scala.collection.JavaConverters._ + +/** + * + * @date 2022-10-21 + * @author enjoyyin + * @since 0.5.0 + */ +@Component +class SparkTaskMetricsParser extends AbstractTaskMetricsParser { + + override protected def parse(metricsMap: util.Map[String, Object], + dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO], + loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO], + realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit = { + val addDataNumberDTO: String => Unit = key => { + val batch = new JobDetailsVo.DataNumberDTO + batch.setDataName(key) + batch.setDataNumber(metricsMap.get(key) match { + case null => -1 + case num => num.toString.toInt + }) + dataNumberDTOS.add(batch) + } + addDataNumberDTO("waitingBatchs") + addDataNumberDTO("runningBatchs") + addDataNumberDTO("completedBatchs") + metricsMap.get("executors") match { + case executors: util.List[util.Map[String, AnyRef]] if !executors.isEmpty => + executors.asScala.foreach { executor => + val loadConditionDTO = new JobDetailsVo.LoadConditionDTO + loadConditionDTO.setType(executor.get("type").asInstanceOf[String]) + loadConditionDTO.setHost(executor.get("host").asInstanceOf[String]) + 
loadConditionDTO.setMemory(executor.get("memory").asInstanceOf[String]) + loadConditionDTO.setTotalMemory(executor.get("totalMemory").asInstanceOf[String]) + loadConditionDTO.setGcLastTime(executor.get("gcLastTime").asInstanceOf[String]) + loadConditionDTO.setGcLastConsume(executor.get("gcLastConsume").asInstanceOf[String]) + loadConditionDTO.setGcTotalTime(executor.get("gcTotalTime").asInstanceOf[String]) + loadConditionDTOs.add(loadConditionDTO) + } + case _ => + val loadConditionDTO = new JobDetailsVo.LoadConditionDTO + loadConditionDTO.setType("Driver") + loadConditionDTO.setHost("") + loadConditionDTO.setMemory("") + loadConditionDTO.setTotalMemory("") + loadConditionDTO.setGcLastTime("") + loadConditionDTO.setGcLastConsume("") + loadConditionDTO.setGcTotalTime("") + loadConditionDTOs.add(loadConditionDTO) + } + val realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO + metricsMap.get("batchMetrics") match { + case batchMetrics: util.List[util.Map[String, Object]] if !batchMetrics.isEmpty => + val batchMetric = batchMetrics.asScala.maxBy(_.get("batchTime").asInstanceOf[String]) + realTimeTrafficDTO.setSourceKey(metricsMap.getOrDefault("source", "").asInstanceOf[String]) + realTimeTrafficDTO.setSourceSpeed(batchMetric.get("inputRecords") + " Records") + realTimeTrafficDTO.setTransformKey("processing") + realTimeTrafficDTO.setSinkKey(metricsMap.getOrDefault("sink", "").asInstanceOf[String]) + val sinkSpeed = if (batchMetric.containsKey("totalDelay") && batchMetric.get("totalDelay") != null) + Utils.msDurationToString(batchMetric.get("totalDelay").toString.toInt) + " totalDelay" + else if (batchMetric.containsKey("taskExecuteTime") && batchMetric.get("taskExecuteTime") != null) + Utils.msDurationToString(batchMetric.get("taskExecuteTime").toString.toInt) + " executeTime(Last Batch)" + else "" + realTimeTrafficDTO.setSinkSpeed(sinkSpeed) + case _ => + realTimeTrafficDTO.setSourceKey("") + realTimeTrafficDTO.setSourceSpeed(" Records/S") + 
realTimeTrafficDTO.setTransformKey("") + realTimeTrafficDTO.setSinkKey("") + realTimeTrafficDTO.setSinkSpeed(" Records/S") + } + realTimeTrafficDTOS.add(realTimeTrafficDTO) + } + + override def canParse(streamJob: StreamJob): Boolean = streamJob.getJobType.startsWith("spark.") +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala new file mode 100644 index 000000000..75a62b53d --- /dev/null +++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala @@ -0,0 +1,24 @@ +package com.webank.wedatasphere.streamis.jobmanager.manager.utils + +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf + +import java.util +import scala.collection.JavaConverters.{asScalaSetConverter, mapAsScalaMapConverter} + +object JobUtils { + /** + * Filter the illegal characters parameter specific + * @param params parameters + */ + def filterParameterSpec(params: util.Map[String, Any]): util.Map[String, Any] ={ + for (paramEntry <- params.entrySet().asScala){ + val value = paramEntry.getValue + value match { + case str: String => paramEntry.setValue(str.replace(" ", JobConf.STREAMIS_JOB_PARAM_BLANK_PLACEHOLDER.getValue)) + case _ => + } + } + params + } + +} diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/StreamTaskUtils.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/StreamTaskUtils.scala new file mode 100644 index 000000000..6de13e67d --- /dev/null +++ 
b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/StreamTaskUtils.scala @@ -0,0 +1,45 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +package com.webank.wedatasphere.streamis.jobmanager.manager.utils + +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask +import org.apache.commons.lang.StringUtils +import org.apache.linkis.httpclient.dws.DWSHttpClient + +import java.util.{Calendar, Date} + +/** + * Utils for stream task + */ +object StreamTaskUtils { + + /** + * Refresh the task info + * @param task stream task + * @param jobInfo job info + */ + def refreshInfo(task: StreamTask, jobInfo: JobInfo): Unit = { + val time = Calendar.getInstance.getTime + task.setLastUpdateTime(time) + task.setStatus(JobConf.linkisStatusToStreamisStatus(jobInfo.getStatus)) + if(JobConf.isCompleted(task.getStatus) && StringUtils.isNotEmpty(jobInfo.getCompletedMsg)) + task.setErrDesc(jobInfo.getCompletedMsg) + task.setLinkisJobInfo(DWSHttpClient.jacksonJson.writeValueAsString(jobInfo)) + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-common/pom.xml b/streamis-jobmanager/streamis-jobmanager-common/pom.xml new file mode 100644 index 000000000..694e915e4 --- /dev/null +++ 
b/streamis-jobmanager/streamis-jobmanager-common/pom.xml @@ -0,0 +1,78 @@ + + + + + + streamis-jobmanager + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-jobmanager-common + + + 8 + 8 + + + + + + + + org.apache.linkis + linkis-common + + + + org.apache.linkis + linkis-protocol + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-jobmanager-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/common/protocol/StreamJobManagerProtocol.scala b/streamis-jobmanager/streamis-jobmanager-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/common/protocol/StreamJobManagerProtocol.scala new file mode 100644 index 000000000..0ac866179 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/common/protocol/StreamJobManagerProtocol.scala @@ -0,0 +1,44 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.common.protocol + +import org.apache.linkis.protocol.Protocol + +/** + * created by yangzhiyue on 2021/4/26 + * Description: + */ +trait StreamJobManagerProtocol extends Protocol + + +case class ImportJobManagerRequest(streamJobName:String, + `type`:String, + executionCode:String, + createBy:String, + updateBy:String, + description:String, + tags:java.util.List[String], + publishUser:String, + workspaceName:String, + version:String, + projectName:String, + workflowId: Long, + workflowName: String) extends StreamJobManagerProtocol + + +case class ImportJobManagerResponse(status:Int, + streamJobId:Long, + errorMessage:String) extends StreamJobManagerProtocol \ No newline at end of file diff --git a/streamis-jobmanager/streamis-jobmanager-server/pom.xml b/streamis-jobmanager/streamis-jobmanager-server/pom.xml new file mode 100644 index 000000000..a42d3553a --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/pom.xml @@ -0,0 +1,140 @@ + + + + + + streamis-jobmanager + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-jobmanager-server + + + + org.apache.linkis + linkis-module + + + com.webank.wedatasphere.streamis + streamis-job-launcher-service + ${jobmanager.version} + + + org.apache.linkis + linkis-module + + + + + com.webank.wedatasphere.streamis + streamis-job-manager-service + ${jobmanager.version} + + + org.apache.linkis + linkis-module + + + + + org.springframework.boot + spring-boot-starter-test + ${spring.boot.version} + test + + + junit + junit + 4.13.1 + test + + + + org.apache.linkis + linkis-rpc + ${linkis.version} + + + + com.webank.wedatasphere.streamis + streamis-jobmanager-common + ${streamis.version} + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single 
+ + + + src/main/assembly/distribution.xml + + + + + + false + streamis-server + false + false + + src/main/assembly/distribution.xml + + + + + + + ${basedir}/src/main/resources + + **/*.properties + **/*.xml + **/*.yml + + + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/assembly/distribution.xml b/streamis-jobmanager/streamis-jobmanager-server/src/main/assembly/distribution.xml new file mode 100644 index 000000000..9a69ed9a9 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/assembly/distribution.xml @@ -0,0 +1,69 @@ + + + + streamis-server + + zip + + true + streamis-server + + + + + + lib + true + true + false + false + true + + + + + + ${basedir}/src/main/resources + + * + + 0777 + conf + unix + + + ${basedir}/bin + + * + + 0777 + bin + unix + + + . + + */** + + logs + + + + + diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/conf/RestTemplateConf.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/conf/RestTemplateConf.java new file mode 100644 index 000000000..fd7aedec7 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/conf/RestTemplateConf.java @@ -0,0 +1,30 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.conf; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import org.springframework.web.client.RestTemplate; + +@Configuration +public class RestTemplateConf { + + @Bean + public RestTemplate getRestTemplate() { + return new RestTemplate(); + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobException.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobException.java new file mode 100644 index 000000000..1b1b7399c --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobException.java @@ -0,0 +1,29 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.exception; + +import org.apache.linkis.common.exception.ErrorException; + +public class JobException extends ErrorException { + + public JobException(int errCode, String desc) { + super(errCode, desc); + } + + public JobException(int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobExceptionManager.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobExceptionManager.java new file mode 100644 index 000000000..8c20b0460 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobExceptionManager.java @@ -0,0 +1,33 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.exception; + +import java.util.HashMap; +import java.util.Map; + +public class JobExceptionManager { + //30300-30599 + private static Map desc = new HashMap(32); + static { + desc.put("30300", "upload failure(上传失败)"); + desc.put("30301","%s cannot be empty!"); + desc.put("30302", "upload file type should be zip(上传的文件类型应为zip类型)"); + } + + public static JobException createException(int errorCode, Object... 
format) throws JobException { + return new JobException(errorCode, String.format(desc.get(String.valueOf(errorCode)), format)); + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/ProjectException.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/ProjectException.java new file mode 100644 index 000000000..d8206f7ba --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/ProjectException.java @@ -0,0 +1,25 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.exception; + +public class ProjectException extends Exception{ + public ProjectException() { + } + + public ProjectException(String message) { + super(message); + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobBulkRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobBulkRestfulApi.java new file mode 100644 index 000000000..0b97ae5c8 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobBulkRestfulApi.java @@ -0,0 +1,157 @@ +package com.webank.wedatasphere.streamis.jobmanager.restful.api; + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.ExecResultVo; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.PauseResultVo; +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException; +import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamTaskService; +import com.webank.wedatasphere.streamis.jobmanager.vo.BaseBulkRequest; +import com.webank.wedatasphere.streamis.jobmanager.vo.BulkResponse; +import com.webank.wedatasphere.streamis.jobmanager.vo.JobBulkPauseRequest; +import com.webank.wedatasphere.streamis.jobmanager.vo.JobBulkRequest; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.scheduler.queue.SchedulerEventState; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import 
org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +@RequestMapping(path = "/streamis/streamJobManager/job/bulk") +@RestController +public class JobBulkRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(JobBulkRestfulApi.class); + + @Resource + private ProjectPrivilegeService privilegeService; + + /** + * Stream task service + */ + @Resource + private StreamTaskService streamTaskService; + + @Resource + private StreamJobService streamjobService; + + /** + * Bulk execution + * @param execBulkRequest bulk request + * @param request request + * @return + */ + @RequestMapping(value = "/execution", method = RequestMethod.POST) + public Message bulkExecution(@RequestBody JobBulkRequest execBulkRequest, HttpServletRequest request){ + List subjectIds = execBulkRequest.getBulkSubject(); + if (subjectIds.isEmpty()){ + return Message.error("The list of jobId/taskId cannot be empty for bulk execution"); + } + Message result = Message.ok("success"); + try{ + String username = SecurityFilter.getLoginUsername(request); + LOG.info("Bulk execution[operator: {} sbj_type: {}, subjects: ({})]", username, + execBulkRequest.getBulkSubjectType(), StringUtils.join(execBulkRequest.getBulkSubject(), ",")); + // TODO Check the permission of task id + List execResults; + if (JobBulkRequest.IdType.JOB.name().equals(execBulkRequest.getBulkSubjectType())){ + for(Object jobId : execBulkRequest.getBulkSubject()){ + StreamJob streamJob = this.streamjobService.getJobById(Long.parseLong(jobId.toString())); + if (!streamjobService.hasPermission(streamJob, username) && + !this.privilegeService.hasEditPrivilege(request, 
streamJob.getProjectName())){ + throw new JobExecuteErrorException(-1, "Have no permission to execute StreamJob [" + jobId + "]"); + } + } + // TODO Enable to accept 'restore' parameter from request + execResults = streamTaskService.bulkExecute(new ArrayList<>(execBulkRequest.getBulkSubject()), Collections.emptyList(), username); + } else { + execResults = streamTaskService.bulkExecute(Collections.emptyList(), new ArrayList<>(execBulkRequest.getBulkSubject()), username); + } + // Convert to bulk response + BulkResponse response = new BulkResponse<>(execResult -> { + if (SchedulerEventState.withName(execResult.getScheduleState()) + == SchedulerEventState.Failed()){ + return BaseBulkRequest.BulkStatus.Failed.name(); + } + return BaseBulkRequest.BulkStatus.Success.name(); + }, execResults); + String[] necessaryStatus = new String[]{BaseBulkRequest.BulkStatus.Failed.name(), BaseBulkRequest.BulkStatus.Success.name()}; + for (String necessary : necessaryStatus){ + response.getResult().computeIfAbsent(necessary, key -> new BulkResponse.ResultStatistic<>()); + } + result.data("total", response.getTotal()).data("result", response.getResult()); + }catch (Exception e){ + String message = "Fail to bulk execute job/task(批量执行任务/作业失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message, e); + } + return result; + } + + /** + * Bulk pause + * @param pauseRequest + * @param request + * @return + */ + @RequestMapping(value = "/pause", method = RequestMethod.POST) + public Message bulkPause(@RequestBody JobBulkPauseRequest pauseRequest, HttpServletRequest request){ + List subjectIds = pauseRequest.getBulkSubject(); + if (subjectIds.isEmpty()){ + return Message.error("The list of jobId/taskId cannot be empty for bulk pause"); + } + Message result = Message.ok("success"); + try{ + String username = SecurityFilter.getLoginUsername(request); + LOG.info("Bulk pause[operator: {}, sbj_type: {}, snapshot: {}, subjects: ({})]", + username, 
pauseRequest.getBulkSubjectType(), pauseRequest.isSnapshot(), + StringUtils.join(pauseRequest.getBulkSubject(), ",")); + List pauseResults; + // TODO Check the permission of task id + if (JobBulkRequest.IdType.JOB.name().equals(pauseRequest.getBulkSubjectType())){ + for(Long jobId : pauseRequest.getBulkSubject()){ + StreamJob streamJob = this.streamjobService.getJobById(jobId); + if (!streamjobService.hasPermission(streamJob, username) && + !this.privilegeService.hasEditPrivilege(request, streamJob.getProjectName())){ + throw new JobExecuteErrorException(-1, "Have no permission to execute StreamJob [" + jobId + "]"); + } + } + pauseResults = streamTaskService.bulkPause(new ArrayList<>(pauseRequest.getBulkSubject()), + Collections.emptyList(), username, pauseRequest.isSnapshot()); + } else { + pauseResults = streamTaskService.bulkPause(Collections.emptyList(), + new ArrayList<>(pauseRequest.getBulkSubject()), username, pauseRequest.isSnapshot()); + } + // Convert to bulk response + BulkResponse response = new BulkResponse<>(pauseResult -> { + if (SchedulerEventState.withName(pauseResult.getScheduleState()) + == SchedulerEventState.Failed()){ + return BaseBulkRequest.BulkStatus.Failed.name(); + } + return BaseBulkRequest.BulkStatus.Success.name(); + }, pauseResults); + String[] necessaryStatus = new String[]{BaseBulkRequest.BulkStatus.Failed.name(), BaseBulkRequest.BulkStatus.Success.name()}; + for (String necessary : necessaryStatus){ + response.getResult().computeIfAbsent(necessary, key -> new BulkResponse.ResultStatistic<>()); + } + result.data("total", response.getTotal()).data("result", response.getResult()); + } catch (Exception e){ + String message = "Fail to bulk pause job/task(批量停止任务/作业失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message, e); + } + return result; + } + +} diff --git 
a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfExtRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfExtRestfulApi.java new file mode 100644 index 000000000..02b2da0b9 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfExtRestfulApi.java @@ -0,0 +1,55 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.restful.api; + +import com.webank.wedatasphere.streamis.jobmanager.manager.util.CookieUtils; +import com.webank.wedatasphere.streamis.jobmanager.service.UserService; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; +import java.util.List; + + +@RequestMapping(path = "/streamis/streamJobManager/config") +@RestController +public class JobConfExtRestfulApi { + private static final Logger LOG = LoggerFactory.getLogger(JobConfExtRestfulApi.class); + + @Autowired + UserService userService; + + @RequestMapping(path = "/getWorkspaceUsers", method = RequestMethod.GET) + public Message getWorkspaceUsers(HttpServletRequest req) { + //获取工作空间 + List userList = new ArrayList<>(); + String workspaceId = CookieUtils.getCookieWorkspaceId(req); + if (StringUtils.isNotBlank(workspaceId)) { + String userName = SecurityFilter.getLoginUsername(req); + userList.addAll(userService.workspaceUserQuery(req, workspaceId)); + } else { + LOG.warn("Cannot find the workspaceID from DSS,perhaps the cookie value has been lost in request from: {}", req.getLocalAddr()); + } + return Message.ok().data("users", userList); + } + +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfRestfulApi.java new file mode 100644 index 000000000..8f8570c9b --- /dev/null +++ 
b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfRestfulApi.java @@ -0,0 +1,190 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.restful.api; + +import com.fasterxml.jackson.databind.JsonNode; +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition; +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.JobConfDefinitionVo; +import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.JobConfValueSet; +import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService; +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob; +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobErrorException; +import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService; +import org.apache.linkis.httpclient.dws.DWSHttpClient; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import 
javax.servlet.http.HttpServletRequest; +import java.util.*; +import java.util.stream.Collectors; + +@RequestMapping(path = "/streamis/streamJobManager/config") +@RestController +public class JobConfRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(JobConfRestfulApi.class); + + @Resource + private StreamJobConfService streamJobConfService; + + @Resource + private StreamJobService streamJobService; + + @Resource + private ProjectPrivilegeService privilegeService; + + /** + * Definitions + * @return message + */ + @RequestMapping(value = "/definitions") + public Message definitions(){ + Message result = Message.ok("success"); + try { + List definitionList = this.streamJobConfService.loadAllDefinitions(); + Map definitionRelation = new HashMap<>(); + definitionList.forEach(definition -> definitionRelation.put(String.valueOf(definition.getId()), + new JobConfDefinitionVo(definition))); + definitionList.forEach(definition -> { + Long parentRef = definition.getParentRef(); + if (Objects.nonNull(parentRef)) { + JobConfDefinitionVo definitionVo = definitionRelation.get(String.valueOf(parentRef)); + if (Objects.nonNull(definitionVo)) { + List childDef = Optional.ofNullable(definitionVo.getChildDef()).orElse(new ArrayList<>()); + childDef.add(definitionRelation.get(String.valueOf(definition.getId()))); + definitionVo.setChildDef(childDef); + } + } + }); + + List def = + definitionRelation.values().stream().filter(definitionVo -> definitionVo.getLevel() == 0) + .sorted((o1, o2) -> o2.getSort() - o1.getSort()).collect(Collectors.toList()); + def.forEach(definitionVo -> { + if (Objects.isNull(definitionVo.getChildDef())){ + definitionVo.setChildDef(Collections.emptyList()); + } + }); + result.data("def", def); + }catch(Exception e){ + String message = "Fail to obtain StreamJob configuration definitions(获取任务配置定义集失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message); + } + return result; + } + + /** + * Query job 
config json + * @return config json + */ + @RequestMapping(value = "/json/{jobId:\\w+}", method = RequestMethod.GET) + public Message queryConfig(@PathVariable("jobId") Long jobId, HttpServletRequest request){ + Message result = Message.ok("success"); + try { + String userName = SecurityFilter.getLoginUsername(request); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if (!streamJobService.hasPermission(streamJob, userName) && + !this.privilegeService.hasAccessPrivilege(request, streamJob.getProjectName())){ + throw new JobErrorException(-1, "Have no permission to view StreamJob [" + jobId + "] configuration"); + } + result.setData(new HashMap<>(this.streamJobConfService.getJobConfig(jobId))); + }catch(Exception e){ + String message = "Fail to view StreamJob configuration(查看任务配置失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message); + } + return result; + } + + /** + * Save job config json + * @param jobId job id + * @param configContent config content + * @param request request + * @return + */ + @RequestMapping(value = "/json/{jobId:\\w+}", method = RequestMethod.POST) + public Message saveConfig(@PathVariable("jobId") Long jobId, @RequestBody Map configContent, + HttpServletRequest request){ + Message result = Message.ok("success"); + try{ + String userName = SecurityFilter.getLoginUsername(request); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + // Accept the developer to modify + if (!streamJobService.isCreator(jobId, userName) && + !JobConf.STREAMIS_DEVELOPER().getValue().contains(userName) && + !this.privilegeService.hasEditPrivilege(request, streamJob.getProjectName())) { + throw new JobErrorException(-1, "Have no permission to save StreamJob [" + jobId + "] configuration"); + } + this.streamJobConfService.saveJobConfig(jobId, configContent); + }catch(Exception e){ + String message = "Fail to save StreamJob configuration(保存/更新任务配置失败), message: " + e.getMessage(); + LOG.warn(message, 
e); + result = Message.error(message); + } + return result; + } + + @RequestMapping(path = "/view", method = RequestMethod.GET) + public Message viewConfigTree(@RequestParam(value = "jobId", required = false) Long jobId, + HttpServletRequest req){ + Message result = Message.ok("success"); + try{ + if (Objects.isNull(jobId)){ + throw new JobErrorException(-1, "Params 'jobId' cannot be empty"); + } + String userName = SecurityFilter.getLoginUsername(req); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if (!this.streamJobService.hasPermission(streamJob, userName) + && !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())){ + throw new JobErrorException(-1, "Have no permission to view the configuration tree of StreamJob [" + jobId + "]"); + } + result.data("fullTree", this.streamJobConfService.getJobConfValueSet(jobId)); + }catch (Exception e){ + String message = "Fail to view configuration tree(查看任务配置树失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message); + } + return result; + } + + @RequestMapping(path = {"/add", "/update"}, method = RequestMethod.POST) + public Message saveConfigTree(@RequestBody JsonNode json, HttpServletRequest req){ + Message result = Message.ok("success"); + try{ + String userName = SecurityFilter.getLoginUsername(req); + JobConfValueSet fullTrees = DWSHttpClient.jacksonJson().readValue(json.get("fullTree").traverse(), JobConfValueSet.class); + // Accept the developer to modify + if (!streamJobService.isCreator(fullTrees.getJobId(), userName) && + !JobConf.STREAMIS_DEVELOPER().getValue().contains(userName)) { + return Message.error("you con not modify the config ,the job is not belong to you"); + } + streamJobConfService.saveJobConfValueSet(fullTrees); + }catch (Exception e){ + String message = "Fail to insert/update configuration tree(保存/更新任务配置树失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message); + } + return result; + } +} 
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java new file mode 100644 index 000000000..1317bcb16 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java @@ -0,0 +1,616 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.restful.api; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; +import com.webank.wedatasphere.streamis.jobmanager.exception.JobException; +import com.webank.wedatasphere.streamis.jobmanager.exception.JobExceptionManager; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager; +import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload; +import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.FlinkJobInfo; +import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.MetaJsonInfo; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobVersion; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.*; +import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobInspectService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamTaskService; +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent; +import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.commons.lang3.StringUtils; +import 
org.apache.linkis.httpclient.dws.DWSHttpClient; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.apache.linkis.server.utils.ModuleUserUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.*; + +import javax.annotation.Resource; +import javax.servlet.http.HttpServletRequest; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; + +@RequestMapping(path = "/streamis/streamJobManager/job") +@RestController +public class JobRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(JobRestfulApi.class); + + @Autowired + private StreamJobService streamJobService; + + @Autowired + private StreamTaskService streamTaskService; + + @Autowired + private StreamJobInspectService streamJobInspectService; + + @Resource + private JobLaunchManager jobLaunchManager; + + @Resource + private ProjectPrivilegeService privilegeService; + + @RequestMapping(path = "/list", method = RequestMethod.GET) + public Message getJobList(HttpServletRequest req, + @RequestParam(value = "pageNow", required = false) Integer pageNow, + @RequestParam(value = "pageSize", required = false) Integer pageSize, + @RequestParam(value = "projectName", required = false) String projectName, + @RequestParam(value = "jobName", required = false) String jobName, + @RequestParam(value = "jobStatus", required = false) Integer jobStatus, + @RequestParam(value = "jobCreator", required = false) String jobCreator) { + String username = SecurityFilter.getLoginUsername(req); + if(StringUtils.isBlank(projectName)){ + return Message.error("Project name cannot be empty(项目名不能为空,请指定)"); + } + if (Objects.isNull(pageNow)) { + pageNow = 1; + } + if (Objects.isNull(pageSize)) { + pageSize = 20; + } + PageInfo pageInfo; + 
PageHelper.startPage(pageNow, pageSize); + try { + pageInfo = streamJobService.getByProList(projectName, username, jobName, jobStatus, jobCreator); + } finally { + PageHelper.clearPage(); + } + + return Message.ok().data("tasks", pageInfo.getList()).data("totalPage", pageInfo.getTotal()); + } + + @RequestMapping(path = "/createOrUpdate", method = RequestMethod.POST) + public Message createOrUpdate(HttpServletRequest req, @Validated @RequestBody MetaJsonInfo metaJsonInfo) { + String username = SecurityFilter.getLoginUsername(req); + String projectName = metaJsonInfo.getProjectName(); + if (StringUtils.isBlank(projectName)){ + return Message.error("Project name cannot be empty(项目名不能为空,请指定)"); + } + if(!this.privilegeService.hasEditPrivilege(req, projectName)){ + return Message.error("Have no permission to create or update StreamJob in project [" + projectName + "]"); + } + StreamJobVersion job = streamJobService.createOrUpdate(username, metaJsonInfo); + return Message.ok().data("jobId", job.getJobId()); + } + + @RequestMapping(path = "{jobId:\\w+}/versions", method = RequestMethod.GET) + public Message versions(HttpServletRequest req, @PathVariable("jobId")Integer jobId, + @RequestParam(value = "pageNow", required = false) Integer pageNow, + @RequestParam(value = "pageSize", required = false) Integer pageSize){ + String userName = ModuleUserUtils.getOperationUser(req, "Query job version page"); + if (Objects.isNull(pageNow)) { + pageNow = 1; + } + if (Objects.isNull(pageSize)){ + pageSize = 20; + } + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if (Objects.isNull(streamJob)){ + return Message.error("Unknown StreamJob with id: " + jobId + "(无法找到对应的流任务)"); + } + if (!streamJobService.hasPermission(streamJob, userName) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())){ + return Message.error("Have no permission to view versions of StreamJob [" + jobId + "]"); + } + Message result = Message.ok(); + 
PageHelper.startPage(pageNow, pageSize); + try{ + PageInfo pageInfo = this.streamJobService.getVersionList(jobId); + if (Objects.nonNull(pageInfo)){ + result.data("versions", pageInfo.getList()); + result.data("totalPage", pageInfo.getTotal()); + } + } catch (Exception e){ + result = Message.error("Fail to query job version page (查看任务版本列表失败), message: " + e.getMessage()); + } finally{ + PageHelper.clearPage(); + } + return result; + } + @RequestMapping(path = "/version", method = RequestMethod.GET) + public Message version(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId, + @RequestParam(value = "version", required = false) String version) throws JobException { + if (jobId == null) { + throw JobExceptionManager.createException(30301, "jobId"); + } + if (StringUtils.isEmpty(version)) { + throw JobExceptionManager.createException(30301, "version"); + } + String username = SecurityFilter.getLoginUsername(req); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to view versions of StreamJob [" + jobId + "]"); + } + VersionDetailVo versionDetailVO = streamJobService.versionDetail(jobId, version); + return Message.ok().data("detail", versionDetailVO); + } + + /** + * Inspect the execution + * @param req request + * @return message + */ + @RequestMapping(path = "/execute/inspect", method = RequestMethod.PUT) + public Message executeInspect(HttpServletRequest req, @RequestParam(value = "jobId")Integer jobId){ + String userName = ModuleUserUtils.getOperationUser(req, "Inspect of execution"); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if (Objects.isNull(streamJob)){ + return Message.error("Unknown StreamJob with id: " + jobId + "(无法找到对应的流任务)"); + } + if (!streamJobService.hasPermission(streamJob, userName) && + 
!this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())){ + return Message.error("Have no permission to inspect the StreamJob [" + jobId + "]"); + } + Message result = Message.ok(); + try { + List inspectResult = this.streamJobInspectService + .inspect(jobId, new JobInspectVo.Types[]{JobInspectVo.Types.VERSION, JobInspectVo.Types.SNAPSHOT}); + List inspections = inspectResult.stream().map(JobInspectVo::getInspectName) + .collect(Collectors.toList()); + result.data("inspections", inspections); + Message finalResult = result; + inspectResult.forEach(inspect -> { + finalResult.data(inspect.getInspectName(), inspect); + }); + } catch (Exception e){ + result = Message.error("Fail to inspect job of the execution(任务执行前检查失败), message: " + e.getMessage()); + } + return result; + } + @RequestMapping(path = "/execute", method = RequestMethod.POST) + public Message executeJob(HttpServletRequest req, @RequestBody Map json) throws JobException { + String userName = SecurityFilter.getLoginUsername(req); + if (!json.containsKey("jobId") || json.get("jobId") == null) { + throw JobExceptionManager.createException(30301, "jobId"); + } + long jobId = Long.parseLong(json.get("jobId").toString()); + LOG.info("{} try to execute job {}.", userName, jobId); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) { + return Message.error("Job " + streamJob.getName() + " is not supported to execute."); + } + if (!streamJobService.hasPermission(streamJob, userName) && + !this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to execute StreamJob [" + jobId + "]"); + } + try { + streamTaskService.execute(jobId, 0L, userName); + } catch (Exception e) { + LOG.error("{} execute job {} failed!", userName, jobId, e); + return 
Message.error(ExceptionUtils.getRootCauseMessage(e)); + } + return Message.ok(); + } + + @RequestMapping(path = "/stop", method = RequestMethod.GET) + public Message killJob(HttpServletRequest req, + @RequestParam(value = "jobId", required = false) Long jobId, + @RequestParam(value = "snapshot", required = false) Boolean snapshot) throws JobException { + String userName = SecurityFilter.getLoginUsername(req); + snapshot = !Objects.isNull(snapshot) && snapshot; + if (jobId == null) { + throw JobExceptionManager.createException(30301, "jobId"); + } + LOG.info("{} try to kill job {}.", userName, jobId); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } + if (!streamJobService.hasPermission(streamJob, userName) && + !this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to kill/stop StreamJob [" + jobId + "]"); + } + if(JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) { + try { + PauseResultVo resultVo = streamTaskService.pause(jobId, 0L, userName, Objects.nonNull(snapshot)? snapshot : false); + return snapshot? 
Message.ok().data("path", resultVo.getSnapshotPath()) : Message.ok(); + } catch (Exception e) { + LOG.error("{} kill job {} failed!", userName, jobId, e); + return Message.error(ExceptionUtils.getRootCauseMessage(e)); + } + } else { + LOG.error("{} try to kill not-supported-management job {} with name {}.", userName, jobId, streamJob.getName()); + return tryStopTask(streamJob, null); + } + } + + @RequestMapping(path = "/details", method = RequestMethod.GET) + public Message detailsJob(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId, + @RequestParam(value = "version", required = false) String version) throws JobException, JsonProcessingException { + if (jobId == null) { + JobExceptionManager.createException(30301, "jobId"); + } + String username = SecurityFilter.getLoginUsername(req); + StreamJob streamJob = streamJobService.getJobById(jobId); + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to get Job details of StreamJob [" + jobId + "]"); + } + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } + return Message.ok().data("details", streamTaskService.getJobDetailsVO(streamJob, version)); + } + + @RequestMapping(path = "/execute/history", method = RequestMethod.GET) + public Message executeHistoryJob(HttpServletRequest req, + @RequestParam(value = "jobId", required = false) Long jobId, + @RequestParam(value = "version", required = false) String version) throws JobException { + String username = SecurityFilter.getLoginUsername(req); + if (jobId == null) { + throw JobExceptionManager.createException(30301, "jobId"); + } + if (StringUtils.isEmpty(version)) { + throw JobExceptionManager.createException(30301, "version"); + } + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if (!streamJobService.hasPermission(streamJob, username) && + 
!this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to view execution history of StreamJob [" + jobId + "]"); + } + List details = streamTaskService.queryHistory(jobId, version); + return Message.ok().data("details", details); + } + + private Message withStreamJob(HttpServletRequest req, String projectName, + String jobName, String username, + Function streamJobFunction) { + if(StringUtils.isBlank(projectName)) { + return Message.error("projectName cannot be empty!"); + } else if(StringUtils.isBlank(jobName)) { + return Message.error("jobName cannot be empty!"); + } + List streamJobs = streamJobService.getByProList(projectName, username, jobName, null, null).getList(); + if(CollectionUtils.isEmpty(streamJobs)) { + return Message.error("Not exits Streamis job " + jobName); + } else if(streamJobs.size() > 1) { + return Message.error("Too many Streamis Job named " + jobName + ", we cannot distinguish between them."); + } else if(!"spark.jar".equals(streamJobs.get(0).getJobType())) { + return Message.error("Only spark.jar Job support to manage task."); + } + StreamJob streamJob = streamJobService.getJobById(streamJobs.get(0).getId()); + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to operate task for StreamJob [" + jobName + "]."); + } + return streamJobFunction.apply(streamJob); + } + + @RequestMapping(path = "/addTask", method = RequestMethod.GET) + public Message addTask(HttpServletRequest req, + @RequestParam(value = "projectName") String projectName, + @RequestParam(value = "jobName") String jobName, + @RequestParam(value = "appId") String appId, + @RequestParam(value = "appUrl") String appUrl) { + String username = SecurityFilter.getLoginUsername(req); + LOG.info("User {} try to add a new task for Streamis job {}.{} with appId: {}, appUrl: {}.", username, 
projectName, jobName, appId, appUrl); + if(StringUtils.isBlank(appId)) { + return Message.error("appId cannot be empty!"); + } + return withStreamJob(req, projectName, jobName, username, streamJob -> { + // 如果存在正在运行的,先将其停止掉 + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId()); + if(streamTask != null && JobConf.isRunning(streamTask.getStatus())) { + LOG.warn("Streamis Job {} exists running task, update its status from Running to stopped at first.", jobName); + streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue()); + streamTask.setErrDesc("stopped by App's new task."); + streamTaskService.updateTask(streamTask); + } + if(streamTask == null || StringUtils.isBlank(streamTask.getLinkisJobInfo())) { + // 这里取个巧,从该工程该用户有权限的Job中找到一个Flink的历史作业,作为这个Spark Streaming作业的jobId和jobInfo + // 替换掉JobInfo中的 yarn 信息,这样我们前端就可以在不修改任何逻辑的情况下正常展示Spark Streaming作业了 + PageInfo jobList = streamJobService.getByProList(streamJob.getProjectName(), username, null, null, null); + List copyJobs = jobList.getList().stream().filter(job -> !job.getJobType().startsWith("spark.")) + .collect(Collectors.toList()); + if(copyJobs.isEmpty()) { + return Message.error("no Flink Job has been submitted, the register to Streamis cannot be succeeded."); + } + int index = 0; + streamTask = null; + while(streamTask == null && index < copyJobs.size()) { + StreamTask copyTask = streamTaskService.getLatestTaskByJobId(copyJobs.get(index).getId()); + if(copyTask == null || StringUtils.isBlank(copyTask.getLinkisJobInfo())) { + index ++; + } else { + LOG.warn("Streamis Job {} will bind the linkisJobInfo from history Flink Job {} with linkisJobId: {}, linkisJobInfo: {}.", + jobName, copyJobs.get(index).getName(), copyTask.getLinkisJobId(), copyTask.getLinkisJobInfo()); + streamTask = streamTaskService.createTask(streamJob.getId(), (Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue(), username); + streamTask.setLinkisJobId(copyTask.getLinkisJobId()); + 
streamTask.setLinkisJobInfo(copyTask.getLinkisJobInfo()); + } + } + if(streamTask == null) { + return Message.error("no Flink task has been executed, the register to Streamis cannot be succeeded."); + } + } else { + StreamTask newStreamTask = streamTaskService.createTask(streamJob.getId(), (Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue(), username); + streamTask.setId(newStreamTask.getId()); + streamTask.setVersion(newStreamTask.getVersion()); + streamTask.setErrDesc(""); + streamTask.setStatus(newStreamTask.getStatus()); + streamTask.setSubmitUser(username); + } + streamTask.setStartTime(new Date()); + streamTask.setLastUpdateTime(new Date()); + StreamTask finalStreamTask = streamTask; + return withFlinkJobInfo(jobName, streamTask.getLinkisJobInfo(), flinkJobInfo -> { + flinkJobInfo.setApplicationId(appId); + flinkJobInfo.setApplicationUrl(appUrl); + flinkJobInfo.setName(jobName); + flinkJobInfo.setStatus(JobConf.getStatusString(finalStreamTask.getStatus())); + StreamTaskUtils.refreshInfo(finalStreamTask, flinkJobInfo); + streamTaskService.updateTask(finalStreamTask); + LOG.info("Streamis Job {} has added a new task successfully.", jobName); + return Message.ok(); + }); + }); + } + + private Message withFlinkJobInfo(String jobName, String flinkJobInfoStr, Function flinkJobInfoFunction) { + FlinkJobInfo flinkJobInfo; + try { + flinkJobInfo = DWSHttpClient.jacksonJson().readValue(flinkJobInfoStr, FlinkJobInfo.class); + } catch (JsonProcessingException e) { + LOG.error("Job {} deserialize the flinkJobInfo string to object failed!", jobName, e); + return Message.error("Deserialize the flinkJobInfo string to object failed!"); + } + return flinkJobInfoFunction.apply(flinkJobInfo); + } + + @RequestMapping(path = "/updateTask", method = RequestMethod.GET) + public Message updateTask(HttpServletRequest req, + @RequestParam(value = "projectName") String projectName, + @RequestParam(value = "jobName") String jobName, + @RequestParam(value = "appId") String appId, + 
@RequestParam(value = "metrics") String metrics) { + String username = SecurityFilter.getLoginUsername(req); + LOG.info("User {} try to update task for Streamis job {}.{} with appId: {}, metrics: {}.", username, projectName, jobName, appId, metrics); + return withStreamJob(req, projectName, jobName, username, streamJob -> { + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId()); + if (streamTask == null) { + LOG.warn("Job {} is not exists running task, ignore to update its metrics.", jobName); + return Message.ok("not exists running task, ignore it."); + } else if (JobConf.isCompleted(streamTask.getStatus())) { + LOG.warn("The task of job {} is completed, ignore to update its metrics.", jobName); + return Message.ok("Task is completed, ignore to update its metrics."); + } + return withFlinkJobInfo(jobName, streamTask.getLinkisJobInfo(), flinkJobInfo -> { + if (!flinkJobInfo.getApplicationId().equals(appId)) { + LOG.warn("Job {} with running task is not equals to the request appId: {}, ignore to update its metrics.", + jobName, flinkJobInfo.getApplicationId(), appId); + return Message.ok("the request appId is not equals to the running task appId " + flinkJobInfo.getApplicationId()); + } + JobStateInfo jobStateInfo = new JobStateInfo(); + jobStateInfo.setTimestamp(System.currentTimeMillis()); + jobStateInfo.setLocation(metrics); + flinkJobInfo.setJobStates(new JobStateInfo[]{jobStateInfo}); + StreamTaskUtils.refreshInfo(streamTask, flinkJobInfo); + streamTaskService.updateTask(streamTask); + LOG.info("Streamis Job {} has updated the task metrics successfully.", jobName); + return Message.ok(); + }); + }); + } + + @RequestMapping(path = "/updateTask", method = RequestMethod.POST) + public Message updateTask(HttpServletRequest req, + @RequestBody Map json) { + String projectName = json.get("projectName"); + String jobName = json.get("jobName"); + String appId = json.get("appId"); + String metrics = json.get("metrics"); + return 
updateTask(req, projectName, jobName, appId, metrics); + } + + @RequestMapping(path = "/stopTask", method = RequestMethod.GET) + public Message stopTask(HttpServletRequest req, + @RequestParam(value = "projectName") String projectName, + @RequestParam(value = "jobName") String jobName, + @RequestParam(value = "appId") String appId, + @RequestParam(value = "appUrl") String appUrl) { + String username = SecurityFilter.getLoginUsername(req); + LOG.info("User {} try to stop task for Streamis job {}.{} with appId: {}, appUrl: {}.", username, projectName, jobName, appId, appUrl); + return withStreamJob(req, projectName, jobName, username, + streamJob -> tryStopTask(streamJob, appId)); + } + + private Message tryStopTask(StreamJob streamJob, String appId) { + // 如果存在正在运行的,将其停止掉 + StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId()); + if(streamTask != null && JobConf.isRunning(streamTask.getStatus())) { + return withFlinkJobInfo(streamJob.getName(), streamTask.getLinkisJobInfo(), flinkJobInfo -> { + if(appId == null || flinkJobInfo.getApplicationId().equals(appId)) { + LOG.warn("Streamis Job {} is exists running task, update its status to stopped.", streamJob.getName()); + streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue()); + streamTask.setErrDesc("stopped by App itself."); + streamTaskService.updateTask(streamTask); + return Message.ok(); + } else { + LOG.warn("Job {} with running task is not equals to the request appId: {}, ignore to stop it.", + streamJob.getName(), flinkJobInfo.getApplicationId(), appId); + return Message.ok("the request appId is not equals to the running task appId " + flinkJobInfo.getApplicationId()); + } + }); + } else { + LOG.warn("Streamis Job {} is not exists running task, ignore to stop it.", streamJob.getName()); + return Message.ok(); + } + } + + @RequestMapping(path = "/progress", method = RequestMethod.GET) + public Message progressJob(HttpServletRequest req, @RequestParam(value = 
"jobId", required = false) Long jobId, + @RequestParam(value = "version", required = false) String version) throws JobException { + String username = SecurityFilter.getLoginUsername(req); + if (jobId == null) { + throw JobExceptionManager.createException(30301, "jobId"); + } + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) { + return Message.error("Job " + streamJob.getName() + " is not supported to get progress."); + } + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to view the progress of StreamJob [" + jobId + "]"); + } + JobProgressVo jobProgressVO = streamTaskService.getProgress(jobId, version); + return Message.ok().data("taskId", jobProgressVO.getTaskId()).data("progress", jobProgressVO.getProgress()); + } + + @RequestMapping(path = "/jobContent", method = RequestMethod.GET) + public Message uploadDetailsJob(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId, + @RequestParam(value = "version", required = false) String version) { + String username = SecurityFilter.getLoginUsername(req); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to view job details of StreamJob [" + jobId + "]"); + } + StreamisTransformJobContent jobContent = streamJobService.getJobContent(jobId, version); + return Message.ok().data("jobContent", jobContent); + } + + @RequestMapping(path = "/alert", method = RequestMethod.GET) + public Message getAlert(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long 
jobId, + @RequestParam(value = "version", required = false) String version) { + String username = SecurityFilter.getLoginUsername(req); + return Message.ok().data("list", streamJobService.getAlert(username, jobId, version)); + } + + @RequestMapping(path = "/logs", method = RequestMethod.GET) + public Message getLog(HttpServletRequest req, + @RequestParam(value = "jobId", required = false) Long jobId, + @RequestParam(value = "taskId", required = false) Long taskId, + @RequestParam(value = "pageSize", defaultValue = "100") Integer pageSize, + @RequestParam(value = "fromLine", defaultValue = "1") Integer fromLine, + @RequestParam(value = "ignoreKeywords", required = false) String ignoreKeywords, + @RequestParam(value = "onlyKeywords", required = false) String onlyKeywords, + @RequestParam(value = "logType", required = false) String logType, + @RequestParam(value = "lastRows", defaultValue = "0") Integer lastRows) throws JobException { + if (jobId == null) { + throw JobExceptionManager.createException(30301, "jobId"); + } + logType = StringUtils.isBlank(logType) ? 
"client" : logType; + String username = SecurityFilter.getLoginUsername(req); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType()) && + "client".equals(logType)) { + return Message.error("Job " + streamJob.getName() + " is not supported to get client logs."); + } + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) { + return Message.error("Have no permission to fetch logs from StreamJob [" + jobId + "]"); + } + LogRequestPayload payload = new LogRequestPayload(); + payload.setFromLine(fromLine); + payload.setIgnoreKeywords(ignoreKeywords); + payload.setLastRows(lastRows); + payload.setOnlyKeywords(onlyKeywords); + payload.setLogType(logType); + payload.setPageSize(pageSize); + return Message.ok().data("logs", streamTaskService.getRealtimeLog(jobId, null != taskId? 
taskId : 0L, username, payload)); + } + + /** + * Refresh the job status + * @return status list + */ + @RequestMapping(path = "/status", method = RequestMethod.PUT) + public Message status(@RequestBody Map> requestMap){ + List jobIds = requestMap.get("id_list"); + if (Objects.isNull(jobIds) || jobIds.isEmpty()){ + return Message.error("The list of job id which to refresh the status cannot be null or empty"); + } + Message result = Message.ok("success"); + try{ + result.data("result", this.streamTaskService.getStatusList(new ArrayList<>(jobIds))); + }catch (Exception e){ + String message = "Fail to refresh the status of jobs(刷新/获得任务状态失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message, e); + } + return result; + } + + /** + * Do snapshot + * @return path message + */ + @RequestMapping(path = "/snapshot/{jobId:\\w+}", method = RequestMethod.PUT) + public Message snapshot(@PathVariable("jobId")Long jobId, HttpServletRequest request){ + Message result = Message.ok(); + try{ + String username = SecurityFilter.getLoginUsername(request); + StreamJob streamJob = this.streamJobService.getJobById(jobId); + if(streamJob == null) { + return Message.error("not exists job " + jobId); + } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) { + return Message.error("Job " + streamJob.getName() + " is not supported to do snapshot."); + } + if (!streamJobService.hasPermission(streamJob, username) && + !this.privilegeService.hasEditPrivilege(request, streamJob.getProjectName())){ + return Message.error("Have no permission to do snapshot for StreamJob [" + jobId + "]"); + } + result.data("path", streamTaskService.snapshot(jobId, 0L, username)); + }catch (Exception e){ + String message = "Fail to do a snapshot operation (快照生成失败), message: " + e.getMessage(); + LOG.warn(message, e); + result = Message.error(message, e); + } + return result; + } + +} diff --git 
a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/ProjectRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/ProjectRestfulApi.java new file mode 100644 index 000000000..c85e22ced --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/ProjectRestfulApi.java @@ -0,0 +1,48 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.restful.api; + +import com.webank.wedatasphere.streamis.jobmanager.exception.ProjectException; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.TaskCoreNumVo; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +@RequestMapping(path = "/streamis/streamJobManager/project") +@RestController +public class ProjectRestfulApi { + + @Autowired + private StreamJobService streamJobService; + + @RequestMapping(path = "/core/target", method = RequestMethod.GET) + public Message getView(HttpServletRequest req, @RequestParam(value= "projectName",required = false) String projectName) throws ProjectException { + if(StringUtils.isBlank(projectName)){ + throw new ProjectException("params cannot be empty!"); + } + String username = SecurityFilter.getLoginUsername(req); + TaskCoreNumVo taskCoreNumVO = streamJobService.countByCores(projectName,username); + return Message.ok().data("taskCore",taskCoreNumVO); + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/UploadRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/UploadRestfulApi.java new file mode 100644 index 000000000..c10f693f4 --- /dev/null +++ 
b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/UploadRestfulApi.java @@ -0,0 +1,110 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.restful.api; + +import com.webank.wedatasphere.streamis.jobmanager.exception.JobException; +import com.webank.wedatasphere.streamis.jobmanager.exception.JobExceptionManager; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobVersion; +import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.BMLService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService; +import com.webank.wedatasphere.streamis.jobmanager.manager.util.IoUtils; +import com.webank.wedatasphere.streamis.jobmanager.manager.util.ZipHelper; +import org.apache.commons.io.FileUtils; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import 
org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +import javax.servlet.http.HttpServletRequest; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; +import java.util.Objects; + +@RequestMapping(path = "/streamis/streamJobManager/job") +@RestController +public class UploadRestfulApi { + + private static final Logger LOG = LoggerFactory.getLogger(UploadRestfulApi.class); + + @Autowired + private StreamJobService streamJobService; + + @Autowired + private BMLService bmlService; + + @Autowired + private ProjectPrivilegeService projectPrivilegeService; + + @RequestMapping(path = "/upload", method = RequestMethod.POST) + public Message uploadJar(HttpServletRequest request, + @RequestParam(name = "projectName", required = false) String projectName, + @RequestParam(name = "file") List files) throws IOException, JobException { + + String userName = SecurityFilter.getLoginUsername(request); + if (files == null || files.size() <= 0) { + throw JobExceptionManager.createException(30300, "uploaded files"); + } + if (!projectPrivilegeService.hasEditPrivilege(request, projectName)) return Message.error("the current user has no operation permission"); + + //Only uses 1st file(只取第一个文件) + MultipartFile p = files.get(0); + String fileName = new String(p.getOriginalFilename().getBytes("ISO8859-1"), StandardCharsets.UTF_8); + LOG.info("Try to upload a StreamJob zip {} to project {}.", fileName, projectName); + if(!ZipHelper.isZip(fileName)){ + throw JobExceptionManager.createException(30302); + } + InputStream is = null; + OutputStream os = null; + File file = null; + try{ + String inputPath = IoUtils.generateIOPath(userName, "streamis", fileName); + file = new File(inputPath); + if(file.getParentFile().exists()){ + 
FileUtils.deleteDirectory(file.getParentFile()); + } + is = p.getInputStream(); + os = IoUtils.generateExportOutputStream(inputPath); + IOUtils.copy(is, os); + StreamJobVersion job = streamJobService.uploadJob(projectName, userName, inputPath); + return Message.ok().data("jobId",job.getJobId()); + } catch (Exception e){ + LOG.error("Failed to upload zip {} to project {} for user {}.", fileName, projectName, userName, e); + return Message.error(ExceptionUtils.getRootCauseMessage(e)); + } finally{ + IOUtils.closeQuietly(os); + IOUtils.closeQuietly(is); + //Delete the temporary file + if (Objects.nonNull(file) && file.exists()){ + if (!file.delete()){ + LOG.warn("Fail to delete the input job file, please examine the local system environment"); + } + } + } + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/UserService.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/UserService.java new file mode 100644 index 000000000..acf88aed0 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/UserService.java @@ -0,0 +1,26 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.service; + +import javax.servlet.http.HttpServletRequest; +import java.util.List; + +/** + * Created by v_wbyynie on 2021/9/27. + */ +public interface UserService { + List workspaceUserQuery(HttpServletRequest req, String workspaceId); +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/impl/UserServiceImpl.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/impl/UserServiceImpl.java new file mode 100644 index 000000000..77b4fb618 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/impl/UserServiceImpl.java @@ -0,0 +1,55 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.service.impl; + +import com.webank.wedatasphere.streamis.jobmanager.service.UserService; +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.server.conf.ServerConfiguration; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.http.*; +import org.springframework.stereotype.Service; +import org.springframework.web.client.RestTemplate; + +import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +@Service +public class UserServiceImpl implements UserService { + + @Autowired + private RestTemplate restTemplate; + + @Override + public List workspaceUserQuery(HttpServletRequest req,String workspaceId) { + String url = Configuration.getGateWayURL() + ServerConfiguration.BDP_SERVER_RESTFUL_URI().getValue() + "/dss/framework/workspace/getWorkspaceUsers?workspaceId=" + workspaceId; + HttpHeaders headers = new HttpHeaders(); + headers.setContentType(MediaType.APPLICATION_JSON); + headers.add("Cookie", req.getHeader("Cookie")); + HttpEntity httpEntity = new HttpEntity(headers); + ResponseEntity response = restTemplate.exchange(url, HttpMethod.GET, httpEntity, Map.class); + Map data = (Map) response.getBody().get("data"); + List> workspaceUsers = data.get("workspaceUsers"); + if(workspaceUsers == null || workspaceUsers.isEmpty()) { + return new ArrayList<>(); + } else { + return workspaceUsers.stream().map(user -> (String) user.get("name")).collect(Collectors.toList()); + } + } + +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BaseBulkRequest.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BaseBulkRequest.java new file mode 100644 index 000000000..2ff55dc5a --- /dev/null +++ 
b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BaseBulkRequest.java @@ -0,0 +1,53 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.vo; + +import com.fasterxml.jackson.annotation.JsonAlias; +import com.fasterxml.jackson.annotation.JsonInclude; + +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class BaseBulkRequest { + + /** + * Bulk status list + */ + public enum BulkStatus{ + Success, Failed, Processing + } + + @JsonAlias("sbj_type") + protected String bulkSubjectType; + + // TODO JSR 303 + @JsonAlias("bulk_sbj") + protected T bulkSubject; + + public String getBulkSubjectType() { + return bulkSubjectType; + } + + public void setBulkSubjectType(String bulkSubjectType) { + this.bulkSubjectType = bulkSubjectType; + } + + public T getBulkSubject() { + return bulkSubject; + } + + public void setBulkSubject(T bulkSubject) { + this.bulkSubject = bulkSubject; + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkResponse.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkResponse.java new file mode 100644 index 000000000..95b155f65 --- /dev/null +++ 
b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkResponse.java @@ -0,0 +1,89 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.vo; + +import org.apache.commons.lang3.StringUtils; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +public class BulkResponse { + /** + * Total count + */ + private int total = 0; + + private Map> result = new HashMap<>(); + + + public BulkResponse(){ + + } + + public BulkResponse(Function aggregateKeyFunc, List resultElements){ + resultElements.forEach(element -> { + String key = aggregateKeyFunc.apply(element); + if (StringUtils.isNotBlank(key)){ + result.compute(key, (aggregateKey, statistic) -> { + if (null == statistic){ + statistic = new ResultStatistic<>(); + } + statistic.getData().add(element); + return statistic; + }); + } + }); + this.total = resultElements.size(); + } + public int getTotal() { + return total; + } + + public void setTotal(int total) { + this.total = total; + } + + public Map> getResult() { + return result; + } + + public void setResult(Map> result) { + this.result = result; + } + + public static class ResultStatistic{ + + /** + * Result elements + */ + private List data = new ArrayList<>(); + + public List getData() { + return data; + } + + public void 
setData(List data) { + this.data = data; + } + + public int getCount(){ + return this.data.size(); + } + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/JobBulkPauseRequest.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/JobBulkPauseRequest.java new file mode 100644 index 000000000..c4d311f3a --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/JobBulkPauseRequest.java @@ -0,0 +1,40 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.vo; + +import com.fasterxml.jackson.annotation.JsonInclude; + +/** + * Bulk pausing + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class JobBulkPauseRequest extends JobBulkRequest{ + /** + * Is snapshot + */ + private boolean snapshot = false; + + public JobBulkPauseRequest(){ + super(); + } + public boolean isSnapshot() { + return snapshot; + } + + public void setSnapshot(boolean snapshot) { + this.snapshot = snapshot; + } +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/JobBulkRequest.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/JobBulkRequest.java new file mode 100644 index 000000000..f0eb96d41 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/JobBulkRequest.java @@ -0,0 +1,38 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.vo; + +import com.fasterxml.jackson.annotation.JsonInclude; + +import java.util.ArrayList; +import java.util.List; + +/** + * Bulk request for job restful api + */ +@JsonInclude(JsonInclude.Include.NON_EMPTY) +public class JobBulkRequest extends BaseBulkRequest>{ + + public enum IdType { + JOB, TASK + } + + public JobBulkRequest(){ + this.bulkSubjectType = IdType.JOB.name(); + this.bulkSubject = new ArrayList<>(); + } + +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/application.yml b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/application.yml new file mode 100755 index 000000000..48d1360f3 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/application.yml @@ -0,0 +1,23 @@ +server: + port: 9009 +spring: + application: + name: streamis-job-manager + mvc: + servlet: + path: /api/rest_j/v1 +eureka: + client: + serviceUrl: + defaultZone: http://localhost:20303/eureka/ + instance: + metadata-map: + test: wedatasphere + +management: + endpoints: + web: + exposure: + include: refresh,info +logging: + config: classpath:log4j2.xml \ No newline at end of file diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/linkis.properties b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/linkis.properties new file mode 100755 index 000000000..546caebf5 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/linkis.properties @@ -0,0 +1,58 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +wds.linkis.test.mode=true +wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/streamis?characterEncoding=UTF-8 +wds.linkis.server.mybatis.datasource.username=user1 + +wds.linkis.server.mybatis.datasource.password=pwd1 +wds.linkis.gateway.ip= +wds.linkis.gateway.port= +wds.linkis.gateway.url=http://localhost:9001 + +wds.linkis.mysql.is.encrypt=false +##restful +wds.linkis.log.clear=true +wds.linkis.server.version=v1 +wds.linkis.test.user=user1 + + + +##restful +wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.streamis.datasource.server.restful.api,\ + com.webank.wedatasphere.streamis.project.server.restful,\ + com.webank.wedatasphere.streamis.jobmanager.restful.api,\ + com.webank.wedatasphere.streamis.projectmanager.restful.api +##mybatis +wds.linkis.server.mybatis.mapperLocations=\ + classpath*:com/webank/wedatasphere/streamis/datasource/manager/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/project/server/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/projectmanager/dao/impl/*.xml + +wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.streamis.datasource.manager.domain,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.entity,\ + com.webank.wedatasphere.streamis.jobmanager.manager.entity,\ + com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo,\ + 
com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.entity.dto,\ + com.webank.wedatasphere.streamis.projectmanager.entity + + +wds.linkis.server.mybatis.BasePackage=org.apache.linkis.bml.dao,\ + com.webank.wedatasphere.streamis.project.server.dao,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.dao,\ + com.webank.wedatasphere.streamis.jobmanager.manager.dao,\ + com.webank.wedatasphere.streamis.projectmanager.dao \ No newline at end of file diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/log4j.properties b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/log4j.properties new file mode 100755 index 000000000..2ad28cde6 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/log4j.properties @@ -0,0 +1,35 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +### set log levels ### + +log4j.rootCategory=INFO,console + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.Threshold=INFO +log4j.appender.console.layout=org.apache.log4j.PatternLayout +#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n + + +log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender +log4j.appender.com.webank.bdp.ide.core.Threshold=INFO +log4j.additivity.com.webank.bdp.ide.core=false +log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout +log4j.appender.com.webank.bdp.ide.core.Append=true +log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log +log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n + +log4j.logger.org.springframework=INFO \ No newline at end of file diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/log4j2.xml b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/log4j2.xml new file mode 100755 index 000000000..eafb0cdff --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/resources/log4j2.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiver.scala b/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiver.scala new file mode 100644 index 000000000..89718ba85 --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiver.scala @@ -0,0 +1,65 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.jobmanager.rpc + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.rpc.{Receiver, Sender} +import com.webank.wedatasphere.streamis.jobmanager.common.protocol.{ImportJobManagerRequest, ImportJobManagerResponse} +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.MetaJsonInfo +import com.webank.wedatasphere.streamis.jobmanager.manager.service.{DefaultStreamJobService, StreamJobService} +import com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser.{FlinkSQLJobContentParser, FlinkWorkflowJobContentParser} +import org.apache.commons.lang.StringUtils + +import scala.collection.JavaConverters._ +import scala.concurrent.duration.Duration + +/** + * created by cooperyang on 2021/7/19 + * Description: + */ +class StreamisJobManagerReceiver(jobService: StreamJobService) extends Receiver with Logging { + + + private val timeout = CommonVars("wds.streamis.workflow.ask.timeout", 300).getValue + + + override def receive(message: Any, sender: Sender): Unit = { + + } + + override def receiveAndReply(message: Any, sender: Sender): Any = receiveAndReply(message, Duration(timeout, "seconds"), sender) + + + override def receiveAndReply(message: Any, duration: Duration, sender: Sender): Any = message match { + case request: ImportJobManagerRequest => + info(s"Try to publish DSS tasks with $request.") + val metaJsonInfo = new MetaJsonInfo + 
metaJsonInfo.setWorkspaceName(request.workspaceName) + metaJsonInfo.setDescription(request.description) + metaJsonInfo.setProjectName(request.projectName) + metaJsonInfo.setJobContent(FlinkWorkflowJobContentParser.sqlToJobContent(request.workflowId, request.workflowName, request.executionCode)) + metaJsonInfo.setJobType(FlinkSQLJobContentParser.JOB_TYPE) + metaJsonInfo.setJobName(request.streamJobName) + if (StringUtils.isNotEmpty(request.version)) metaJsonInfo.setComment("Published from DSS with " + request.version) + metaJsonInfo.setTags(request.tags.asScala.mkString(",")) + val streamJobVersion = jobService.createOrUpdate(request.publishUser, metaJsonInfo) + ImportJobManagerResponse(0, streamJobVersion.getJobId, "Publish succeed.") + case _ => + } + + +} diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiverChooser.scala b/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiverChooser.scala new file mode 100644 index 000000000..7fe4e78af --- /dev/null +++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiverChooser.scala @@ -0,0 +1,49 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.jobmanager.rpc + +import com.webank.wedatasphere.streamis.jobmanager.common.protocol.StreamJobManagerProtocol +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService +import org.apache.linkis.rpc.{RPCMessageEvent, Receiver, ReceiverChooser} +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component + +import javax.annotation.PostConstruct + +/** + * created by cooperyang on 2021/7/19 + * Description: + */ +@Component +class StreamisJobManagerReceiverChooser extends ReceiverChooser{ + + @Autowired + var jobService: StreamJobService = _ + + + private var receiver: Option[StreamisJobManagerReceiver] = _ + + @PostConstruct + def init():Unit = { + receiver = Some(new StreamisJobManagerReceiver(jobService)) + } + + override def chooseReceiver(event: RPCMessageEvent): Option[Receiver] = event.message match { + case streamFlowProtocol: StreamJobManagerProtocol => receiver + case _ => None + } + +} diff --git a/streamis-jobmanager/streamis-projectmanager-server/pom.xml b/streamis-jobmanager/streamis-projectmanager-server/pom.xml new file mode 100644 index 000000000..f0fb039e7 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/pom.xml @@ -0,0 +1,129 @@ + + + + + + streamis-jobmanager + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-projectmanager-server + + + + org.apache.linkis + linkis-mybatis + + + org.apache.linkis + linkis-common + + + commons-io + commons-io + 2.7 + compile + + + commons-lang + commons-lang + + + org.apache.linkis + linkis-rpc + ${linkis.version} + + + com.webank.wedatasphere.streamis + streamis-job-manager-service + ${jobmanager.version} + + + org.apache.linkis + linkis-module + + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + 
maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + streamis-server + false + false + + src/main/assembly/distribution.xml + + + + + + + ${basedir}/src/main/resources + + **/*.properties + **/*.xml + **/*.yml + + + + src/main/java + + **/*.xml + + + + + ${project.artifactId}-${project.version} + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/assembly/distribution.xml b/streamis-jobmanager/streamis-projectmanager-server/src/main/assembly/distribution.xml new file mode 100644 index 000000000..9a69ed9a9 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/assembly/distribution.xml @@ -0,0 +1,69 @@ + + + + streamis-server + + zip + + true + streamis-server + + + + + + lib + true + true + false + false + true + + + + + + ${basedir}/src/main/resources + + * + + 0777 + conf + unix + + + ${basedir}/bin + + * + + 0777 + bin + unix + + + . 
+ + */** + + logs + + + + + diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/conf/RestTemplateConfig.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/conf/RestTemplateConfig.java new file mode 100644 index 000000000..89fa5193f --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/conf/RestTemplateConfig.java @@ -0,0 +1,13 @@ +package com.webank.wedatasphere.streamis.projectmanager.conf; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.web.client.RestTemplate; + +@Configuration +public class RestTemplateConfig { + @Bean + public RestTemplate restTemplate(){ + return new RestTemplate(); + } +} diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/ProjectManagerMapper.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/ProjectManagerMapper.java new file mode 100644 index 000000000..8eefbe92d --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/ProjectManagerMapper.java @@ -0,0 +1,53 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.projectmanager.dao; + +import com.webank.wedatasphere.streamis.projectmanager.entity.ProjectFiles; +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** + * Created by v_wbyynie on 2021/9/17. + */ +public interface ProjectManagerMapper { + + void insertProjectFilesInfo(ProjectFiles projectFiles); + + List listFiles(@Param("projectName") String projectName,@Param("username") String username,@Param("filename") String filename); + + Integer deleteFiles(@Param("list")List list,@Param("username")String username); + + List listFileVersions(@Param("projectName") String projectName, @Param("fileName") String fileName); + + ProjectFiles selectFile(@Param("fileName")String fileName, @Param("version")String version, @Param("projectName")String projectName); + + void updateFileById(ProjectFiles projectFiles); + + ProjectFiles getById(Long id); + + int countFiles(@Param("fileName")String fileName, @Param("projectName")String projectName); + + int deleteVersions(@Param("fileName")String fileName, @Param("projectName")String projectName, @Param("username") String username); + + ProjectFiles getProjectFile(Long id); + + ProjectFiles getJobFile(Long id); + + List selectProjectNamesByIds(List ids); + + String getProjectNameById(Long id); +} diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/impl/ProjectManagerMapper.xml b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/impl/ProjectManagerMapper.xml new file mode 100644 index 000000000..3338203df --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/impl/ProjectManagerMapper.xml @@ -0,0 +1,111 @@ + + + + + + + + + + + INSERT INTO 
linkis_stream_project_files + (`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_by`,`comment`) + VALUES (#{fileName},#{version},#{storePath},#{storeType},#{projectName},#{createBy},#{comment}) + + + + update linkis_stream_project_files set store_path = #{storePath} + + ,version = #{version} + + + ,comment = #{comment} + + where id = #{id} + + + + delete from linkis_stream_project_files where create_by = #{username} and id in + + #{id} + + + + + DELETE FROM linkis_stream_project_files WHERE project_name = + #{projectName} AND file_name = #{fileName} AND create_by = #{username} + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/entity/ProjectFiles.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/entity/ProjectFiles.java new file mode 100644 index 000000000..4c13a8550 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/entity/ProjectFiles.java @@ -0,0 +1,121 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.projectmanager.entity; + +import java.util.Date; + +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile; + +/** + * Created by v_wbyynie on 2021/9/17. + */ +public class ProjectFiles implements StreamisFile { + + private Long id; + + private String fileName; + + private String version; + + private String comment; + + private String storePath; + + private String storeType = StreamisFile.BML_STORE_TYPE; + + private String projectName; + + private Date createTime; + + private String createBy; + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + @Override + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + @Override + public String getFileName() { + return fileName; + } + + public void setFileName(String fileName) { + this.fileName = fileName; + } + + @Override + public String getStorePath() { + return storePath; + } + + public void setStorePath(String storePath) { + this.storePath = storePath; + } + + @Override + public String getStoreType() { + return storeType; + } + + public void setStoreType(String storeType) { + this.storeType = storeType; + } + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + @Override + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } +} diff --git 
a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/restful/api/ProjectManagerRestfulApi.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/restful/api/ProjectManagerRestfulApi.java new file mode 100644 index 000000000..04d8ed405 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/restful/api/ProjectManagerRestfulApi.java @@ -0,0 +1,220 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.projectmanager.restful.api; + + +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile; +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.FileException; +import com.webank.wedatasphere.streamis.jobmanager.manager.exception.FileExceptionManager; +import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService; +import com.webank.wedatasphere.streamis.jobmanager.manager.util.IoUtils; +import com.webank.wedatasphere.streamis.jobmanager.manager.util.ReaderUtils; +import com.webank.wedatasphere.streamis.projectmanager.entity.ProjectFiles; +import com.webank.wedatasphere.streamis.projectmanager.service.ProjectManagerService; +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang.ArrayUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; +import org.springframework.web.multipart.MultipartFile; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.UnsupportedEncodingException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +@RequestMapping(path = "/streamis/streamProjectManager/project") +@RestController +public class ProjectManagerRestfulApi { + + private static final Logger LOG = 
LoggerFactory.getLogger(ProjectManagerRestfulApi.class); + + @Autowired + private ProjectManagerService projectManagerService; + @Autowired + private ProjectPrivilegeService projectPrivilegeService; + + @RequestMapping(path = "/files/upload", method = RequestMethod.POST) + public Message upload(HttpServletRequest req, + @RequestParam(name = "version",required = false) String version, + @RequestParam(name = "projectName",required = false) String projectName, + @RequestParam(name = "comment", required = false) String comment, + @RequestParam(name = "updateWhenExists", required = false) boolean updateWhenExists, + @RequestParam(name = "file") List files) throws UnsupportedEncodingException, FileException { + + + String username = SecurityFilter.getLoginUsername(req); + if (StringUtils.isBlank(version)) { + return Message.error("version is null"); + } + if (StringUtils.isBlank(projectName)) { + return Message.error("projectName is null"); + } + if (!projectPrivilegeService.hasEditPrivilege(req,projectName)) return Message.error("the current user has no operation permission"); + + //Only uses 1st file(只取第一个文件) + MultipartFile p = files.get(0); + String fileName = new String(p.getOriginalFilename().getBytes("ISO8859-1"), StandardCharsets.UTF_8); + ReaderUtils readerUtils = new ReaderUtils(); + if (!readerUtils.checkName(fileName)) { + throw FileExceptionManager.createException(30601, fileName); + } + if (!updateWhenExists) { + ProjectFiles projectFiles = projectManagerService.selectFile(fileName, version, projectName); + if (projectFiles != null) { + return Message.warn("the file:[" + fileName + "]is exist in the project:" + projectName + ",version:" + version); + } + } + InputStream is = null; + OutputStream os = null; + try { + String inputPath = IoUtils.generateIOPath(username, "streamis", fileName); + is = p.getInputStream(); + os = IoUtils.generateExportOutputStream(inputPath); + IOUtils.copy(is, os); + projectManagerService.upload(username, fileName, version, 
projectName, inputPath,comment); + } catch (Exception e) { + LOG.error("failed to upload zip {} fo user {}", fileName, username, e); + return Message.error(e.getMessage()); + } finally { + IOUtils.closeQuietly(os); + IOUtils.closeQuietly(is); + } + return Message.ok(); + } + + + + @RequestMapping(path = "/files/list", method = RequestMethod.GET) + public Message list( HttpServletRequest req,@RequestParam(value = "filename",required = false) String filename, + @RequestParam(value = "projectName",required = false) String projectName, @RequestParam(value = "username",required = false) String username, + @RequestParam(value = "pageNow",defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize",defaultValue = "20") Integer pageSize) { + if (StringUtils.isBlank(projectName)) { + return Message.error("projectName is null"); + } + if (!projectPrivilegeService.hasAccessPrivilege(req,projectName)) return Message.error("the current user has no operation permission"); + PageHelper.startPage(pageNow, pageSize); + List fileList; + try { + fileList = projectManagerService.listFiles(projectName, username, filename); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo(fileList); + return Message.ok().data("files", fileList).data("totalPage", pageInfo.getTotal()); + } + + @RequestMapping(path = "/files/version/list", method = RequestMethod.GET) + public Message versionList( HttpServletRequest req, @RequestParam(value = "fileName",required = false) String fileName, + @RequestParam(value = "projectName",required = false) String projectName, + @RequestParam(value = "pageNow",defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize",defaultValue = "20") Integer pageSize) { + String username = SecurityFilter.getLoginUsername(req); + if (StringUtils.isBlank(projectName)) { + return Message.error("projectName is null"); + } + if (StringUtils.isBlank(fileName)) { + return Message.error("fileName is null"); + } + if 
(!projectPrivilegeService.hasAccessPrivilege(req,projectName)) return Message.error("the current user has no operation permission"); + PageHelper.startPage(pageNow, pageSize); + List fileList; + try { + fileList = projectManagerService.listFileVersions(projectName, fileName); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo(fileList); + return Message.ok().data("files", fileList).data("totalPage", pageInfo.getTotal()); + } + + + @RequestMapping(path = "/files/delete", method = RequestMethod.GET) + public Message delete( HttpServletRequest req, @RequestParam(value = "fileName",required = false) String fileName, + @RequestParam(value = "projectName",required = false) String projectName) { + String username = SecurityFilter.getLoginUsername(req); + if (!projectPrivilegeService.hasEditPrivilege(req,projectName)) return Message.error("the current user has no operation permission"); + + return projectManagerService.delete(fileName, projectName, username) ? Message.ok() + : Message.warn("you have no permission delete some files not belong to you"); + } + + @RequestMapping(path = "/files/version/delete", method = RequestMethod.GET) + public Message deleteVersion(HttpServletRequest req, @RequestParam(value = "ids",required = false) String ids) { + String username = SecurityFilter.getLoginUsername(req); + List idList = new ArrayList<>(); + if (!StringUtils.isBlank(ids) && !ArrayUtils.isEmpty(ids.split(","))) { + String[] split = ids.split(","); + for (String s : split) { + idList.add(Long.parseLong(s)); + } + } + List projectNames = projectManagerService.getProjectNames(idList); + if (!projectPrivilegeService.hasEditPrivilege(req,projectNames)) { + return Message.error("the current user has no operation permission"); + } + + return projectManagerService.deleteFiles(ids, username) ? 
Message.ok() + : Message.warn("you have no permission delete some files not belong to you"); + } + + @RequestMapping(path = "/files/download", method = RequestMethod.GET) + public Message download( HttpServletRequest req, HttpServletResponse response, @RequestParam(value = "id",required = false) Long id, + @RequestParam(value = "projectName",required = false)String projectName) { + ProjectFiles projectFiles = projectManagerService.getFile(id, projectName); + if (projectFiles == null) { + return Message.error("no such file in this project"); + } + if (StringUtils.isBlank(projectFiles.getStorePath())) { + return Message.error("storePath is null"); + } + if(StringUtils.isBlank(projectName)){ + projectName = projectManagerService.getProjectNameById(id); + } + if (!projectPrivilegeService.hasEditPrivilege(req,projectName)) return Message.error("the current user has no operation permission"); + + response.setContentType("application/x-download"); + response.setHeader("content-Disposition", "attachment;filename=" + projectFiles.getFileName()); + try (InputStream is = projectManagerService.download(projectFiles); + OutputStream os = response.getOutputStream() + ) { + int len = 0; + byte[] arr = new byte[2048]; + while ((len = is.read(arr)) > 0) { + os.write(arr, 0, len); + } + os.flush(); + } catch (Exception e) { + LOG.error("download file: {} failed , message is : {}" , projectFiles.getFileName(), e); + return Message.error(e.getMessage()); + } + return Message.ok(); + } +} diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/ProjectManagerService.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/ProjectManagerService.java new file mode 100644 index 000000000..9ff146678 --- /dev/null +++ 
b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/ProjectManagerService.java @@ -0,0 +1,51 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.projectmanager.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile; +import com.webank.wedatasphere.streamis.projectmanager.entity.ProjectFiles; + +import java.io.IOException; +import java.io.InputStream; +import java.util.List; + +/** + * Created by v_wbyynie on 2021/9/17. 
+ */ +public interface ProjectManagerService { + void upload(String username, String fileName, String version, String projectName, String file, String comment) throws IOException; + + List listFiles(String projectName, String username, String filename); + + boolean deleteFiles(String ids,String username); + + ProjectFiles selectFile(String fileName, String version, String projectName); + + List listFileVersions(String projectName, String fileName); + + InputStream download(ProjectFiles projectFiles) throws JsonProcessingException; + + ProjectFiles getById(Long id); + + boolean delete(String fileName, String projectName, String username); + + ProjectFiles getFile(Long id, String projectName); + + List getProjectNames(List ids); + + String getProjectNameById(Long id); +} diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/impl/ProjectManagerServiceImpl.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/impl/ProjectManagerServiceImpl.java new file mode 100644 index 000000000..ded1b101c --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/impl/ProjectManagerServiceImpl.java @@ -0,0 +1,140 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.webank.wedatasphere.streamis.projectmanager.service.impl; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.commons.collections.CollectionUtils; +import org.apache.linkis.common.utils.JsonUtils; +import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.BMLService; +import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamiFileService; +import com.webank.wedatasphere.streamis.jobmanager.manager.util.ReaderUtils; +import com.webank.wedatasphere.streamis.projectmanager.dao.ProjectManagerMapper; +import com.webank.wedatasphere.streamis.projectmanager.entity.ProjectFiles; +import com.webank.wedatasphere.streamis.projectmanager.service.ProjectManagerService; +import org.apache.commons.lang.ArrayUtils; +import org.apache.commons.lang.StringUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.io.InputStream; +import java.util.*; + +/** + * Created by v_wbyynie on 2021/9/17. 
+ */ +@Service +public class ProjectManagerServiceImpl implements ProjectManagerService, StreamiFileService { + + @Autowired + private BMLService bmlService; + + @Autowired + private ProjectManagerMapper projectManagerMapper; + + @Override + @Transactional(rollbackFor = Exception.class) + public void upload(String username, String fileName, String version, String projectName, String filePath,String comment) throws JsonProcessingException { + Map result = bmlService.upload(username, filePath); + ProjectFiles projectFiles = new ProjectFiles(); + projectFiles.setFileName(fileName); + projectFiles.setVersion(version); + projectFiles.setCreateBy(username); + projectFiles.setComment(comment); + projectFiles.setProjectName(projectName); + ReaderUtils readerUtils = new ReaderUtils(); + projectFiles.setStorePath(readerUtils.readAsJson(result.get("version").toString(),result.get("resourceId").toString())); + ProjectFiles file = selectFile(fileName, version, projectName); + if (file == null) { + projectManagerMapper.insertProjectFilesInfo(projectFiles); + }else { + projectFiles.setId(file.getId()); + projectFiles.setVersion(version); + projectManagerMapper.updateFileById(projectFiles); + } + } + + + @Override + public StreamisFile getFile(String projectName, String fileName, String version) { + return projectManagerMapper.selectFile(fileName, version, projectName); + } + + @Override + public List listFileVersions(String projectName, String fileName) { + return projectManagerMapper.listFileVersions(projectName, fileName); + } + + @Override + public InputStream download(ProjectFiles projectFiles) throws JsonProcessingException { + Map map = JsonUtils.jackson().readValue(projectFiles.getStorePath(), Map.class); + return bmlService.get(projectFiles.getCreateBy(), map.get("resourceId"), map.get("version")); + } + + @Override + public ProjectFiles getById(Long id) { + return projectManagerMapper.getById(id); + } + + @Override + public boolean delete(String fileName, String 
projectName, String username) { + int count = projectManagerMapper.countFiles(fileName,projectName); + int delete = projectManagerMapper.deleteVersions(fileName,projectName,username); + return count == delete; + } + + @Override + public ProjectFiles getFile(Long id, String projectName) { + return StringUtils.isBlank(projectName) ? projectManagerMapper.getJobFile(id) : projectManagerMapper.getProjectFile(id); + } + + @Override + public List listFiles(String projectName, String username, String filename) { + return projectManagerMapper.listFiles(projectName,username, filename); + } + + @Override + public boolean deleteFiles(String ids,String username) { + if (!StringUtils.isBlank(ids) && !ArrayUtils.isEmpty(ids.split(","))) { + String[] split = ids.split(","); + List list = new ArrayList<>(); + for (String s : split) { + list.add(Long.parseLong(s)); + } + return projectManagerMapper.deleteFiles(list, username) >= list.size(); + } + return true; + } + + @Override + public ProjectFiles selectFile(String fileName, String version, String projectName) { + return projectManagerMapper.selectFile(fileName, version, projectName); + } + + @Override + public List getProjectNames(List ids) { + if(CollectionUtils.isEmpty(ids)){ + return null; + } + return projectManagerMapper.selectProjectNamesByIds(ids); + } + + @Override + public String getProjectNameById(Long id) { + return projectManagerMapper.getProjectNameById(id); + } +} diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/DateUtils.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/DateUtils.java new file mode 100644 index 000000000..355d0a5da --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/DateUtils.java @@ -0,0 +1,86 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.projectmanager.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.text.SimpleDateFormat; +import java.util.Date; + +public class DateUtils { + + private static final Logger LOG = LoggerFactory.getLogger(DateUtils.class); + + private static final String FORMAT_HH_T_Z = "yyyy-MM-dd'T'HH:mm:ss'Z'"; + private static final String FORMAT_HH_MM_SS = "yyyy-MM-dd HH:mm:ss"; + private static final String FORMAT_HH_MM_SS_S = "yyyy-MM-dd HH:mm:ss.S"; + private static final String FORMAT_HH_MM = "yyyy-MM-dd HH:mm"; + + + /** + * contain T,Z format date time convert + * + * @param dateTime + * @return + * @throws Exception + */ + public static String dateTimeTZConvert(String dateTime) throws Exception { + Date date = new SimpleDateFormat(FORMAT_HH_T_Z).parse(dateTime); + String time = new SimpleDateFormat(FORMAT_HH_MM_SS).format(date); + return time; + } + + /** + * Time format to be reserved to minutes. 
+ * yyyy-MM-dd HH:mm:ss.S to yyyy-MM-dd HH:mm + * + * @param dateTime the amount of time needed to process + * @return string date time + */ + public static String timeFormatReservedMinutes(String dateTime) { + String timeMin = ""; + try { + SimpleDateFormat sdfSec = new SimpleDateFormat(FORMAT_HH_MM_SS_S); + Date dt = sdfSec.parse(dateTime); + SimpleDateFormat sdfMin = new SimpleDateFormat(FORMAT_HH_MM_SS); + timeMin = sdfMin.format(dt); + } catch (Exception e) { + LOG.error("timeFormatReservedMinutes parse error:{}", e); + } + return timeMin; + } + + /** + * Time format to be reserved to second. + * yyyy-MM-dd HH:mm:ss.S to yyyy-MM-dd HH:mm:ss + * + * @param dateTime the amount of time needed to process + * @return string date time + */ + public static String timeFormatReservedSecond(String dateTime) { + String timeSec = ""; + try { + SimpleDateFormat sdfSec = new SimpleDateFormat(FORMAT_HH_MM_SS_S); + Date dt = sdfSec.parse(dateTime); + SimpleDateFormat sdfMin = new SimpleDateFormat(FORMAT_HH_MM_SS); + timeSec = sdfMin.format(dt); + } catch (Exception e) { + LOG.error("timeFormatReservedSecond parse error:{}", e); + } + return timeSec; + } +} diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/application.yml b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/application.yml new file mode 100644 index 000000000..71657fd31 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/application.yml @@ -0,0 +1,23 @@ +server: + port: 9010 +spring: + application: + name: cloud-streammanager-debug + mvc: + servlet: + path: /api/rest_j/v1 +eureka: + client: + serviceUrl: + defaultZone: http://localhost:20303/eureka/ + instance: + metadata-map: + test: wedatasphere + +management: + endpoints: + web: + exposure: + include: refresh,info +logging: + config: classpath:log4j2.xml \ No newline at end of file diff --git
a/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/linkis.properties b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/linkis.properties new file mode 100644 index 000000000..0fb8d2d09 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/linkis.properties @@ -0,0 +1,59 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +wds.linkis.test.mode=true +wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/streamis?characterEncoding=UTF-8 +wds.linkis.server.mybatis.datasource.username=user1 + +wds.linkis.server.mybatis.datasource.password=pwd1 +wds.linkis.gateway.ip= +wds.linkis.gateway.port= +wds.linkis.gateway.url=http://localhost:9001 + +wds.linkis.mysql.is.encrypt=false +##restful +wds.linkis.log.clear=true +wds.linkis.server.version=v1 + +wds.linkis.test.user=user1 + + + +##restful +wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.streamis.datasource.server.restful.api,\ + com.webank.wedatasphere.streamis.project.server.restful,\ + com.webank.wedatasphere.streamis.jobmanager.restful.api,\ + com.webank.wedatasphere.streamis.projectmanager.restful.api +##mybatis +wds.linkis.server.mybatis.mapperLocations=\ + classpath*:com/webank/wedatasphere/streamis/project/server/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/*.xml,\ + 
classpath*:com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/projectmanager/dao/impl/*.xml + +wds.linkis.server.mybatis.typeAliasesPackage=\ + com.webank.wedatasphere.streamis.jobmanager.launcher.entity,\ + com.webank.wedatasphere.streamis.jobmanager.manager.entity,\ + com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.entity.dto,\ + com.webank.wedatasphere.streamis.projectmanager.entity + + +wds.linkis.server.mybatis.BasePackage=\ + org.apache.linkis.bml.dao,\ + com.webank.wedatasphere.streamis.project.server.dao,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.dao,\ + com.webank.wedatasphere.streamis.jobmanager.manager.dao,\ + com.webank.wedatasphere.streamis.projectmanager.dao \ No newline at end of file diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/log4j.properties b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/log4j.properties new file mode 100644 index 000000000..2ad28cde6 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/log4j.properties @@ -0,0 +1,35 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +### set log levels ### + +log4j.rootCategory=INFO,console + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.Threshold=INFO +log4j.appender.console.layout=org.apache.log4j.PatternLayout +#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n + + +log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender +log4j.appender.com.webank.bdp.ide.core.Threshold=INFO +log4j.additivity.com.webank.bdp.ide.core=false +log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout +log4j.appender.com.webank.bdp.ide.core.Append=true +log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log +log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n + +log4j.logger.org.springframework=INFO \ No newline at end of file diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/log4j2.xml b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/log4j2.xml new file mode 100644 index 000000000..057d7bf21 --- /dev/null +++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/resources/log4j2.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/streamis-project/pom.xml b/streamis-project/pom.xml new file mode 100644 index 000000000..4a4d89197 --- /dev/null +++ b/streamis-project/pom.xml @@ -0,0 +1,40 @@ + + + + + + streamis + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-project + pom + + 8 + 8 + + + + + streamis-project-common + streamis-project-server + + + \ No newline at end of file diff --git a/streamis-project/streamis-project-common/pom.xml b/streamis-project/streamis-project-common/pom.xml new file mode 100644 index 000000000..4880bf505 --- /dev/null +++ b/streamis-project/streamis-project-common/pom.xml @@ -0,0 +1,75 @@ + + + + + + 
streamis-project + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-project-common + + + 8 + 8 + + + + + + + org.apache.linkis + linkis-common + + + + org.apache.linkis + linkis-protocol + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + + + \ No newline at end of file diff --git a/streamis-project/streamis-project-common/src/main/scala/com/webank/wedatasphere/streamis/project/common/StreamisProjectProtocol.scala b/streamis-project/streamis-project-common/src/main/scala/com/webank/wedatasphere/streamis/project/common/StreamisProjectProtocol.scala new file mode 100644 index 000000000..f417ed919 --- /dev/null +++ b/streamis-project/streamis-project-common/src/main/scala/com/webank/wedatasphere/streamis/project/common/StreamisProjectProtocol.scala @@ -0,0 +1,47 @@ +package com.webank.wedatasphere.streamis.project.common + +import org.apache.linkis.protocol.Protocol + +/** + * created by yangzhiyue on 2021/4/25 + * Description: + */ +trait StreamisProjectProtocol extends Protocol + + + + +case class CreateStreamProjectRequest(projectName:String, + description:String, + createBy:String) extends StreamisProjectProtocol + + +case class CreateStreamProjectResponse(status:Int, + projectName:String, + streamisProjectId:Long, + errorMessage:String) extends StreamisProjectProtocol + + + + +case class UpdateStreamProjectRequest(streamisProjectId:Long, + projectName:String, + description:String, + updateBy:String) extends StreamisProjectProtocol + + +case class UpdateStreamProjectResponse(status:Int, + streamisProjectId:Long, + errorMessage:String)extends StreamisProjectProtocol + + +case class DeleteStreamProjectRequest(streamisProjectId:Long, + projectName:String) extends StreamisProjectProtocol + + +case class DeleteStreamProjectResponse(status:Long, + projectName:String, + errorMessage:String) extends 
StreamisProjectProtocol + + + diff --git a/streamis-project/streamis-project-server/pom.xml b/streamis-project/streamis-project-server/pom.xml new file mode 100644 index 000000000..4374f4e08 --- /dev/null +++ b/streamis-project/streamis-project-server/pom.xml @@ -0,0 +1,112 @@ + + + + + + streamis-project + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-project-server + + + 8 + 8 + + + + + + + + org.apache.linkis + linkis-module + ${linkis.version} + + + + + org.apache.linkis + linkis-mybatis + ${linkis.version} + + + + com.webank.wedatasphere.streamis + streamis-project-common + ${streamis.version} + + + + org.apache.linkis + linkis-rpc + ${linkis.version} + + + + + org.apache.commons + commons-math3 + + + com.webank.wedatasphere.dss + dss-sso-integration-standard + 1.0.1 + compile + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + + + src/main/java + + **/*.xml + + + + + + + + \ No newline at end of file diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/constant/ProjectUserPrivilegeEnum.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/constant/ProjectUserPrivilegeEnum.java new file mode 100644 index 000000000..ce371cf47 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/constant/ProjectUserPrivilegeEnum.java @@ -0,0 +1,33 @@ +package com.webank.wedatasphere.streamis.project.server.constant; + +public enum ProjectUserPrivilegeEnum { + + RELEASE(1,"发布权限"), + EDIT(2,"编辑权限"), + ACCESS(3,"查看权限"); + + ProjectUserPrivilegeEnum(int rank, String name) { + this.rank = rank; + this.name = name; + } + + private int rank; + + private String name; + + public int getRank() { + return rank; + } + + public void setRank(int rank) { + this.rank = rank; + } 
+ + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectMapper.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectMapper.java new file mode 100644 index 000000000..d60a10508 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectMapper.java @@ -0,0 +1,27 @@ +package com.webank.wedatasphere.streamis.project.server.dao; + +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProject; +import org.apache.ibatis.annotations.Mapper; + +import java.util.List; + +/** + * Description: + */ +@Mapper +public interface StreamisProjectMapper { + + void createProject(StreamisProject streamisProject); + + StreamisProject findProjectById(Long projectId); + + List findProjectIdByName(String name); + + void deleteProjectById(Long projectId); + + void deleteProjectByName(String projectName); + + void updateProject(StreamisProject streamisProject); + + List findProjectIdsByNames(List names); +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectPrivilegeMapper.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectPrivilegeMapper.java new file mode 100644 index 000000000..c8b06f6a0 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectPrivilegeMapper.java @@ -0,0 +1,23 @@ +package com.webank.wedatasphere.streamis.project.server.dao; + +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProjectPrivilege; +import org.apache.ibatis.annotations.Mapper; +import 
org.apache.ibatis.annotations.Param; + +import java.util.List; + +@Mapper +public interface StreamisProjectPrivilegeMapper { + + void addProjectPrivilege(@Param("list") List projectPrivilegeList); + + List findProjectPrivilegeByProjectId(Long projectId); + + void deleteProjectPrivilegeByProjectId(Long projectId); + + void deleteProjectPrivilegeById(@Param("list") List projectPrivilegeList); + + List getProjectPrivilege(@Param("projectId") Long projectId, @Param("userName") String userName); + + List findProjectPrivilegeByProjectIds(List projectId); +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectMapper.xml b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectMapper.xml new file mode 100644 index 000000000..8563cb82b --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectMapper.xml @@ -0,0 +1,58 @@ + + + + + + + + + + + insert into linkis_stream_project (name,workspace_id,create_by,create_time) + values ( + #{name},#{workspaceId},#{createBy},now() + ) + + + + update linkis_stream_project set + + last_update_by = #{lastUpdateBy}, + + last_update_time = now() + where id = #{id} + + + + update linkis_stream_project set is_deleted = 1 where id = #{id} + + + + + + + + diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectPrivilegeMapper.xml b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectPrivilegeMapper.xml new file mode 100644 index 000000000..3556f08e1 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectPrivilegeMapper.xml @@ -0,0 +1,43 @@ + + + + + + + + + insert into linkis_stream_project_privilege 
(project_id,user_name,privilege) + values + + #{item.projectId}, #{item.userName}, #{item.privilege} + + + + + + + delete from linkis_stream_project_privilege where project_id = #{projectId} + + + + delete from linkis_stream_project_privilege where id in + + #{item.id} + + + + + + + + diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/StreamisProject.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/StreamisProject.java new file mode 100644 index 000000000..8462de4ac --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/StreamisProject.java @@ -0,0 +1,92 @@ +package com.webank.wedatasphere.streamis.project.server.entity; + +import java.util.Date; +import java.util.List; + +/** + * Description: + */ +public class StreamisProject { + + + private Long id; + private String name; + private Long workspaceId; + private String createBy; + private Date createTime; + private String lastUpdateBy; + private Date lastUpdateTime; + private List projectPrivileges; + + public StreamisProject() { + } + + public StreamisProject(String name, Long workspaceId){ + this.name = name; + this.workspaceId = workspaceId; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + public void setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String 
getLastUpdateBy() { + return lastUpdateBy; + } + + public void setLastUpdateBy(String lastUpdateBy) { + this.lastUpdateBy = lastUpdateBy; + } + + public Date getLastUpdateTime() { + return lastUpdateTime; + } + + public void setLastUpdateTime(Date lastUpdateTime) { + this.lastUpdateTime = lastUpdateTime; + } + + public List getProjectPrivileges() { + return projectPrivileges; + } + + public void setProjectPrivileges(List projectPrivileges) { + this.projectPrivileges = projectPrivileges; + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/StreamisProjectPrivilege.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/StreamisProjectPrivilege.java new file mode 100644 index 000000000..7f6cee735 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/StreamisProjectPrivilege.java @@ -0,0 +1,76 @@ +package com.webank.wedatasphere.streamis.project.server.entity; + +import java.util.Objects; + +public class StreamisProjectPrivilege { + private Long id; + private Long projectId; + private String userName; + private Integer privilege; + + public StreamisProjectPrivilege() { + } + + public StreamisProjectPrivilege(Long projectId, String userName, Integer privilege) { + this.projectId = projectId; + this.userName = userName; + this.privilege = privilege; + } + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public Integer getPrivilege() { + return privilege; + } + + public void setPrivilege(Integer privilege) { + this.privilege = privilege; + } + 
+ @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + StreamisProjectPrivilege that = (StreamisProjectPrivilege) o; + return Objects.equals(projectId, that.projectId) && + Objects.equals(userName, that.userName) && + Objects.equals(privilege, that.privilege); + } + + @Override + public int hashCode() { + return Objects.hash(projectId, userName, privilege); + } + + @Override + public String toString() { + return "StreamisProjectPrivilege{" + + "id=" + id + + ", projectId=" + projectId + + ", userName='" + userName + '\'' + + ", privilege=" + privilege + + '}'; + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/CreateProjectRequest.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/CreateProjectRequest.java new file mode 100644 index 000000000..a8a7a4da2 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/CreateProjectRequest.java @@ -0,0 +1,75 @@ +package com.webank.wedatasphere.streamis.project.server.entity.request; + +import javax.validation.constraints.NotNull; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.List; + +/** + * created by yangzhiyue on 2021/4/20 + * Description: + */ +@XmlRootElement +public class CreateProjectRequest { + + @NotNull(message = "projectName can not be null") + private String projectName; + + private Long workspaceId; + + private List accessUsers; + + private List editUsers; + + private List releaseUsers; + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + public void setWorkspaceId(Long workspaceId) { + this.workspaceId = 
workspaceId; + } + + public List getAccessUsers() { + return accessUsers; + } + + public void setAccessUsers(List accessUsers) { + this.accessUsers = accessUsers; + } + + public List getEditUsers() { + return editUsers; + } + + public void setEditUsers(List editUsers) { + this.editUsers = editUsers; + } + + public List getReleaseUsers() { + return releaseUsers; + } + + public void setReleaseUsers(List releaseUsers) { + this.releaseUsers = releaseUsers; + } + + @Override + public String toString() { + return "CreateProjectRequest{" + + "projectName='" + projectName + '\'' + + ", workspaceId=" + workspaceId + + ", accessUsers=" + accessUsers + + ", editUsers=" + editUsers + + ", releaseUsers=" + releaseUsers + + '}'; + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/DeleteProjectRequest.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/DeleteProjectRequest.java new file mode 100644 index 000000000..09af8ed5c --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/DeleteProjectRequest.java @@ -0,0 +1,38 @@ +package com.webank.wedatasphere.streamis.project.server.entity.request; + +import javax.validation.constraints.NotNull; +import javax.xml.bind.annotation.XmlRootElement; + + +@XmlRootElement +public class DeleteProjectRequest { + + @NotNull(message = "streamis projectId can not be null") + private Long projectId; + + private String projectName; + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + @Override + public String toString() { + return "DeleteProjectRequest{" + + "projectId=" + 
projectId + + ", projectName='" + projectName + '\'' + + '}'; + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/UpdateProjectRequest.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/UpdateProjectRequest.java new file mode 100644 index 000000000..aa75850a9 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/entity/request/UpdateProjectRequest.java @@ -0,0 +1,84 @@ +package com.webank.wedatasphere.streamis.project.server.entity.request; + +import javax.validation.constraints.NotNull; +import javax.xml.bind.annotation.XmlRootElement; +import java.util.List; + + +@XmlRootElement +public class UpdateProjectRequest { + + @NotNull(message = "projectId can not be null") + private Long projectId; + + @NotNull(message = "projectName can not be null") + private String projectName; + + private Long workspaceId; + + private List accessUsers; + + private List editUsers; + + private List releaseUsers; + + public Long getProjectId() { + return projectId; + } + + public void setProjectId(Long projectId) { + this.projectId = projectId; + } + + public String getProjectName() { + return projectName; + } + + public void setProjectName(String projectName) { + this.projectName = projectName; + } + + public Long getWorkspaceId() { + return workspaceId; + } + + public void setWorkspaceId(Long workspaceId) { + this.workspaceId = workspaceId; + } + + public List getAccessUsers() { + return accessUsers; + } + + public void setAccessUsers(List accessUsers) { + this.accessUsers = accessUsers; + } + + public List getEditUsers() { + return editUsers; + } + + public void setEditUsers(List editUsers) { + this.editUsers = editUsers; + } + + public List getReleaseUsers() { + return releaseUsers; + } + + public void setReleaseUsers(List releaseUsers) { + 
this.releaseUsers = releaseUsers; + } + + @Override + public String toString() { + return "UpdateProjectRequest{" + + "projectId=" + projectId + + ", projectName='" + projectName + '\'' + + ", workspaceId=" + workspaceId + + ", accessUsers=" + accessUsers + + ", editUsers=" + editUsers + + ", releaseUsers=" + releaseUsers + + '}'; + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/StreamisProjectErrorException.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/StreamisProjectErrorException.java new file mode 100644 index 000000000..ee8e5c618 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/StreamisProjectErrorException.java @@ -0,0 +1,20 @@ +package com.webank.wedatasphere.streamis.project.server.exception; + +import org.apache.linkis.common.exception.ErrorException; + +/** + * Description: + */ +public class StreamisProjectErrorException extends ErrorException { + + + public StreamisProjectErrorException(int errCode, String desc) { + super(errCode, desc); + } + + public StreamisProjectErrorException(int errorCode, String desc, Throwable throwable){ + super(errorCode, desc); + this.initCause(throwable); + } + +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/ValidExceptionHander.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/ValidExceptionHander.java new file mode 100644 index 000000000..a7ef12d18 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/ValidExceptionHander.java @@ -0,0 +1,51 @@ +package com.webank.wedatasphere.streamis.project.server.exception; + +import org.apache.commons.lang.StringUtils; 
+import org.apache.linkis.server.Message; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.validation.BindingResult; +import org.springframework.web.bind.MethodArgumentNotValidException; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.RestControllerAdvice; +import org.springframework.web.method.annotation.MethodArgumentTypeMismatchException; + +import javax.validation.ConstraintViolation; +import javax.validation.ConstraintViolationException; +import java.util.ArrayList; +import java.util.Set; + +@RestControllerAdvice +public class ValidExceptionHander { + + private static final Logger LOG = LoggerFactory.getLogger(ValidExceptionHander.class); + + @ExceptionHandler(value = MethodArgumentNotValidException.class) + public Message handle(MethodArgumentNotValidException e){ + LOG.error("Request parameter validation exception", e); + BindingResult bindingResult = e.getBindingResult(); + StringBuilder stringBuilder = new StringBuilder(); + bindingResult.getFieldErrors().forEach((item) -> stringBuilder.append(item.getDefaultMessage()).append(";")); + return Message.error("failed to validate request parameter, detail:"+stringBuilder.toString()); + } + + @ExceptionHandler(value = MethodArgumentTypeMismatchException.class) + public Message handle(MethodArgumentTypeMismatchException e){ + LOG.error("Request parameter validation exception", e); + return Message.error("failed to validate request parameter, detail:"+e.getMessage()); + } + + @ExceptionHandler(value = ConstraintViolationException.class) + public Message handle(ConstraintViolationException e){ + LOG.error("Request parameter validation exception", e); + ArrayList list = new ArrayList<>(); + Set> constraintViolations = e.getConstraintViolations(); + for (ConstraintViolation violation : constraintViolations) { + list.add(violation.getMessage()); + } + return Message.error("failed to validate request parameter, 
detail:"+ StringUtils.join(list, ",")); + } + + + +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectPrivilegeRestfulApi.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectPrivilegeRestfulApi.java new file mode 100644 index 000000000..9bb11542c --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectPrivilegeRestfulApi.java @@ -0,0 +1,174 @@ +package com.webank.wedatasphere.streamis.project.server.restful; + +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProjectPrivilege; +import com.webank.wedatasphere.streamis.project.server.service.StreamisProjectPrivilegeService; +import com.webank.wedatasphere.streamis.project.server.service.StreamisProjectService; +import com.webank.wedatasphere.streamis.project.server.utils.StreamisProjectRestfulUtils; +import org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.commons.math3.util.Pair; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.util.CollectionUtils; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; + +@RequestMapping(path = "/streamis/project/projectPrivilege") +@RestController +public class StreamisProjectPrivilegeRestfulApi { + + private static final Logger LOGGER = 
LoggerFactory.getLogger(StreamisProjectPrivilegeRestfulApi.class); + + @Autowired + private StreamisProjectPrivilegeService projectPrivilegeService; + @Autowired + private StreamisProjectService projectService; + + @RequestMapping(path = "/getProjectPrivilege", method = RequestMethod.GET) + public Message getProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId, + @RequestParam(value = "projectName", required = false) String projectName) { + String username = SecurityFilter.getLoginUsername(request); + LOGGER.info("user {} obtain project[id:{} name:{}] privilege",username,projectId,projectName); + try { + if(projectId==null || projectId == 0) { + List projectIds = projectService.queryProjectIds(projectName); + if (!CollectionUtils.isEmpty(projectIds)) projectId = projectIds.get(0); + } + List projectPrivileges = projectPrivilegeService.getProjectPrivilege(projectId, username); + return StreamisProjectRestfulUtils.dealOk("Successfully obtained the projectPrivileges", + new Pair<>("projectPrivileges", projectPrivileges)); + } catch (Exception e) { + LOGGER.error("failed to obtain the release privilege for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to obtain the release privilege, reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/hasReleasePrivilege", method = RequestMethod.GET) + public Message hasReleaseProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId, + @RequestParam(value = "projectName", required = false) String projectName) { + String username = SecurityFilter.getLoginUsername(request); + LOGGER.info("user {} obtain project[id:{} name:{}] release privilege",username,projectId,projectName); + try { + if(projectId==null || projectId == 0) { + List projectIds = projectService.queryProjectIds(projectName); + if (!CollectionUtils.isEmpty(projectIds)) projectId = 
projectIds.get(0); + } + boolean hasReleaseProjectPrivilege = projectPrivilegeService.hasReleaseProjectPrivilege(projectId, username); + return StreamisProjectRestfulUtils.dealOk("Successfully obtained the release privilege", + new Pair<>("releasePrivilege", hasReleaseProjectPrivilege)); + } catch (Exception e) { + LOGGER.error("failed to obtain the release privilege for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to obtain the release privilege, reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/hasEditPrivilege", method = RequestMethod.GET) + public Message hasEditProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId, + @RequestParam(value = "projectName", required = false) String projectName) { + String username = SecurityFilter.getLoginUsername(request); + LOGGER.info("user {} obtain project[id:{} name:{}] edit privilege",username,projectId,projectName); + try { + if(projectId==null || projectId == 0) { + List projectIds = projectService.queryProjectIds(projectName); + if (!CollectionUtils.isEmpty(projectIds)) projectId = projectIds.get(0); + } + boolean hasEditProjectPrivilege = projectPrivilegeService.hasEditProjectPrivilege(projectId, username); + return StreamisProjectRestfulUtils.dealOk("Successfully obtained the edit privilege", + new Pair<>("editPrivilege", hasEditProjectPrivilege)); + } catch (Exception e) { + LOGGER.error("failed to obtain the edit privilege for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to obtain the edit privilege, reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/hasAccessPrivilege", method = RequestMethod.GET) + public Message hasAccessProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId, + @RequestParam(value = "projectName", required = false) String projectName) { 
+ String username = SecurityFilter.getLoginUsername(request); + LOGGER.info("user {} obtain project[id:{} name:{}] access privilege",username,projectId,projectName); + try { + if(projectId==null || projectId == 0) { + List projectIds = projectService.queryProjectIds(projectName); + if (!CollectionUtils.isEmpty(projectIds)) projectId = projectIds.get(0); + } + boolean hasAccessProjectPrivilege = projectPrivilegeService.hasAccessProjectPrivilege(projectId, username); + return StreamisProjectRestfulUtils.dealOk("Successfully obtained the access privilege", + new Pair<>("accessPrivilege", hasAccessProjectPrivilege)); + } catch (Exception e) { + LOGGER.error("failed to obtain the access privilege for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to obtain the access privilege, reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/bulk/hasReleasePrivilege", method = RequestMethod.GET) + public Message hasReleaseProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectIds", required = false) List projectIds, + @RequestParam(value = "projectNames", required = false) List projectNames) { + String username = SecurityFilter.getLoginUsername(request); + LOGGER.info("user {} obtain bulk project[id:{} name:{}] release privilege",username,projectIds,projectNames); + try { + projectIds = Optional.ofNullable(projectIds).orElse(new ArrayList<>()); + if(!CollectionUtils.isEmpty(projectNames)) { + List ids = projectService.queryProjectIdsByNames(projectNames); + if (!CollectionUtils.isEmpty(ids)) projectIds.addAll(ids); + } + LOGGER.info("obtain bulk projectIds {} release privilege",projectIds); + boolean hasReleaseProjectPrivilege = projectPrivilegeService.hasReleaseProjectPrivilege(projectIds, username); + return StreamisProjectRestfulUtils.dealOk("Successfully obtained the release privilege", + new Pair<>("releasePrivilege", hasReleaseProjectPrivilege)); + } catch (Exception e) { + 
LOGGER.error("failed to obtain the release privilege for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to obtain the release privilege, reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/bulk/hasEditPrivilege", method = RequestMethod.GET) + public Message hasEditProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectIds", required = false) List projectIds, + @RequestParam(value = "projectNames", required = false) List projectNames) { + String username = SecurityFilter.getLoginUsername(request); + LOGGER.info("user {} obtain bulk project[id:{} name:{}] edit privilege",username,projectIds,projectNames); + try { + projectIds = Optional.ofNullable(projectIds).orElse(new ArrayList<>()); + if(!CollectionUtils.isEmpty(projectNames)) { + List ids = projectService.queryProjectIdsByNames(projectNames); + if (!CollectionUtils.isEmpty(ids)) projectIds.addAll(ids); + } + LOGGER.info("obtain bulk projectIds {} edit privilege",projectIds); + boolean hasEditProjectPrivilege = projectPrivilegeService.hasEditProjectPrivilege(projectIds, username); + return StreamisProjectRestfulUtils.dealOk("Successfully obtained the edit privilege", + new Pair<>("editPrivilege", hasEditProjectPrivilege)); + } catch (Exception e) { + LOGGER.error("failed to obtain the edit privilege for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to obtain the edit privilege, reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/bulk/hasAccessPrivilege", method = RequestMethod.GET) + public Message hasAccessProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectIds", required = false) List projectIds, + @RequestParam(value = "projectNames", required = false) List projectNames) { + String username = SecurityFilter.getLoginUsername(request); + LOGGER.info("user {} obtain bulk project[id:{} name:{}] access 
privilege",username,projectIds,projectNames); + try { + projectIds = Optional.ofNullable(projectIds).orElse(new ArrayList<>()); + if(!CollectionUtils.isEmpty(projectNames)) { + List ids = projectService.queryProjectIdsByNames(projectNames); + if (!CollectionUtils.isEmpty(ids)) projectIds.addAll(ids); + } + LOGGER.info("obtain bulk projectIds {} access privilege",projectIds); + boolean hasAccessProjectPrivilege = projectPrivilegeService.hasAccessProjectPrivilege(projectIds, username); + return StreamisProjectRestfulUtils.dealOk("Successfully obtained the access privilege", + new Pair<>("accessPrivilege", hasAccessProjectPrivilege)); + } catch (Exception e) { + LOGGER.error("failed to obtain the access privilege for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to obtain the access privilege, reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } +} \ No newline at end of file diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectRestfulApi.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectRestfulApi.java new file mode 100644 index 000000000..8f5ba414f --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectRestfulApi.java @@ -0,0 +1,111 @@ +package com.webank.wedatasphere.streamis.project.server.restful; + + +import com.webank.wedatasphere.streamis.project.server.constant.ProjectUserPrivilegeEnum; +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProject; +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProjectPrivilege; +import com.webank.wedatasphere.streamis.project.server.entity.request.CreateProjectRequest; +import com.webank.wedatasphere.streamis.project.server.entity.request.UpdateProjectRequest; +import 
com.webank.wedatasphere.streamis.project.server.service.StreamisProjectService; +import com.webank.wedatasphere.streamis.project.server.utils.StreamisProjectRestfulUtils; +import org.apache.commons.lang.exception.ExceptionUtils; +import org.apache.commons.math3.util.Pair; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.security.SecurityFilter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; +import java.util.List; + +import static com.webank.wedatasphere.streamis.project.server.utils.StreamisProjectPrivilegeUtils.createStreamisProjectPrivilege; + + +/** + * this is the restful class for streamis project + */ + +@RequestMapping(path = "/streamis/project") +@RestController +public class StreamisProjectRestfulApi { + + private static final Logger LOGGER = LoggerFactory.getLogger(StreamisProjectRestfulApi.class); + + @Autowired + private StreamisProjectService projectService; + + @RequestMapping(path = "/createProject", method = RequestMethod.POST) + public Message createProject( HttpServletRequest request,@Validated @RequestBody CreateProjectRequest createProjectRequest){ + LOGGER.info("enter createProject, requestBody is {}",createProjectRequest.toString()); + String username = SecurityFilter.getLoginUsername(request); + try{ + StreamisProject streamisProject = new StreamisProject(createProjectRequest.getProjectName(), createProjectRequest.getWorkspaceId()); + streamisProject.setCreateBy(username); + List privilegeList = new ArrayList<>(); + privilegeList.addAll(createStreamisProjectPrivilege(streamisProject.getId(),createProjectRequest.getReleaseUsers(),ProjectUserPrivilegeEnum.RELEASE.getRank())); + 
privilegeList.addAll(createStreamisProjectPrivilege(streamisProject.getId(),createProjectRequest.getEditUsers(), ProjectUserPrivilegeEnum.EDIT.getRank())); + privilegeList.addAll(createStreamisProjectPrivilege(streamisProject.getId(),createProjectRequest.getAccessUsers(),ProjectUserPrivilegeEnum.ACCESS.getRank())); + streamisProject.setProjectPrivileges(privilegeList); + streamisProject = projectService.createProject(streamisProject); + return StreamisProjectRestfulUtils.dealOk("create project success", + new Pair<>("projectName", streamisProject.getName()), new Pair<>("projectId", streamisProject.getId())); + }catch(Exception e){ + LOGGER.error("failed to create project for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to create project,reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/updateProject", method = RequestMethod.PUT) + public Message updateProject( HttpServletRequest request, @Validated @RequestBody UpdateProjectRequest updateProjectRequest){ + LOGGER.info("enter updateProject, requestBody is {}",updateProjectRequest.toString()); + String username = SecurityFilter.getLoginUsername(request); + try{ + StreamisProject streamisProject = new StreamisProject(updateProjectRequest.getProjectName(), updateProjectRequest.getWorkspaceId()); + streamisProject.setId(updateProjectRequest.getProjectId()); + streamisProject.setLastUpdateBy(username); + List privilegeList = new ArrayList<>(); + privilegeList.addAll(createStreamisProjectPrivilege(streamisProject.getId(),updateProjectRequest.getReleaseUsers(),ProjectUserPrivilegeEnum.RELEASE.getRank())); + privilegeList.addAll(createStreamisProjectPrivilege(streamisProject.getId(),updateProjectRequest.getEditUsers(), ProjectUserPrivilegeEnum.EDIT.getRank())); + privilegeList.addAll(createStreamisProjectPrivilege(streamisProject.getId(),updateProjectRequest.getAccessUsers(),ProjectUserPrivilegeEnum.ACCESS.getRank())); + 
streamisProject.setProjectPrivileges(privilegeList); + projectService.updateProject(streamisProject); + return StreamisProjectRestfulUtils.dealOk("update project success"); + }catch(Exception e){ + LOGGER.error("failed to update project for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to update project,reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/deleteProject", method = RequestMethod.DELETE) + public Message deleteProject( HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId){ + LOGGER.info("enter deleteProject, requestParam projectId is {}",projectId); + String username = SecurityFilter.getLoginUsername(request); + try{ + projectService.deleteProjectById(projectId); + return StreamisProjectRestfulUtils.dealOk("delete project success"); + }catch(Exception e){ + LOGGER.error("failed to delete project for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to delete project,reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + @RequestMapping(path = "/searchProject", method = RequestMethod.GET) + public Message searchProject( HttpServletRequest request,@RequestParam(value = "projectName", required = false) String projectName){ + LOGGER.info("enter searchProject, requestParam projectName is {}",projectName); + String username = SecurityFilter.getLoginUsername(request); + try{ + List projectIds = projectService.queryProjectIds(projectName); + return StreamisProjectRestfulUtils.dealOk("search project success", + new Pair<>("projectId", projectIds.isEmpty()?null:projectIds.get(0))); + }catch(Exception e){ + LOGGER.error("failed to search project for user {}", username, e); + return StreamisProjectRestfulUtils.dealError("failed to search project,reason is:" + ExceptionUtils.getRootCauseMessage(e)); + } + } + + +} diff --git 
a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/StreamisProjectPrivilegeService.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/StreamisProjectPrivilegeService.java new file mode 100644 index 000000000..855259c9f --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/StreamisProjectPrivilegeService.java @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.project.server.service; + +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProjectPrivilege; + +import java.util.List; + +public interface StreamisProjectPrivilegeService { + + void addProjectPrivilege(List streamisProjectPrivilegeList); + + void updateProjectPrivilege(List streamisProjectPrivilegeList); + + void deleteProjectPrivilegeByProjectId(Long projectId); + + List getProjectPrivilege(Long projectId, String username); + + boolean hasReleaseProjectPrivilege(Long projectId, String username); + + boolean hasEditProjectPrivilege(Long projectId, String username); + + boolean hasAccessProjectPrivilege(Long projectId, String username); + + boolean hasReleaseProjectPrivilege(List projectId, String username); + + boolean hasEditProjectPrivilege(List projectId, String username); + + boolean hasAccessProjectPrivilege(List projectId, String username); + +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/StreamisProjectService.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/StreamisProjectService.java new file mode 100644 index 000000000..fad783758 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/StreamisProjectService.java @@ -0,0 +1,24 @@ +package 
com.webank.wedatasphere.streamis.project.server.service; + + +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProject; +import com.webank.wedatasphere.streamis.project.server.exception.StreamisProjectErrorException; + +import java.util.List; + +/** + * Description: + */ +public interface StreamisProjectService { + + StreamisProject createProject(StreamisProject streamisProject) throws StreamisProjectErrorException; + + void updateProject(StreamisProject streamisProject) throws StreamisProjectErrorException; + + void deleteProjectById(Long projectId) throws StreamisProjectErrorException; + + List queryProjectIds(String projectName) throws StreamisProjectErrorException; + + List queryProjectIdsByNames(List projectNames); + +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectPrivilegeServiceImpl.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectPrivilegeServiceImpl.java new file mode 100644 index 000000000..cd0e4a940 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectPrivilegeServiceImpl.java @@ -0,0 +1,157 @@ +package com.webank.wedatasphere.streamis.project.server.service.impl; + +import com.webank.wedatasphere.streamis.project.server.constant.ProjectUserPrivilegeEnum; +import com.webank.wedatasphere.streamis.project.server.dao.StreamisProjectPrivilegeMapper; +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProjectPrivilege; +import com.webank.wedatasphere.streamis.project.server.service.StreamisProjectPrivilegeService; +import org.apache.commons.collections.CollectionUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; 
+import org.springframework.transaction.annotation.Transactional; + +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; + +@Service +public class StreamisProjectPrivilegeServiceImpl implements StreamisProjectPrivilegeService { + + private static final Logger LOGGER = LoggerFactory.getLogger(StreamisProjectPrivilegeServiceImpl.class); + + @Autowired + private StreamisProjectPrivilegeMapper streamisProjectPrivilegeMapper; + + @Override + @Transactional(rollbackFor = Exception.class) + public void addProjectPrivilege(List projectPrivilegeList) { + if(CollectionUtils.isEmpty(projectPrivilegeList)) { + return; + } + streamisProjectPrivilegeMapper.addProjectPrivilege(projectPrivilegeList); + LOGGER.info("create project privilege finish and projectId is {}",projectPrivilegeList.get(0).getProjectId()); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void updateProjectPrivilege(List dssPrivilegeList) { + if(CollectionUtils.isEmpty(dssPrivilegeList)) { + return; + } + List streamisAllPrivilegeList = streamisProjectPrivilegeMapper.findProjectPrivilegeByProjectId(dssPrivilegeList.get(0).getProjectId()); + List addPrivilegeList = (ArrayList)(CollectionUtils.subtract(dssPrivilegeList, streamisAllPrivilegeList)); + List delPrivilegeList = (ArrayList) CollectionUtils.subtract(streamisAllPrivilegeList, dssPrivilegeList); + if(!CollectionUtils.isEmpty(addPrivilegeList)) { + streamisProjectPrivilegeMapper.addProjectPrivilege(addPrivilegeList); + } + if(!CollectionUtils.isEmpty(delPrivilegeList)) { + streamisProjectPrivilegeMapper.deleteProjectPrivilegeById(delPrivilegeList); + } + LOGGER.info("update project privilege finish and projectId is {}", dssPrivilegeList.get(0).getProjectId()); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void deleteProjectPrivilegeByProjectId(Long projectId) { + streamisProjectPrivilegeMapper.deleteProjectPrivilegeByProjectId(projectId); + LOGGER.info("delete 
privilege finish and projectId is {}", projectId ); + } + + @Override + public List getProjectPrivilege(Long projectId, String username) { + return streamisProjectPrivilegeMapper.getProjectPrivilege(projectId, username); + } + + @Override + public boolean hasReleaseProjectPrivilege(Long projectId, String username) { + if(projectId == null || projectId == 0) return false; + List privileges = streamisProjectPrivilegeMapper.getProjectPrivilege(projectId, username); + if(CollectionUtils.isEmpty(privileges)){ + return false; + } + List privilegeList = privileges.stream() + .filter(privilege -> ProjectUserPrivilegeEnum.RELEASE.getRank() == privilege.getPrivilege()) + .collect(Collectors.toList()); + return CollectionUtils.isNotEmpty(privilegeList); + } + + @Override + public boolean hasEditProjectPrivilege(Long projectId, String username) { + List privileges = streamisProjectPrivilegeMapper.getProjectPrivilege(projectId, username); + if(CollectionUtils.isEmpty(privileges)){ + return false; + } + List privilegeList = privileges.stream() + .filter(privilege -> ProjectUserPrivilegeEnum.RELEASE.getRank() == privilege.getPrivilege() + || ProjectUserPrivilegeEnum.EDIT.getRank() == privilege.getPrivilege()) + .collect(Collectors.toList()); + return CollectionUtils.isNotEmpty(privilegeList); + } + + @Override + public boolean hasAccessProjectPrivilege(Long projectId, String username) { + List privileges = streamisProjectPrivilegeMapper.getProjectPrivilege(projectId, username); + if(CollectionUtils.isEmpty(privileges)){ + return false; + } + List privilegeList = privileges.stream() + .filter(privilege -> ProjectUserPrivilegeEnum.RELEASE.getRank() == privilege.getPrivilege() + || ProjectUserPrivilegeEnum.EDIT.getRank() == privilege.getPrivilege() + || ProjectUserPrivilegeEnum.ACCESS.getRank() == privilege.getPrivilege()) + .collect(Collectors.toList()); + return CollectionUtils.isNotEmpty(privilegeList); + } + + @Override + public boolean hasReleaseProjectPrivilege(List 
projectIds, String username) { + if(CollectionUtils.isEmpty(projectIds)){ + return false; + } + List privileges = streamisProjectPrivilegeMapper.findProjectPrivilegeByProjectIds(projectIds); + if(CollectionUtils.isEmpty(privileges)){ + return false; + } + List privilegeList = privileges.stream() + .filter(privilege -> username!=null && username.equals(privilege.getUserName()) + && (ProjectUserPrivilegeEnum.RELEASE.getRank() == privilege.getPrivilege())) + .collect(Collectors.toList()); + return CollectionUtils.isNotEmpty(privilegeList); + } + + @Override + public boolean hasEditProjectPrivilege(List projectIds, String username) { + if(CollectionUtils.isEmpty(projectIds)){ + return false; + } + List privileges = streamisProjectPrivilegeMapper.findProjectPrivilegeByProjectIds(projectIds); + if(CollectionUtils.isEmpty(privileges)){ + return false; + } + List privilegeList = privileges.stream() + .filter(privilege -> username!=null && username.equals(privilege.getUserName()) + && (ProjectUserPrivilegeEnum.RELEASE.getRank() == privilege.getPrivilege() + || ProjectUserPrivilegeEnum.EDIT.getRank() == privilege.getPrivilege())) + .collect(Collectors.toList()); + return CollectionUtils.isNotEmpty(privilegeList); + } + + @Override + public boolean hasAccessProjectPrivilege(List projectIds, String username) { + if(CollectionUtils.isEmpty(projectIds)){ + return false; + } + List privileges = streamisProjectPrivilegeMapper.findProjectPrivilegeByProjectIds(projectIds); + if(CollectionUtils.isEmpty(privileges)){ + return false; + } + List privilegeList = privileges.stream() + .filter(privilege -> username!=null && username.equals(privilege.getUserName()) + && (ProjectUserPrivilegeEnum.RELEASE.getRank() == privilege.getPrivilege() + || ProjectUserPrivilegeEnum.EDIT.getRank() == privilege.getPrivilege() + || ProjectUserPrivilegeEnum.ACCESS.getRank() == privilege.getPrivilege())) + .collect(Collectors.toList()); + return CollectionUtils.isNotEmpty(privilegeList); + } + +} diff --git 
a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectServiceImpl.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectServiceImpl.java new file mode 100644 index 000000000..da594155b --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectServiceImpl.java @@ -0,0 +1,80 @@ +package com.webank.wedatasphere.streamis.project.server.service.impl; + +import com.webank.wedatasphere.streamis.project.server.dao.StreamisProjectMapper; +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProject; +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProjectPrivilege; +import com.webank.wedatasphere.streamis.project.server.exception.StreamisProjectErrorException; +import com.webank.wedatasphere.streamis.project.server.service.StreamisProjectPrivilegeService; +import com.webank.wedatasphere.streamis.project.server.service.StreamisProjectService; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.util.CollectionUtils; + +import java.util.List; + +/** + * Description: + */ +@Service +public class StreamisProjectServiceImpl implements StreamisProjectService { + + private static final Logger LOGGER = LoggerFactory.getLogger(StreamisProjectServiceImpl.class); + + @Autowired + private StreamisProjectMapper streamisProjectMapper; + + @Autowired + private StreamisProjectPrivilegeService streamisProjectPrivilegeService; + + @Override + @Transactional(rollbackFor = Exception.class) + public StreamisProject 
createProject(StreamisProject streamisProject) throws StreamisProjectErrorException { + LOGGER.info("user {} starts to create project {}", streamisProject.getCreateBy(), streamisProject.getName()); + if (!CollectionUtils.isEmpty(streamisProjectMapper.findProjectIdByName(streamisProject.getName()))) { + throw new StreamisProjectErrorException(600500, "the project name is exist"); + } + streamisProjectMapper.createProject(streamisProject); + List projectPrivileges = streamisProject.getProjectPrivileges(); + for (StreamisProjectPrivilege privilege : projectPrivileges) privilege.setProjectId(streamisProject.getId()); + streamisProjectPrivilegeService.addProjectPrivilege(projectPrivileges); + LOGGER.info("user {} create project {} finished and id is {}", streamisProject.getCreateBy(), streamisProject.getName(), streamisProject.getId()); + return streamisProject; + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void updateProject(StreamisProject streamisProject) throws StreamisProjectErrorException { + LOGGER.info("User {} begins to update project {}", streamisProject.getLastUpdateBy(), streamisProject.getId()); + List list = streamisProjectMapper.findProjectIdByName(streamisProject.getName()); + if (!CollectionUtils.isEmpty(list) && !list.get(0).equals(streamisProject.getId())) { + throw new StreamisProjectErrorException(600500, "the project name is exist"); + } + streamisProjectMapper.updateProject(streamisProject); + streamisProjectPrivilegeService.updateProjectPrivilege(streamisProject.getProjectPrivileges()); + LOGGER.info("user {} update project finished and name is {} and id is {}",streamisProject.getLastUpdateBy(),streamisProject.getName(),streamisProject.getId()); + } + + @Override + @Transactional(rollbackFor = Exception.class) + public void deleteProjectById(Long projectId) { + streamisProjectMapper.deleteProjectById(projectId); + streamisProjectPrivilegeService.deleteProjectPrivilegeByProjectId(projectId); + LOGGER.info("delete 
projectId {} finished", projectId); + } + + @Override + @Transactional(propagation = Propagation.SUPPORTS, rollbackFor = Exception.class) + public List queryProjectIds(String projectName) { + return streamisProjectMapper.findProjectIdByName(projectName); + } + + @Override + @Transactional(propagation = Propagation.SUPPORTS, rollbackFor = Exception.class) + public List queryProjectIdsByNames(List projectNames) { + return streamisProjectMapper.findProjectIdsByNames(projectNames); + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectPrivilegeUtils.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectPrivilegeUtils.java new file mode 100644 index 000000000..0fbfc5c79 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectPrivilegeUtils.java @@ -0,0 +1,21 @@ +package com.webank.wedatasphere.streamis.project.server.utils; + +import com.webank.wedatasphere.streamis.project.server.entity.StreamisProjectPrivilege; +import org.apache.commons.collections.CollectionUtils; + +import java.util.ArrayList; +import java.util.List; + +public class StreamisProjectPrivilegeUtils { + public static List createStreamisProjectPrivilege(Long projectId, List users, int privilege){ + List retList = new ArrayList<>(); + if(CollectionUtils.isEmpty(users)){ + return retList; + } + users.forEach(user->{ + StreamisProjectPrivilege streamisProjectPrivilege = new StreamisProjectPrivilege(projectId, user, privilege); + retList.add(streamisProjectPrivilege); + }); + return retList; + } +} diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectRestfulUtils.java 
b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectRestfulUtils.java new file mode 100644 index 000000000..050deb6f8 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectRestfulUtils.java @@ -0,0 +1,31 @@ +package com.webank.wedatasphere.streamis.project.server.utils; + +import org.apache.commons.math3.util.Pair; +import org.apache.linkis.server.Message; + +import java.util.Arrays; + + +public class StreamisProjectRestfulUtils { + + + + public static Message dealError(String reason){ + return Message.error(reason); + } + + public static Message dealOk(String msg){ + return Message.ok(msg); + } + + + + @SafeVarargs + public static Message dealOk(String msg, Pair... data){ + Message message = Message.ok(msg); + Arrays.stream(data).forEach(p -> message.data(p.getKey(), p.getValue())); + return message; + } + + +} diff --git a/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiver.scala b/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiver.scala new file mode 100644 index 000000000..b74d9631b --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiver.scala @@ -0,0 +1,58 @@ +package com.webank.wedatasphere.streamis.project.server.receiver + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.rpc.{Receiver, Sender} +import com.webank.wedatasphere.streamis.project.common.{CreateStreamProjectRequest, CreateStreamProjectResponse, DeleteStreamProjectRequest, DeleteStreamProjectResponse, UpdateStreamProjectRequest, UpdateStreamProjectResponse} +import 
com.webank.wedatasphere.streamis.project.server.entity.request.CreateProjectRequest +import com.webank.wedatasphere.streamis.project.server.service.StreamisProjectService +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component + +import scala.concurrent.duration.Duration + +/** + * Description: streamis project 支持rest 和 rpc两种方式 + * 本receiver是rpc的方式 + */ +class StreamisProjectServerReceiver(projectService:StreamisProjectService) extends Receiver with Logging{ + + + + + override def receive(message: Any, sender: Sender): Unit = { + + } + + override def receiveAndReply(message: Any, sender: Sender): Any = receiveAndReply(message, Duration.create(300, "seconds"), sender) + + override def receiveAndReply(message: Any, duration: Duration, sender: Sender): Any = null +// message match { +// case createStreamProjectRequest: CreateStreamProjectRequest => +// Utils.tryCatch{ +// val streamisProject = projectService.createProject(createStreamProjectRequest) +// CreateStreamProjectResponse(0, streamisProject.getName, streamisProject.getId, "") +// }{ +// t => logger.error("failed to create project in streamis", t) +// CreateStreamProjectResponse(-1, createStreamProjectRequest.projectName, -1, t.getCause.getMessage) +// } +// case updateStreamProjectRequest: UpdateStreamProjectRequest => Utils.tryCatch{ +// projectService.updateProject(updateStreamProjectRequest) +// UpdateStreamProjectResponse(0, updateStreamProjectRequest.streamisProjectId, "") +// }{ +// t => logger.error(s"failed to update project ${updateStreamProjectRequest.projectName} in streamis",t) +// UpdateStreamProjectResponse(-1, updateStreamProjectRequest.streamisProjectId, t.getCause.getMessage) +// } +// +// case deleteStreamProjectRequest: DeleteStreamProjectRequest => Utils.tryCatch{ +// projectService.deleteProject(deleteStreamProjectRequest) +// DeleteStreamProjectResponse(0, deleteStreamProjectRequest.projectName, "") +// }{ +// t => 
logger.error(s"failed to update project ${deleteStreamProjectRequest.projectName} in streamis",t) +// DeleteStreamProjectResponse(-1, deleteStreamProjectRequest.projectName, t.getCause.getMessage) +// } +// case _ => +// } + + + +} diff --git a/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiverChooser.scala b/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiverChooser.scala new file mode 100644 index 000000000..e94f0ab65 --- /dev/null +++ b/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiverChooser.scala @@ -0,0 +1,29 @@ +package com.webank.wedatasphere.streamis.project.server.receiver + +import org.apache.linkis.rpc.{RPCMessageEvent, Receiver, ReceiverChooser} +import com.webank.wedatasphere.streamis.project.common.StreamisProjectProtocol +import com.webank.wedatasphere.streamis.project.server.service.StreamisProjectService +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.stereotype.Component + +import javax.annotation.PostConstruct + +@Component +class StreamisProjectServerReceiverChooser extends ReceiverChooser { + + @Autowired + var streamisProjectService:StreamisProjectService = _ + + + private var receiver: Option[StreamisProjectServerReceiver] = _ + + @PostConstruct + def init():Unit = { + receiver = Some(new StreamisProjectServerReceiver(streamisProjectService)) + } + + override def chooseReceiver(event: RPCMessageEvent): Option[Receiver] = event.message match { + case _: StreamisProjectProtocol => receiver + case _ => None + } +} diff --git a/streamis-server/bin/start-streamis-server.sh b/streamis-server/bin/start-streamis-server.sh new file mode 100644 index 000000000..f440e0f40 --- /dev/null +++ b/streamis-server/bin/start-streamis-server.sh @@ 
-0,0 +1,33 @@ +#!/bin/bash + +cd `dirname $0` +cd .. +HOME=`pwd` +export STREAMIS_HOME=$HOME + +export STREAMIS_PID=$HOME/bin/linkis.pid + +if [[ -f "${STREAMIS_PID}" ]]; then + pid=$(cat ${STREAMIS_PID}) + if kill -0 ${pid} >/dev/null 2>&1; then + echo "Streamis Server is already running." + exit 0; + fi +fi + +export STREAMIS_LOG_PATH=$HOME/logs +export STREAMIS_HEAP_SIZE="1G" +export STREAMIS_JAVA_OPTS="-Xms$STREAMIS_HEAP_SIZE -Xmx$STREAMIS_HEAP_SIZE -XX:+UseG1GC -XX:MaxPermSize=500m -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=11729" + +nohup java $STREAMIS_JAVA_OPTS -cp $HOME/conf:$HOME/lib/* org.apache.linkis.DataWorkCloudApplication 2>&1 > $STREAMIS_LOG_PATH/streamis.out & +pid=$! +if [[ -z "${pid}" ]]; then + echo "Streamis Server start failed!" + sleep 1 + exit 1 +else + echo "Streamis Server start succeeded!" + echo $pid > $STREAMIS_PID + sleep 1 +fi +exit 0 diff --git a/streamis-server/bin/stop-streamis-server.sh b/streamis-server/bin/stop-streamis-server.sh new file mode 100644 index 000000000..1d055d4cd --- /dev/null +++ b/streamis-server/bin/stop-streamis-server.sh @@ -0,0 +1,47 @@ +#!/bin/bash + +cd `dirname $0` +cd .. +HOME=`pwd` + +export STREAMIS_HOME_PID=$HOME/bin/linkis.pid + +function wait_for_STREAMIS_to_die() { + local pid + local count + pid=$1 + timeout=$2 + count=0 + timeoutTime=$(date "+%s") + let "timeoutTime+=$timeout" + currentTime=$(date "+%s") + forceKill=1 + + while [[ $currentTime -lt $timeoutTime ]]; do + $(kill ${pid} > /dev/null 2> /dev/null) + if kill -0 ${pid} > /dev/null 2>&1; then + sleep 3 + else + forceKill=0 + break + fi + currentTime=$(date "+%s") + done + + if [[ forceKill -ne 0 ]]; then + $(kill -9 ${pid} > /dev/null 2> /dev/null) + fi +} + +if [[ ! 
-f "${STREAMIS_HOME_PID}" ]]; then + echo "STREAMIS SERVER is not running" +else + pid=$(cat ${STREAMIS_HOME_PID}) + if [[ -z "${pid}" ]]; then + echo "STREAMIS SERVER is not running" + else + wait_for_STREAMIS_to_die $pid 40 + $(rm -f ${STREAMIS_HOME_PID}) + echo "STREAMIS SERVER is stopped." + fi +fi diff --git a/streamis-server/pom.xml b/streamis-server/pom.xml new file mode 100644 index 000000000..ee3142243 --- /dev/null +++ b/streamis-server/pom.xml @@ -0,0 +1,156 @@ + + + + + + streamis + com.webank.wedatasphere.streamis + 0.2.4 + + 4.0.0 + + streamis-server + jar + + + + 8 + 8 + + + + + + + + + + + + + + + + + + + + + com.webank.wedatasphere.streamis + streamis-jobmanager-server + ${streamis.version} + + + + com.webank.wedatasphere.streamis + streamis-projectmanager-server + ${streamis.version} + + + + + com.webank.wedatasphere.streamis + streamis-project-server + ${streamis.version} + + + + + com.webank.wedatasphere.streamis + streamis-job-log-server + ${streamis.version} + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-jar-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + 2.3 + false + + + make-assembly + package + + single + + + + src/main/assembly/distribution.xml + + + + + + false + streamis-server + false + false + + src/main/assembly/distribution.xml + + + + + org.apache.maven.plugins + maven-compiler-plugin + + 8 + 8 + + + + + + src/main/java + + **/*.xml + + + + src/main/resources + + **/*.xml + **/*.properties + **/*.yml + + + + + + + + + + \ No newline at end of file diff --git a/streamis-server/src/main/assembly/distribution.xml b/streamis-server/src/main/assembly/distribution.xml new file mode 100644 index 000000000..416239fa3 --- /dev/null +++ b/streamis-server/src/main/assembly/distribution.xml @@ -0,0 +1,69 @@ + + + + streamis-server + + zip + + true + streamis-server + + + + + + lib + true + true + false + false + true + + + + + 
+ ${basedir}/src/main/resources + + * + + 0777 + conf + unix + + + ${basedir}/bin + + * + + 0777 + bin + unix + + + . + + */** + + logs + + + + + diff --git a/streamis-server/src/main/resources/application.yml b/streamis-server/src/main/resources/application.yml new file mode 100644 index 000000000..837a97e4e --- /dev/null +++ b/streamis-server/src/main/resources/application.yml @@ -0,0 +1,29 @@ +server: + port: 9321 +spring: + application: + name: streamis-server + mvc: + servlet: + path: /api/rest_j/v1 + servlet: + multipart: + enabled: true + max-file-size: 500MB + max-request-size: 500MB + +eureka: + client: + serviceUrl: + defaultZone: http://127.0.0.1:20303/eureka/ + instance: + metadata-map: + test: wedatasphere + +management: + endpoints: + web: + exposure: + include: refresh,info +logging: + config: classpath:log4j2.xml diff --git a/streamis-server/src/main/resources/linkis.properties b/streamis-server/src/main/resources/linkis.properties new file mode 100644 index 000000000..b09dbbc4a --- /dev/null +++ b/streamis-server/src/main/resources/linkis.properties @@ -0,0 +1,66 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +#wds.linkis.test.mode=true +wds.linkis.server.mybatis.datasource.url=jdbc:mysql://localhost:3306/streamis?characterEncoding=UTF-8 +wds.linkis.server.mybatis.datasource.username=user1 + +wds.linkis.server.mybatis.datasource.password=pwd1 +wds.linkis.gateway.ip= +wds.linkis.gateway.port= +wds.linkis.gateway.url=http://localhost:9001 + +wds.linkis.mysql.is.encrypt=false +##restful +wds.linkis.log.clear=true +wds.linkis.server.version=v1 +#wds.linkis.test.user=user1 + + + +##restful +wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.streamis.datasource.server.restful.api,\ + com.webank.wedatasphere.streamis.project.server.restful,\ + com.webank.wedatasphere.streamis.jobmanager.restful.api,\ + com.webank.wedatasphere.streamis.datasource.execute.rest,\ + com.webank.wedatasphere.streamis.projectmanager.restful.api,\ + com.webank.wedatasphere.streamis.jobmanager.log.server.restful +##mybatis +wds.linkis.server.mybatis.mapperLocations=\ + classpath*:com/webank/wedatasphere/streamis/datasource/manager/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/project/server/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/*.xml,\ + classpath*:com/webank/wedatasphere/streamis/projectmanager/dao/impl/*.xml + +wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.streamis.datasource.manager.domain,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.entity,\ + com.webank.wedatasphere.streamis.jobmanager.manager.entity,\ + com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo,\ + com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo,\ + com.webank.wedatasphere.streamis.projectmanager.entity + + +wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.streamis.datasource.manager.dao,\ + org.apache.linkis.bml.dao,\ + com.webank.wedatasphere.streamis.project.server.dao,\ + 
com.webank.wedatasphere.streamis.jobmanager.launcher.dao,\ + com.webank.wedatasphere.streamis.jobmanager.manager.dao,\ + com.webank.wedatasphere.streamis.projectmanager.dao + +# Make sure that can fetch the application info finally +wds.streamis.application.info.fetch.max=20 + +# To use the complete features of streamis in linkis 1.1.2 +#wds.streamis.launch.flink.linkis.release.version=1.1.2 \ No newline at end of file diff --git a/streamis-server/src/main/resources/log4j.properties b/streamis-server/src/main/resources/log4j.properties new file mode 100644 index 000000000..f070a82c1 --- /dev/null +++ b/streamis-server/src/main/resources/log4j.properties @@ -0,0 +1,35 @@ +# +# Copyright 2021 WeBank +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +### set log levels ### + +log4j.rootCategory=INFO,console + +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.Threshold=INFO +log4j.appender.console.layout=org.apache.log4j.PatternLayout +#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n +log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n + + +log4j.appender.com.webank.bdp.ide.core=org.apache.log4j.DailyRollingFileAppender +log4j.appender.com.webank.bdp.ide.core.Threshold=INFO +log4j.additivity.com.webank.bdp.ide.core=false +log4j.appender.com.webank.bdp.ide.core.layout=org.apache.log4j.PatternLayout +log4j.appender.com.webank.bdp.ide.core.Append=true +log4j.appender.com.webank.bdp.ide.core.File=logs/linkis.log +log4j.appender.com.webank.bdp.ide.core.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n + +log4j.logger.org.springframework=INFO diff --git a/streamis-server/src/main/resources/log4j2.xml b/streamis-server/src/main/resources/log4j2.xml new file mode 100644 index 000000000..08a23e2d0 --- /dev/null +++ b/streamis-server/src/main/resources/log4j2.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/streamis-server/src/main/scala/com/webank/wedatasphere/streamis/server/boot/StreamisServerApplication.scala b/streamis-server/src/main/scala/com/webank/wedatasphere/streamis/server/boot/StreamisServerApplication.scala new file mode 100644 index 000000000..bcc004fb3 --- /dev/null +++ b/streamis-server/src/main/scala/com/webank/wedatasphere/streamis/server/boot/StreamisServerApplication.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2021 WeBank + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.webank.wedatasphere.streamis.server.boot + + +import org.apache.linkis.DataWorkCloudApplication +import org.apache.linkis.common.utils.{Logging, Utils} + +/** + * created by yangzhiyue on 2021/4/30 + * Description: + */ +object StreamisServerApplication extends Logging{ + + val userName:String = System.getProperty("username.name") + val hostName:String = Utils.getComputerName + + def main(args: Array[String]): Unit = { + DataWorkCloudApplication.main(args) + } + +} diff --git a/web/.editorconfig b/web/.editorconfig new file mode 100644 index 000000000..f1cc3ad32 --- /dev/null +++ b/web/.editorconfig @@ -0,0 +1,15 @@ +# http://editorconfig.org + +root = true + +[*] +charset = utf-8 +indent_style = space +indent_size = 2 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +insert_final_newline = false +trim_trailing_whitespace = false diff --git a/web/.env b/web/.env new file mode 100644 index 000000000..514f0d391 --- /dev/null +++ b/web/.env @@ -0,0 +1,4 @@ +VUE_APP_HOST= +VUE_APP_MN_CONFIG_PREFIX= +VUE_APP_MN_CONFIG_SOCKET=/ws/api/entrance/connect +VUE_APP_VERSION=v1.0.0 \ No newline at end of file diff --git a/web/.eslintignore b/web/.eslintignore new file mode 100644 index 000000000..904c1894b --- /dev/null +++ b/web/.eslintignore @@ -0,0 +1,2 @@ +iconfont.js +"*.min.js" \ No newline at end of file diff --git a/web/.eslintrc.js b/web/.eslintrc.js new file mode 100644 index 000000000..fe93fbe50 --- /dev/null +++ b/web/.eslintrc.js @@ -0,0 +1,93 @@ +/* + * Copyright 2019 WeBank + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +module.exports = { + root: true, + env: { + node: true + }, + extends: [ + 'eslint:recommended', + 'plugin:vue/essential' + ], + rules: { + 'no-console': process.env.NODE_ENV === 'production' ? 'error' : 'off', + 'no-debugger': process.env.NODE_ENV === 'production' ? 'error' : 'off', + 'key-spacing': ['error'], + 'standard/no-callback-literal': 0, + 'handle-callback-err': 0, + 'no-return-assign': 0, + 'eqeqeq': 0, + 'comma-dangle': 0, + 'semi': 0, + 'space-before-function-paren': 0, + 'keyword-spacing': 0, + 'no-useless-escape': 0, + 'operator-linebreak': 0, + 'indent': [ + 'error', + 2, + { + 'SwitchCase': 1 + } + ], + 'no-const-assign': 'warn', + 'no-this-before-super': 'warn', + "no-irregular-whitespace": 0, + 'no-undef': 2, + 'no-unreachable': 'warn', + 'no-unused-vars': 2, + 'constructor-super': 'warn', + 'valid-typeof': 'warn', + 'one-var': 'warn', + 'max-len': 'off', + 'no-trailing-spaces': 'off', + 'require-jsdoc': 'warn', + 'camelcase': 'warn', + 'no-invalid-this': 'off', + 'linebreak-style': 0, + 'vue/no-parsing-error': [2, { + 'x-invalid-end-tag': false, + 'invalid-first-character-of-tag-name': false + }], + 'no-tabs': 0, + 'vue/html-indent': [2, 2, { + 'attribute': 1, + 'closeBracket': 0, + 'alignAttributesVertically': false + }], + 'vue/require-default-prop': 0, + 'vue/component-name-in-template-casing': 0, + 'vue/html-closing-bracket-spacing': 0, + 'vue/html-closing-bracket-newline': 0, + 
'vue/singleline-html-element-content-newline': 0, + 'vue/multiline-html-element-content-newline': 0, + 'vue/attributes-order': 0, + 'vue/html-self-closing': 0, + 'no-useless-constructor': 0, + 'no-mixed-operators': 0, + 'no-new-func': 0, + 'no-template-curly-in-string': 0, + 'no-useless-call': 0, + "one-var": 0, + "camelcase": 0 + }, + parserOptions: { + "parser": 'babel-eslint', + "sourceType": "module" + } +} diff --git a/web/.gitattributes b/web/.gitattributes new file mode 100644 index 000000000..ec8935df7 --- /dev/null +++ b/web/.gitattributes @@ -0,0 +1,9 @@ +* text=auto +* text eol=lf +*.png binary +*.gif binary +*.ttf binary +*.woff binary +*.eot binary +*.woff binary +*.otf binary \ No newline at end of file diff --git a/web/.gitignore b/web/.gitignore new file mode 100644 index 000000000..954d5ddfd --- /dev/null +++ b/web/.gitignore @@ -0,0 +1,13 @@ +.DS_Store +.vscode +.cache +.idea/ + +node_modules/ +dist/ + +package-lock.json +wedatasphere-*.zip +./cn.json +.env.* +*-dist.zip diff --git a/web/.jshintrc b/web/.jshintrc new file mode 100644 index 000000000..40bd84d8e --- /dev/null +++ b/web/.jshintrc @@ -0,0 +1,28 @@ +{ + "node": true, + "esnext": true, + "bitwise": true, + "camelcase": true, + "curly": true, + "eqeqeq": true, + "immed": true, + "indent": 2, + "latedef": true, + "newcap": true, + "noarg": true, + "quotmark": "single", + "regexp": true, + "undef": true, + "unused": false, + "strict": true, + "trailing": true, + "smarttabs": true, + "white": true, + "globals": { + "history": false, + "window": false, + "_": false, + "cordova": false, + "angular": false + } +} diff --git a/web/.prettierrc.json b/web/.prettierrc.json new file mode 100644 index 000000000..fd496a820 --- /dev/null +++ b/web/.prettierrc.json @@ -0,0 +1,4 @@ +{ + "singleQuote": true, + "semi": false +} diff --git a/web/LICENSE b/web/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/web/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 
2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/web/README.md b/web/README.md new file mode 100644 index 000000000..5cc2ca1d7 --- /dev/null +++ b/web/README.md @@ -0,0 +1,46 @@ +### 说明 + +Vue + iview + +### 项目结构 + +``` +├─dist # 构建后静态资源 +├─node_modules +├─public # 公共index.html +└─src + ├─apps # 独立模块 + │ ├─scriptis # scriptis相关功能 + │ │ ├─assets # 应用所需的图片、css等资源 + │ │ ├─config # + │ │ ├─i18n # 国际化的中英文json + │ │ ├─module # 当前应用所需的模块,每个模块相对独立,模块私有资源内置 + │ │ ├─service + │ │ └─view # 当前应用的页面,路由在同级目录下router.js下配置 + │ └─workflows + ├─common # 全局使用的公共服务和方法 + ├─components # 全局使用的公共组件部分 + ├─dss # 主应用,apps里的应用都是其子路由 + ├─main.js # 应用启动入口 + └─router.js # 合并后的应用路由 +``` + +### 建议/约束 + +新增功能模块先确定涉及应用,按照上面目录结构维护代码同时建议遵守以下约束: + +- apps应用模块间不要相互依赖,可复用部分抽离 +- 可复用组件,资源需要合理放置 +- 新增功能模块需要按照现有目录约束建立文件,已有功能修改应在有限模块内进行,控制影响范围 +- 全局共用组件、公共基础样式、工具方法修改需评估后才能修改,并且重点review + + +### 前端开发、构建打包 + +``` +# 开发启动DSS +npm run serve + +# 打包应用 +npm run build +``` \ No newline at end of file diff --git a/web/babel.config.js b/web/babel.config.js new file mode 100644 index 000000000..ef8e58391 --- /dev/null +++ b/web/babel.config.js @@ -0,0 +1,22 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +module.exports = { + presets: [ + '@vue/app' + ] +} diff --git a/web/mock.js b/web/mock.js new file mode 100644 index 000000000..82953f9b4 --- /dev/null +++ b/web/mock.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +module.exports = function () { +} diff --git a/web/package.json b/web/package.json new file mode 100644 index 000000000..28a74198f --- /dev/null +++ b/web/package.json @@ -0,0 +1,73 @@ +{ + "name": "streamis", + "version": "0.2.4", + "private": true, + "scripts": { + "serve": "vue-cli-service serve", + "build": "vue-cli-service build", + "lint": "vue-cli-service lint --no-fix", + "fix": "eslint --ext .js,.vue src --fix", + "precommit": "lint-staged", + "postinstall": "patch-package" + }, + "husky": { + "hooks": { + "postcommit": "git update-index --again", + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "src/**/*.{js,vue}": [ + "vue-cli-service lint --no-fix", + "git add" + ] + }, + "dependencies": { + "axios": "0.19.2", + "babel-polyfill": "6.26.0", + "core-js": "2.6.11", + "dexie": "2.0.4", + "dt-sql-parser": "1.2.1", + "eslint": "6.8.0", + "eslint-plugin-vue": "6.2.2", + "highlight.js": "9.18.3", + "lodash": "4.17.20", + "md5": "2.3.0", + "mitt": "1.2.0", + "moment": "2.29.1", + "monaco-editor": "0.19.3", + "pinyin": "2.9.1", + "qs": "6.9.4", + "reconnecting-websocket": "4.4.0", + "sql-formatter": "2.3.3", + "svgo": "1.3.0", + "view-design": "4.5.0", + 
"vue": "2.6.12", + "vue-i18n": "8.22.1", + "vue-router": "3.4.8", + "vuedraggable": "2.24.3", + "vuescroll": "4.16.1", + "worker-loader": "2.0.0" + }, + "devDependencies": { + "@kazupon/vue-i18n-loader": "0.4.1", + "@vue/cli-plugin-babel": "3.12.1", + "@vue/cli-plugin-eslint": "3.12.1", + "@vue/cli-service": "3.12.1", + "@vue/eslint-config-standard": "4.0.0", + "archiver": "3.1.1", + "babel-eslint": "10.1.0", + "copy-webpack-plugin": "4.6.0", + "csp-html-webpack-plugin": "4.0.0", + "filemanager-webpack-plugin": "2.0.5", + "husky": "1.3.1", + "lint-staged": "8.2.1", + "monaco-editor-webpack-plugin": "1.8.2", + "node-sass": "4.14.1", + "patch-package": "6.2.2", + "sass-loader": "7.3.1", + "svg-sprite-loader": "5.0.0", + "vue-cli-plugin-mockjs": "0.1.3", + "vue-template-compiler": "2.6.12" + } +} diff --git a/web/postcss.config.js b/web/postcss.config.js new file mode 100644 index 000000000..6a1e97cd9 --- /dev/null +++ b/web/postcss.config.js @@ -0,0 +1,22 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +module.exports = { + plugins: { + autoprefixer: {} + } +} diff --git a/web/public/favicon.ico b/web/public/favicon.ico new file mode 100644 index 000000000..3fab09991 Binary files /dev/null and b/web/public/favicon.ico differ diff --git a/web/public/index.html b/web/public/index.html new file mode 100644 index 000000000..69dbcd418 --- /dev/null +++ b/web/public/index.html @@ -0,0 +1,34 @@ + + + + + + + + + + DataSphere Studio + + + +
+ + + diff --git a/web/src/apps/scriptis/assets/images/dssLogo5.png b/web/src/apps/scriptis/assets/images/dssLogo5.png new file mode 100644 index 000000000..a16cac58b Binary files /dev/null and b/web/src/apps/scriptis/assets/images/dssLogo5.png differ diff --git a/web/src/apps/scriptis/assets/styles/home.scss b/web/src/apps/scriptis/assets/styles/home.scss new file mode 100644 index 000000000..83853380f --- /dev/null +++ b/web/src/apps/scriptis/assets/styles/home.scss @@ -0,0 +1,105 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +@import '@/common/style/variables.scss'; + +.home-page{ + position: $relative; + width: 100%; + height: 100%; + display: flex; + z-index: 1; + .container{ + flex: 1; + } + .center-panel{ + border-left: $border-width-base $border-style-base $border-color-base; + } + + .right-panel{ + border-left: $border-width-base $border-style-base $border-color-base; + } + + .nav-list{ + width: 44px; + height: 100%; + border-right: $border-width-base $border-style-base $border-color-base; + background: $table-thead-bg; + .nav-warp{ + position: $absolute; + height: 100%; + top: 0; + left: 0; + overflow: hidden; + } + .nav-item{ + width: 43px; + padding: 32px 4px; + text-align: center; + cursor: pointer; + &.actived{ + width: 44px; + border-left: 3px solid $primary-color; + border-bottom: $border-width-base $border-style-base $border-color-base; + border-top: $border-width-base $border-style-base $border-color-base; + background: $background-color-white ; + color: $primary-color; + } + &:hover{ + color: $primary-color; + } + .nav-icon{ + font-size: $font-size-large; + } + .nav-name{ + width: 100%; + white-space: normal; + word-break: break-all; + display: inline-block; + line-height: $font-size-large; + } + } + } + .narrow { + width: 30px; + .nav-item { + width: 100%; + padding: 30px 4px; + &.actived { + width: 30px; + } + .nav-icon{ + font-size: $font-size-base; + } + } + } + // @media screen and(max-height: 790px) { + // .nav-list { + // width: 30px; + // .nav-item { + // width: 100%; + // padding: 30px 4px; + // &.actived { + // width: 30px; + // } + // .nav-icon{ + // font-size: $font-size-base; + // } + // } + // } + // } +} diff --git a/web/src/apps/scriptis/assets/styles/sidebar.scss b/web/src/apps/scriptis/assets/styles/sidebar.scss new file mode 100644 index 000000000..66076512d --- /dev/null +++ b/web/src/apps/scriptis/assets/styles/sidebar.scss @@ -0,0 +1,41 @@ +/*! 
+ * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +@import '@/common/style/variables.scss'; +.we-side-bar { + height: 100%; + overflow: hidden; + padding-top: 38px; + .we-side-bar-content { + height: 100%; + overflow-y: auto; + &::-webkit-scrollbar { + width: 6px; + height: 1px; + } + &::-webkit-scrollbar-thumb { + border-radius: $border-radius-base; + box-shadow: $shadow-inset; + background: $subsidiary-color; + } + &::-webkit-scrollbar-track { + border-radius: $border-radius-base; + box-shadow: $shadow-inset; + background: $body-background; + } + } +} diff --git a/web/src/apps/scriptis/components/apiPublish/TagsInput/index.scss b/web/src/apps/scriptis/components/apiPublish/TagsInput/index.scss new file mode 100644 index 000000000..e2682eec2 --- /dev/null +++ b/web/src/apps/scriptis/components/apiPublish/TagsInput/index.scss @@ -0,0 +1,39 @@ +@import '@/common/style/variables.scss'; +.tags-wrap{ + width: 100%; + height: 100%; + outline: none; + &::after{ + content: ""; + display: block; + height: 0; + clear: both; + } +} +.tags, .tags-input{ + position: relative; + float: left; + color: #fff; + line-height: 28px; + margin: 0 4px 4px 0; + padding: 0 22px 0 10px; + border-radius: $border-radius-base; + .content{ + line-height: 28px; + } + .del{ + width: 22px; + height: 28px; + text-align: center; + cursor: pointer; + position: absolute; + top: -1px; + right: 0; + } +} +.tags-input{ + background-color: inherit; + border: 
$border-width-base $border-style-base $border-color-base; + color: inherit; + width: auto !important; +} diff --git a/web/src/apps/scriptis/components/apiPublish/TagsInput/index.vue b/web/src/apps/scriptis/components/apiPublish/TagsInput/index.vue new file mode 100644 index 000000000..9d28fb239 --- /dev/null +++ b/web/src/apps/scriptis/components/apiPublish/TagsInput/index.vue @@ -0,0 +1,85 @@ + + + + + + diff --git a/web/src/apps/scriptis/components/fileTree/index.js b/web/src/apps/scriptis/components/fileTree/index.js new file mode 100644 index 000000000..c72ded6e6 --- /dev/null +++ b/web/src/apps/scriptis/components/fileTree/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import fileTree from './index.vue'; +export default fileTree; diff --git a/web/src/apps/scriptis/components/fileTree/index.scss b/web/src/apps/scriptis/components/fileTree/index.scss new file mode 100644 index 000000000..87a8062af --- /dev/null +++ b/web/src/apps/scriptis/components/fileTree/index.scss @@ -0,0 +1,24 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +.we-file-tree { + &.is-empty { + display: flex; + justify-content: center; + align-items: center; + } +} diff --git a/web/src/apps/scriptis/components/fileTree/index.vue b/web/src/apps/scriptis/components/fileTree/index.vue new file mode 100644 index 000000000..969c97a07 --- /dev/null +++ b/web/src/apps/scriptis/components/fileTree/index.vue @@ -0,0 +1,225 @@ + + + diff --git a/web/src/apps/scriptis/components/functionSetting/index.js b/web/src/apps/scriptis/components/functionSetting/index.js new file mode 100644 index 000000000..237e39670 --- /dev/null +++ b/web/src/apps/scriptis/components/functionSetting/index.js @@ -0,0 +1,20 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +import WeFunctionSetting from './index.vue'; + +export default WeFunctionSetting; diff --git a/web/src/apps/scriptis/components/functionSetting/index.scss b/web/src/apps/scriptis/components/functionSetting/index.scss new file mode 100644 index 000000000..b14cbe634 --- /dev/null +++ b/web/src/apps/scriptis/components/functionSetting/index.scss @@ -0,0 +1,58 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +@import "@/common/style/variables.scss"; +.fn-setting { + .setting-content { + padding-right: 16px; + overflow-y: auto; + .preview { + overflow: hidden; + text-overflow: ellipsis; + width: 380px; + white-space: nowrap; + + .preview-name { + color: $success-color; + } + + .preview-params { + color: $primary-color; + } + + .preview-type { + color: $warning-color; + } + } + .format-div { + display: flex; + .format-item { + width: 50%; + } + .format-input { + padding-right: 6px; + } + } + } + .fn-default-load { + position: absolute; + left: 30px; + bottom: 20px; + } + .fn-sure-btn { + margin-right: 10px; + } +} diff --git a/web/src/apps/scriptis/components/functionSetting/index.vue b/web/src/apps/scriptis/components/functionSetting/index.vue new file mode 100644 index 000000000..49bb28c0c --- /dev/null +++ b/web/src/apps/scriptis/components/functionSetting/index.vue @@ -0,0 +1,644 @@ + + + diff --git a/web/src/apps/scriptis/components/functionSetting/jarPreview.vue 
b/web/src/apps/scriptis/components/functionSetting/jarPreview.vue new file mode 100644 index 000000000..85e0e7164 --- /dev/null +++ b/web/src/apps/scriptis/components/functionSetting/jarPreview.vue @@ -0,0 +1,19 @@ + + + diff --git a/web/src/apps/scriptis/components/functionSetting/pyPreview.vue b/web/src/apps/scriptis/components/functionSetting/pyPreview.vue new file mode 100644 index 000000000..e0bd0648a --- /dev/null +++ b/web/src/apps/scriptis/components/functionSetting/pyPreview.vue @@ -0,0 +1,19 @@ + + + diff --git a/web/src/apps/scriptis/components/functionSetting/scalaPreview.vue b/web/src/apps/scriptis/components/functionSetting/scalaPreview.vue new file mode 100644 index 000000000..ee6716ff2 --- /dev/null +++ b/web/src/apps/scriptis/components/functionSetting/scalaPreview.vue @@ -0,0 +1,25 @@ + + + diff --git a/web/src/apps/scriptis/components/functionSetting/usePreview.vue b/web/src/apps/scriptis/components/functionSetting/usePreview.vue new file mode 100644 index 000000000..95bcbc8ce --- /dev/null +++ b/web/src/apps/scriptis/components/functionSetting/usePreview.vue @@ -0,0 +1,23 @@ + + + diff --git a/web/src/apps/scriptis/components/functionShare/index.js b/web/src/apps/scriptis/components/functionShare/index.js new file mode 100644 index 000000000..3270660f2 --- /dev/null +++ b/web/src/apps/scriptis/components/functionShare/index.js @@ -0,0 +1,20 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +import WeFunctionShare from './index.vue'; + +export default WeFunctionShare; diff --git a/web/src/apps/scriptis/components/functionShare/index.vue b/web/src/apps/scriptis/components/functionShare/index.vue new file mode 100644 index 000000000..e15819311 --- /dev/null +++ b/web/src/apps/scriptis/components/functionShare/index.vue @@ -0,0 +1,161 @@ + + diff --git a/web/src/apps/scriptis/components/importToHive/fieldList.vue b/web/src/apps/scriptis/components/importToHive/fieldList.vue new file mode 100644 index 000000000..4fc15ebeb --- /dev/null +++ b/web/src/apps/scriptis/components/importToHive/fieldList.vue @@ -0,0 +1,133 @@ + + diff --git a/web/src/apps/scriptis/components/importToHive/index.js b/web/src/apps/scriptis/components/importToHive/index.js new file mode 100644 index 000000000..eadf0fc81 --- /dev/null +++ b/web/src/apps/scriptis/components/importToHive/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import weImportToHive from './index.vue'; +export default weImportToHive; diff --git a/web/src/apps/scriptis/components/importToHive/index.scss b/web/src/apps/scriptis/components/importToHive/index.scss new file mode 100644 index 000000000..66b756a2d --- /dev/null +++ b/web/src/apps/scriptis/components/importToHive/index.scss @@ -0,0 +1,141 @@ +/*! 
+ * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +@import '~@/common/style/variables.scss'; +.we-import-to-hive { + min-height: 480px; + padding: 20px 20px 0; + padding-top: 0; + color: #787878; + position: relative; + overflow: hidden; + &.first-step { + overflow: auto; + } + .we-import-to-hive-steps { + padding-bottom: 9px; + } + .we-import-to-hive-form { + .step-progress { + height: 500px; + position: absolute; + width: 740px; + display: flex; + z-index: 999; + background-color: rgba(0, 0, 0, 0.3); + align-items: center; + justify-content: center; + } + .step-form-title { + font-size: $font-size-large; + color: $primary-color ; + display: block; + margin: 0 0 9px; + } + .step-form-field { + display: inline-block; + margin-right: 10px; + } + .step-form-extras { + margin: 6px 4px 0 5px; + } + .seconed-Form { + display: flex; + flex-direction: column; + .field-list-wrap { + .field-list-item { + .field-list-item-child { + margin-bottom: 20px; + display: inline-block; + .field-list-item-child-label { + display: inline-block; + width: 30px; + text-align: right; + vertical-align: middle; + margin-right: 10px; + margin-left: 4px; + } + .field-list-item-child-type, + .field-list-item-child-comment, + .field-list-item-child-option, + .field-list-item-child-length, + .field-list-item-child-time { + display: inline-block; + margin: 0 2px; + } + .field-list-item-child-type { + width: 104px; + } + .field-list-item-child-option { + 
width: 81px; + } + .field-list-item-child-length { + width: 166px; + } + .field-list-item-child-time { + width: 122px; + } + .field-list-item-child-comment { + width: 104px; + } + } + .step-form-close, + .step-form-extras { + cursor: pointer; + } + .step-form-close { + padding-top: 6px; + } + } + } + } + .step-form-type-supplement { + margin-left: 157px; + } + .item-auto-complete { + .ivu-select-dropdown-list { + height: 300px; + } + } + .form-item-help-icon { + position: absolute; + left: -12px; + top: 10px; + } + .step-form-fields { + display: inline-flex; + .form-item-inline { + width: 220px; + display: inline-block; + } + } + } +} + +.we-import-to-hive-errmsg { + color: $error-color; + position: absolute; + left: 24px; + font-size: 12px; + display: inline-block; + width: 380px; + text-align: left; +} + +.ivu-auto-complete.ivu-select-dropdown { + max-height: 120px; +} diff --git a/web/src/apps/scriptis/components/importToHive/index.vue b/web/src/apps/scriptis/components/importToHive/index.vue new file mode 100644 index 000000000..2d829c173 --- /dev/null +++ b/web/src/apps/scriptis/components/importToHive/index.vue @@ -0,0 +1,836 @@ + + + diff --git a/web/src/apps/scriptis/components/newDialog/index.js b/web/src/apps/scriptis/components/newDialog/index.js new file mode 100644 index 000000000..167f685a4 --- /dev/null +++ b/web/src/apps/scriptis/components/newDialog/index.js @@ -0,0 +1,20 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import WeNewDialog from './index.vue'; + +export default WeNewDialog; diff --git a/web/src/apps/scriptis/components/newDialog/index.vue b/web/src/apps/scriptis/components/newDialog/index.vue new file mode 100644 index 000000000..a952acc5a --- /dev/null +++ b/web/src/apps/scriptis/components/newDialog/index.vue @@ -0,0 +1,260 @@ + + diff --git a/web/src/apps/scriptis/components/uploadDialog/index.js b/web/src/apps/scriptis/components/uploadDialog/index.js new file mode 100644 index 000000000..e30c39d7c --- /dev/null +++ b/web/src/apps/scriptis/components/uploadDialog/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import uploadDialog from './index.vue'; +export default uploadDialog; diff --git a/web/src/apps/scriptis/components/uploadDialog/index.scss b/web/src/apps/scriptis/components/uploadDialog/index.scss new file mode 100644 index 000000000..6b485785a --- /dev/null +++ b/web/src/apps/scriptis/components/uploadDialog/index.scss @@ -0,0 +1,85 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +@import '@/common/style/variables.scss'; + +.we-upload { + .we-upload-content { + width: 310px; + min-height: 230px; + padding-top: 15px; + padding-bottom: 15px; + cursor: default; + + .el-upload-dragger { + height: 220px; + } + + .el-upload-list { + max-height: 300px; + overflow: auto; + } + + .we-upload_path { + color: #97a8be; + font-size: 12px; + padding-top: 40px; + } + } + + .v-modal { + opacity: 0; + width: 0; + height: 0; + } + + .close-confirm-box { + width: 420px; + } + + .close-confirm-box__content { + display: flex; + margin-top: -10px; + margin-bottom: -10px; + } + + .close-confirm-box__status { + color: $yellow-color; + font-size: 36px !important; + } + + .close-confirm-box__message { + line-height: 36px; + padding-left: 8px; + } + + .grayBg { + background: $text-over-color; + color: $body-background; + padding: 5px 9px; + font-size: 12px; + border-radius: 2px; + border: none; + margin-left: 10px; + border: $line-border; + } + + .el-tooltip__popper.is-light { + border-color: $line-color; + color: #3d3d3d; + } +} diff --git a/web/src/apps/scriptis/components/uploadDialog/index.vue b/web/src/apps/scriptis/components/uploadDialog/index.vue new file mode 100644 index 000000000..2ced47d51 --- /dev/null +++ b/web/src/apps/scriptis/components/uploadDialog/index.vue @@ -0,0 +1,199 @@ + + + diff --git a/web/src/apps/scriptis/i18n/en.json b/web/src/apps/scriptis/i18n/en.json new file mode 100644 index 000000000..bae28e41b --- /dev/null +++ b/web/src/apps/scriptis/i18n/en.json @@ -0,0 +1,810 @@ +{ + "message": { + "scripts": { + 
"emptyText": "No data yet", + "publishing": "Publishing", + "tagPlaceholder": "tag,click enter add", + "script": "Script", + "ok": "OK", + "editor": "Edit", + "failedNotice": "Invalid items found, please check and then retry!", + "cancel": "Cancel", + "notSave": "Cannot save on read-only status!", + "saveSuccess": "Successfully saved!", + "saveErr": "Save failed!", + "createdTitle": "Table creation guide", + "saveToWidget": "Save widget to solution", + "contextMenu": { + "openToTab": "Open as a tab", + "copyPath": "Copy path", + "addCatalog": "New directory", + "addScript": "New script", + "importToHive": "Import to Hive", + "importToHdfs": "Import to HDFS", + "copyscript": "Copy script", + "stickscript": "Stick script" + }, + "warning": { + "getRootPath": "Failed to request Workspace root directory, please refresh to retry!", + "scriptChanged": "Script already updated, please save and retry!", + "empty": "Empty script content!" + }, + "error": { + "fileExists": "Duplicated file!" + }, + "boyondQuota": { + "title": "Warning", + "desc": "'You have open 20 scripts(over maximun limit), please close some other scripts!'" + }, + "createTable": { + "action": "Action", + "delete": "Del", + "baseAttr": "Table basic attributes", + "add": "New", + "partitionAttr": "Partition attributes", + "selectResource": "Please choose the original field", + "next": "Next", + "nameNotice": "Only Chinese characters, alphanumeric characters and underscore are allowed", + "zm": "alphabetic characters", + "daName": "Database name", + "tableName": "Table name", + "tablePlaceholder": "Starts with alphabetic characters; Only alphanumeric characters and underscore are allowed", + "tableOtherName": "Table alias name", + "aliasPlaceholder": "Only Chinese characters, alphanumeric characters and underscore are allowed", + "use": "Usage", + "emptyPlaceholder": "Only Chinese characters, alphanumeric characters and underscore are allowed(Can be empty)", + "product": "Product", + "project": "Project", + 
"tableDesc": "Table description", + "selectDbName": "Please choose database name", + "inputTableName": "Please enter table name", + "inputOtherName": "Please enter alias name", + "letter": "Alphabetic characters", + "moduleAttr": "Table model attributes", + "partitionType": "Partition type", + "partition": "Partition", + "unartition": "Not Partition", + "lifecycle": "Lifecycle", + "forever": "Forever", + "day": "Valid for a day", + "week": "Valid for a week", + "moon": "Valid for a month", + "midYear": "Valid for half a year", + "moduleRank": "Model layer", + "resource": "Original data layer", + "detailLevel": "Detailed data layer", + "sumData": "Summarized data layer", + "appData": "Application data layer", + "actionType": "Usage method", + "noeWrite": "Write Once Read Many", + "ZSGC": "CRUD", + "FG": "Multi-overwrite", + "OED": "Write Once Read Occasionally", + "isUse": "Is used externally", + "isDwsExeis": "Need to be used in systems outside DsS", + "selectPartitionType": "Please choose partition type", + "selectLife": "Please choose lifecycle", + "selectModuleLevel": "Please choose model layer", + "selectUseType": "Please choose usage method", + "selectIsUse": "Please choose whether to be used externally", + "canned": "Duplicated table, please enter other table names", + "type": "Type", + "selectType": "Please choose type", + "path": "Path", + "FGF": "Separator", + "BMGS": "Encoding format", + "XZBMGS": "Please choose encoding format", + "XDF": "Limited qualifier", + "WXDF": "Unlimited qualifier", + "HXWBT": "Use the first line as table header", + "HQBZDSJ": "Request table column data", + "XZWJLJ": "Please choose file path", + "GXMLDR": "Import from shared directory", + "DR": "Import", + "DH": "Comma", + "FH": "Semicolon", + "ZBF": "Tab", + "KG": "Space", + "SYH": "Double quote", + "DYH": "Single quote(\"\")", + "GLWJ": "Filter directory", + "LYSX": "Source attribute", + "MBZD": "Target column and source column", + "XZK": "Choose database", + "XZB": "Choose 
table", + "SJSXLY": "Data attribute source", + "DRFS": "Import method", + "XJ": "New", + "FHSYB": "Back to Previous", + "SCDDL": "Generate DDL statement", + "JB": "Create table", + "DDLYJ": "DDL Statement", + "FZZT": "Copy to clipboard", + "DDLYFZ": "DDL statement has been copied to clipboard!", + "fieldsName": "Column name", + "isPrimary": "Primary key", + "fieldsType": "Column type", + "length": "Length/Settings", + "fieldsAlias": "Column alias name", + "alias": "Alias name", + "sourceFields": "Source column", + "rule": "Column rule", + "express": "Column expression", + "comment": "Comments", + "partitionValue": "Partition value", + "partitionAlias": "Partition alias", + "fieldsNameRequire": "Column name cannot be empty", + "fieldsNameRule": "Only alphanumeric characters and underscore are allowed" + }, + "tableDetails": { + "BZBSX": "Table basic attributes", + "BZDXX": "Table column information", + "BTJXX": "Table statistical information", + "DBN": "Database name", + "TN": "Table namee", + "BBM": "Table alias name", + "FQM": "Partitioned table", + "CJYH": "Created By", + "CJSI": "Created At", + "ZHFWSJ": "Last Accessed At", + "BMS": "Table description", + "BMXSX": "Table model attributes", + "SMZQ": "Lifecycle", + "MXCJ": "Model layer", + "SYFS": "Usage mothod", + "WBSFSY": "Is used externally", + "BYYSX": "Table usage attributes", + "SXCP": "Product", + "SSXM": "Project", + "YT": "Usage", + "SSZDMC": "Search for column name...", + "XH": "Index", + "ZDM": "Column name", + "ZDLX": "Column type", + "BM": "Alias", + "SFZJ": "Is primary key", + "SFFQ": "Is partition", + "ZDGZ": "Column rules", + "MS": "Description", + "FQXX": "Partition information", + "DJFQCK": "Please click the root partition to view size information", + "WFQSJ": "No data of table partitions", + "BXX": "Table information", + "BZD": "Number of columns", + "BDX": "Table size", + "WJM": "Number of files", + "FQS": "Number of partitions", + "DX": "size", + "FQDX": "Partition size:", + "WJS": "Number 
of files:", + "YES": "Yes", + "GSHJX": "Formatted to", + "ZJXYGE": "The format required by the component" + }, + "logView": { + "taskId": "Task ID:", + "search": "Search", + "filter": "Filter", + "error": "Error", + "inputTaskId": "Please enter a valid task ID!" + }, + "functionSetting": { + "HSM": "Function name", + "HSLX": "Funtion type", + "TY": "Common", + "ZDYHS": "Self-defined funtions", + "B": "Package", + "JBLU": "Script path", + "ZCGS": "Register format", + "ZCHS": "Register function", + "FHLX": "Return type", + "CSLX": "Parameter type", + "YLLX": "Preview", + "SYGS": "Using format", + "SRLX": "Input type", + "HSMS": "Funtion description", + "MRJZ": "Loaded by default", + "XZHS": "New function", + "LJCJ": "Create", + "QSRMC": "Please enter the name", + "CDZ": "Length should be between 1 to 100 characters", + "BXYZMKT": "Must starts with alphabetic characters, followed by only alphanumeric, underscore and Chinese characters", + "QZSXZYZLX": "Please choose at least one type", + "QSRWZLJ": "Please enter the complete path", + "KHDBJZPP": "Brackets indicate precise matching", + "HZMZC": "Only jar is allowed as the suffix. Only alphanumeric characters and underscore is allowed for the name of jar package!", + "ZCPYSCA": "Only py and scala are allowed as the suffix!", + "BHJZCYW": "Only alphabetic characters are allowed as the package name", + "XHX": "Underscore", + "SRZCHS": "Please enter function registration", + "SRFHLX": "Please enter the return type", + "SRCSLX": "Please enter the parameter type", + "XGHS": "Update funtion", + "XG": "Update" + }, + "functionShare": { + "ZZGX": "Sharing function {name} in progress", + "HSLX": "Funtion type", + "GXYH": "Shared users", + "SRGXYH": "Please enter the users to be shared, separated by comma", + "XZGXHSLX": "Please choose a type for this shared function!", + "ZSXZYWGXYH": "Please enter at least one user needed to be shared with!" 
+ }, + "uploadDialog": { + "SCJBWJ": "Upload script file", + "TWJDSQ": "Drag file here, or click to upload ", + "PATH": "Path:", + "QKLB": "Clear list", + "LIMIT2M": "Script file to upload cannot exceed 2MB!", + "LIMIT100M": "Script file to upload cannot exceed 100MB!", + "ZDSC": "This operation will interrupt file uploading, continue or not? ", + "TS": "Notice", + "ZJGB": "Close directly", + "QX": "Cancel", + "QXBBSCCK": "Close uploading window", + "SCCG": "Successfully uploaded file {name}!", + "WJDXCCXE": "File size exceeds limit", + "WJYCZ": "Duplicated file, please choose another file or directory!", + "WJMBHF": "Invalid file name, only alphanumeric characters, Chinese characters and underscore is allowed with suffix!", + "SCZQK": "Uploading file in progress, cannot clear upload list!" + }, + "importToHive": { + "MC": "Name", + "ZDMC": "Column name cannot be empty", + "JZCZM": "Only alphanumeric characters and underscore are allowed", + "QSRZDM": "Please enter column name", + "LX": "Type", + "GS": "Format", + "QXZGS": "Please choose date format", + "CD": "Length", + "JD": "Accuracy", + "XSW": "Decimal scale", + "SRZDZS": "Please enter column comments", + "DRWJ": "Import file", + "BZONE": "Step 1", + "BZTOW": "Step 2", + "WJDR": "Import data from file", + "DRZMRB": "Import to the default table", + "LY": "Source", + "QXZLX": "Please choose type", + "LJ": "Path", + "GSH": "Format", + "FGF": "Separator", + "BMGS": "Encoding format", + "QXZBMGS": "Please choose encoding format", + "XDF": "Qualifier", + "WXDF": "No qualifier", + "SHWBT": "Treat the first line as header", + "SJK": "Database name", + "QSRKM": "Please enter database name", + "SJB": "Table name", + "SRSJBM": "Please enter table name", + "FQ": "Partition", + "FX": "Overwrite", + "XZFQ": "New partition", + "SHEETB": "sheet table", + "ZG": "Column", + "XYB": "Next step", + "SYB": "Previous step", + "TJ": "Submit", + "LXWCHAR": "Column with type char or varchar should have length as an integer greater than 
0", + "ZDJDSRYW": "Column with type decimal has invalid accuracy", + "ZDXSWYDY": "Column with type decimal should have scale as an integer greater than 0", + "ZDZSZCZW": "Only Chinese characters, alphabetic characters and underscore are allowed in column comments, with length not greater than 100 characters", + "SJKMBNWK": "Database name cannot be empty", + "BMBNWK": "Table name cannot be empty", + "BMJZCZMKT": "Table names should start with alphabetic characters, followed by alphabetic characters and underscore, length between 0 to 100 characters", + "FQMBNWK": "Partition name cannot be empty", + "FQMZCSZ": "Only alphanumeric characters and - are allowed in partition names", + "FQZBNWK": "Partition value cannot be empty", + "FQZZZCSZ": "Only numbers are allowed as partition values", + "GXMLDR": "Import from shared directory", + "HDFSDR": "Import from HDFS", + "DH": "Comma(,)", + "FH": "Semicolon(;)", + "ZBF": "Tab(\\t)", + "KG": "Space", + "SYH": "Double quote", + "DYH": "Single quote", + "WJZZZXDR": "Importing file in progress, please retry later", + "FQZCF": "Duplicated partition values, please confirm whether to overwrite", + "TSWST": "Notice: Cannot handle view tables, please choose other tables to import", + "WFCZSTB": "Cannot handle view tables", + "QXZQTBDR": "Please choose other tables to import", + "BMYCZDQSJK": "Table name already exists in this database", + "ZBZCDJFQ": "Multi-level partitions are not supported yet, please choose other tables", + "QQURFQ": "Please confirm the partition", + "HQFQSB": "Failed to request partition information", + "ZQDRWJLJ": "Please choose the path correctly", + "QQRSJKMCXX": "Please confirm the database name", + "QQRSFCF": "Please confirm whether to overwrite" + }, + "hiveTableExport": { + "DCB": "Export form table", + "SZDCCS": "Step 1: Set export parameters", + "SZDCLJ": "Step 2: Set export path", + "SJKM": "Database name", + "QSRKM": "Please enter the database name", + "SJBM": "Table name", + "ZDXX": "Column 
information", + "FQXX": "Partition information", + "DCGS": "Export format", + "FGF": "Separator", + "BMGS": "Encoding format", + "XZBMGS": "Please choose the encoding format", + "KZTH": "Null value place holder", + "SXWBT": "Treat the first line as header", + "LX": "Type", + "QXZLX": "Please choose the type", + "DCZ": "Export to", + "WJM": "File name", + "TRDCWJMC": "Please enter the name of the file to be exported", + "FXMS": "Overwrite mode", + "ZJ": "Append", + "FX": "Overwrite", + "BM": "sheet table name", + "QSR": "Please enter", + "XYB": "Next step", + "SYB": "Previous step", + "TJ": "Submit", + "WFCZST": "Cannot handle view tables, please choose other tables to export", + "ZBZCDBDC": "Tables with multiple partitions are not supported yet, please choose other tables to export", + "GXML": "Shared directory", + "DH": "Comma(,)", + "FH": "Semicolon(;)", + "ZBF": "Tab(\\t)", + "KG": "Space", + "KZFC": "Empty string", + "XZSJK": "Please choose database", + "XZB": "Please choose table", + "XZFGF": "Please choose separator", + "XZYGFQBDC": "Please choose a partitioned table to export", + "ZSXZYGZD": "Please choose at least one column", + "XZDCGS": "Please choose export format", + "XZXYJKZTHDZ": "Please choose the Null value place holder", + "XZDCLJDLX": "Please choose the type of export path", + "XZDCLJ": "Please choose export path", + "DRDCDWJ": "Please enter the name of the file to be exported", + "CDZ": "Length should be between 3 to 100 characters", + "WJMZZCZW": "Only Chinese characters, alphanumeric characters and underscore are allowed", + "CBDC": "Exporting table in progress, please hold on or try other tables", + "QSHZSS": "Please hold on or try other tables", + "XZFQ": "Please choose partition", + "BDXCG": "The size of this table is greater than 5GB, please choose other methods to export or choose other tables" + }, + "hiveTableDesc": { + "searchName": "Search by column name...", + "tableDetail": "Table details", + "noData": "No data for table partition 
information yet", + "tableColumns": "Table columns", + "columnsName": "Column name", + "columnsType": "Column type", + "tablePartitions": "Table partitions表分区", + "dbName": "Database name", + "tableName": "Table name", + "createUser": "Created By", + "createTtime": "Created At", + "lastTime": "Last accessed", + "tableSize": "Table size", + "size": "Size" + }, + "deleteType": { + "function": "Function", + "folder": "Directory", + "file": "File", + "common": "Common", + "catalog": "Directory", + "script": "Script file", + "engine": "Engine", + "task": "Task", + "engineAndTask": "Engine and task" + }, + "constants": { + "add": "New", + "cancel": "Cancel", + "confirm": "Confirm", + "changeLanguage": "Switch language", + "detail": "Details", + "description": "Description", + "delete": "Delete", + "download": "Download", + "edit": "Edit", + "export": "Export", + "fail": "Fail", + "noDataText": "No data yet", + "node": "Node", + "refresh": "Refresh", + "rename": "Rename", + "reset": "Reset", + "remove": "Remove", + "submit": "Submit", + "search": "Search", + "save": "Save", + "upload": "Upload", + "update": "Update", + "warning": { + "api": "Requesting API, please hold on!", + "data": "Requesting data, please hold on!", + "waiting": "Please wait for API response!", + "biLoading": "Connecting with Visualis, please hold on!", + "comingSoon": "New version is being integrated, so stay tuned !", + "GJZ": "Open source co-construction is in progress, so stay tuned!" 
+ }, + "success": { + "delete": "Successfully deleted!", + "update": "Successfully updated!", + "add": "Successfully created!", + "save": "Successfully saved", + "stick": "Sticking success", + "prohibit": "Prohibit copying this type" + }, + "error": { + "refresh": "Error on refreshing, please retry!", + "fileExists": "Duplicated file", + "folderExists": "Duplicated directory", + "functionExists": "Duplicated function" + }, + "logPanelList": { + "fullScreen": "Full Screen", + "releaseFullScreen": "Restore", + "min": "Minimize", + "releaseMin": "Restore" + }, + "time": { + "second": "Second", + "minute": "Minute", + "hour": "Hour", + "day": "Day" + } + }, + "navList": { + "dataStudio": "Data development", + "workSpace": "Workspace", + "database": "Database", + "udfFunction": "UDF", + "methodFunction": "Method", + "hdfs": "HDFS" + }, + "newDialog": { + "catalog": "Directory", + "scriptName": "Name", + "catalogName": "Name", + "targetScriptPath": "Creation target path", + "placeholder": "Please enter directory name", + "required": "Please enter directory name", + "rules": { + "scriptName": { + "required": "Please enter script name", + "lengthLimit": "Length should be between 1 to 200 characters", + "letterTypeLimit": "Only Chinese characters, alphanumeric characters and underscore are allowed" + }, + "catalogName": { + "required": "Please enter directory name", + "lengthLimit": "Length should be between 1 to 200 characters", + "letterTypeLimit": " Only Chinese characters, alphanumeric characters and underscore are allowed" + }, + "targetScriptPath": { + "required": "Please choose target script path" + } + }, + "nodeType": "Node type", + "scriptType": "Script type", + "targetFolder": "Target directory" + }, + "database": { + "contextMenu": { + "db": { + "copyName": "Copy database name", + "pasteName": "Copy database name & paste to script", + "listSort": "Sort by time", + "liststringSort": "Sort by first character" + }, + "tb": { + "queryTable": "Query table", + 
"deleteTable": "Drop table", + "viewTable": "View table structure", + "exportTable": "Export table", + "copyName": "Copy table name", + "pasteName": "Copy table name & paste to script", + "copyAllColumns": "Copy all the columns of table" + }, + "field": { + "copyColumnsName": "Copy column name", + "pasteName": "Copy column name & paste to script" + } + }, + "partitionsTitle": "Partition size", + "notPosition": "Failed to locate last operation, please use refresh after right-click" + }, + "fnSideBar": { + "contextMenu": { + "isOwn": { + "addFunction": "New function", + "addFolder": "New directory", + "share": "Share" + }, + "isLeaf": { + "copyName": "Copy function name", + "pasteName": "Copy function name & paste to script" + }, + "isOwnAndLeaf": { + "updateToExpired": "Set as expired", + "viewSharedUsers": "View shared users" + } + }, + "warning": { + "folderNameExists": "Duplicated directory name", + "functionNameExists": "Duplicated function name", + "functionShared": "This function has been shared, please set it as expired before deletion!", + "noEmpty": "Please manually clear sub directories/functions and then retry!" + }, + "success": { + "load": "Successfully loaded UDF!", + "cancelLoading": "Canceled loading UDF!", + "addUdf": "Successfully created UDF!", + "addFolder": "Successfully created new directory!", + "shareFunction": "Successfully shared function", + "updateShareUser": "Successfully updated shared users!", + "functionExpired": "Successfully set function as expired, please view it in Shared Functions!" + }, + "confirm": { + "title": "Notice", + "content": "

This operation will affect all the shared users, continue or not?

" + } + }, + "hdfs": { + "contextMenu": { + "openToTab": "Open as a tab", + "copyPath": "Copy path", + "addCatalog": "New directory", + "importToHive": "Export to Hive", + "exportToWorkSpace": "Export to Workspace" + }, + "dialogTitle": { + "exportToWorkSpace": "Export to Workspace" + }, + "warning": { + "noRootPath": "Failed to request HDFS root directory, please refresh to retry!" + } + }, + "container": { + "title": { + "editorName": "Editor" + }, + "closeDropDown": { + "others": "Close Others", + "all": "Close All", + "left": "Close Left", + "right": "Close Right" + }, + "closeHint": "Close Hint", + "footer": { + "cancel": "Cancel", + "close": "Close", + "save": "Save", + "saveAs": "Save As" + }, + "tips": "What is Scriptis?\nScriptis is a one-stop interactive data analysis & exploration tool built by WeDataSphere at Webank. Using Linkis as its kernel, Scriptis provides functionalities including multiple computation engines(Spark, Hive, TiSpark...), Hive database management, resource management(such as Yarn resource and server resource), application management and user resource management(such as UDF and variables). \nScriptis?\n1. Select a directory in workspace, and create a sub directory; \n2. Right-click a directory => Create script; \n3. Select script type, for example: SQL, Pyspark, HQL, etc... \n4. Edit script, click & run, wait for result sets.", + "warning": { + "noDBInfo": "Failed to request database/table information, word association in script may not be working well! Please refresh to retry!", + "noUDF": "Failed to request UDF and Method information, word association in script may not be working well! Please refresh to retry!", + "noSelectedScript": "No script selected, please select one first and retry!" + }, + "notice": { + "developerWarning": { + "title": "Warning", + "desc": "Warning from developers: Requesing add API must pass id and filename as parameters!" 
+ }, + "unSupport": { + "title": "Warning", + "desc": "Sorry, the system does not support this file format!" + } + }, + "removeWork": { + "normal": "This script has been updated, save or not?", + "readOnly": "This read-only script has been updated, save it as another file to workspace or not? ", + "temporary": "This temporary script has been updated, save it as another file or not?" + } + }, + "editorDetail": { + "navBar": { + "undo": "Undo", + "redo": "Redo", + "play": "Run", + "stop": "Stop", + "save": "Save", + "config": "Configuration" + }, + "warning": { + "running": "Script execution in progress, please hold on!", + "invalidArgs": "Please check the correctness of the input of configuration parameters!", + "emptyCode": "Cannot execute empty script!" + } + }, + "history": { + "columns": { + "taskID": { + "title": "Task ID" + }, + "runningTime": { + "title": "Execution Time" + }, + "createDate": { + "title": "Executed At" + }, + "status": { + "title": "Status", + "status": { + "succeed": "Succeeded", + "running": "Running", + "timeout": "Timeout", + "inited": "Waiting", + "scheduled": "Applying Resources", + "failed": "Failed", + "cancel": "Canceled" + } + }, + "data": { + "title": "Code" + }, + "failedReason": { + "title": "Key information" + }, + "control": { + "title": "Handle", + "view": "View", + "download": "Download log" + } + }, + "success": { + "open": "Successfully open", + "download": "Successfully downloaded, please check from local download directory!" 
+ } + }, + "saveAs": { + "header": "Save as script", + "formItems": { + "fileName": { + "label": "File name" + }, + "selectedPath": { + "label": "Choose path" + } + }, + "rules": { + "fileName": { + "required": "Please enter the file name", + "lengthLimit": "Length between 1 to 200 characters", + "letterTypeLimit": "Only Chinese characters, alphanumeric characters and underscore are allowed" + }, + "selectedPath": { + "required": "Please choose a directory" + } + }, + "warning": { + "invalid": "Please check the items failed to pass validations!" + }, + "success": { + "saveScript": "Successfully saved as script, please refresh directory tree manually!" + } + }, + "tabs": { + "progress": "Progress", + "result": "Execution Result", + "log": "Execution Log", + "history": "History", + "diagnosis": "Intelligent Diagnosis" + }, + "notice": { + "unsave": { + "title": "Notice", + "desc": "Your temporary script has been updated, please mind to save!" + }, + "sendStart": { + "title": "Execution Notice", + "render": "Start to execute the script" + }, + "querySuccess": { + "title": "Execution result notice", + "render": "Successfully executed script, time elapsed:" + }, + "notice": { + "title": "Execution Result Notice" + }, + "kill": { + "title": "Execution Notice", + "desc": "Stopped executing this script" + } + }, + "confirmModal": { + "title": "Save as", + "content": "

This temporary script has been updated!

Save it or not?

", + "okText": "Save As", + "cancelText": "Cancel" + }, + "setting": { + "header": "Settings", + "customVariable": { + "title": "Customized Variables" + }, + "envVariable": { + "title": "Environment Variables" + }, + "runTimeArgs": { + "settingTitle": "Command line Arguments", + "placeholder": "Please use space to separate multiple arguments" + } + }, + "apiPublish": { + "title": "PublishAPI", + "button": { + "addApi": "API Add", + "updateApi": "API Update" + }, + "addApiModal": { + "modalTitle": "Add API", + "contentTitle": "Base Info", + "paramConfirmTitle": "Params Confirm", + "apiName": "API Name", + "apiPath": "API Path", + "protocol": "Protocol", + "requestType": "Request Type", + "tag": "Tag", + "visible": "Visible", + "describe": "Describe", + "nextStep": "Next", + "back": "Back", + "cancel": "Cancel", + "approvalName": "Approval Name", + "applyUser": "Apply User", + "proxyUser": "Proxy User", + "confirm": "Confirm", + "comment": "Comment" + }, + "updateApiModal": { + "modalTitle": "Update API", + "apiVersionUpgrade": "New Version", + "paramConfirm": "Please confirm param" + }, + "paramTable": { + "paramName": "Param Name", + "paramType": "Param Type", + "require": { + "title": "Require", + "yes": "Yes", + "no": "No", + "hide": "Hide" + }, + "defaultValue": "Default Value", + "displayName": "Display Name", + "describe": "Describe" + }, + "rule": { + "nameRule": "Please input api name!", + "pathRule": "Please input api path!", + "pathRegRule": "Api path illegal!", + "requestTypeRule": "Please select request type!", + "protocolRule": "Please select protocol!", + "valueRule": "Started with alphabetic characters, only alphanumeric and underscore are allowed!", + "contentLengthLimit": "Length limit 155", + "contentLengthLimit50": "Length limit 50", + "contentLengthLimitTwo": "Length limit 200", + "pathRepeat": "Api path exists", + "nameRepeat": "Api name exists", + "chineseCheck": "Chainese only", + "chineseName": "Chinese name of API" + }, + "visible": { + 
"workspace": "Workspace", + "personal": "Personal", + "public": "Public", + "grantView": "Grant View" + }, + "notice": { + "publishSuccess": "Publish Success" + }, + "tagPlaceholder": "tag,click enter add" + } + } + } +} \ No newline at end of file diff --git a/web/src/apps/scriptis/i18n/index.js b/web/src/apps/scriptis/i18n/index.js new file mode 100644 index 000000000..abe2fb5cf --- /dev/null +++ b/web/src/apps/scriptis/i18n/index.js @@ -0,0 +1,7 @@ +const en = require('./en.json') +const zh = require('./zh.json') + +export default { + 'zh-CN': zh, + en +} \ No newline at end of file diff --git a/web/src/apps/scriptis/i18n/zh.json b/web/src/apps/scriptis/i18n/zh.json new file mode 100644 index 000000000..1b003fdf2 --- /dev/null +++ b/web/src/apps/scriptis/i18n/zh.json @@ -0,0 +1,810 @@ +{ + "message": { + "scripts": { + "emptyText": "暂无数据", + "publishing": "发布中", + "tagPlaceholder": "标签,按 enter 创建", + "script": "脚本", + "editor": "修改", + "ok": "确认", + "failedNotice": "验证项未通过,请检查后再试!", + "cancel": "取消", + "notSave": "只读状态不可保存!", + "saveSuccess": "保存成功!", + "saveErr": "保存失败!", + "createdTitle": "建表向导", + "saveToWidget": "保存widget到解决方案", + "contextMenu": { + "openToTab": "打开到侧边", + "copyPath": "复制路径", + "addCatalog": "新建目录", + "addScript": "新建脚本", + "importToHive": "导入至Hive", + "importToHdfs": "导入至HDFS", + "copyscript": "复制脚本", + "stickscript": "粘贴" + }, + "warning": { + "getRootPath": "未请求到工作空间根目录,请刷新重试!", + "scriptChanged": "脚本已发生改变,请保存后再试!", + "empty": "脚本内容为空!" + }, + "error": { + "fileExists": "该文件已经存在!" 
+ }, + "boyondQuota": { + "title": "警告", + "desc": "'您已打开20个脚本(超出最大限额),请关闭其他脚本再打开!'" + }, + "createTable": { + "action": "操作", + "delete": "删除", + "baseAttr": "表基本属性", + "add": "新增", + "partitionAttr": "分区属性", + "selectResource": "请选择来源字段", + "next": "下一步", + "nameNotice": "仅支持中文、字母、数字和下划线", + "zm": "字母", + "daName": "数据库名", + "tableName": "表名", + "tablePlaceholder": "字母开头,支持字母、数字和下划线", + "tableOtherName": "表别名", + "aliasPlaceholder": "支持中文、字母、数字和下划线", + "use": "用途", + "emptyPlaceholder": "支持中文、字母、数字和下划线(可为空)", + "product": "所属产品", + "project": "所属项目", + "tableDesc": "表描述", + "selectDbName": "请选择库名", + "inputTableName": "请输入表名", + "inputOtherName": "请输入别名", + "letter": "大小写字母", + "moduleAttr": "表模型属性", + "partitionType": "分区类型", + "partition": "分区", + "unartition": "非分区", + "lifecycle": "生命周期", + "forever": "永久", + "day": "当天有效", + "week": "一周有效", + "moon": "一月有效", + "midYear": "半年有效", + "moduleRank": "模型层级", + "resource": "原始数据层", + "detailLevel": "明细数据层", + "sumData": "汇总数据层", + "appData": "应用数据层", + "actionType": "使用方式", + "noeWrite": "一次写多次读", + "ZSGC": "增删改查", + "FG": "多次覆盖写", + "OED": "一次写偶尔读", + "isUse": "外部是否使用", + "isDwsExeis": "是否需要在dws之外的系统上使用", + "selectPartitionType": "请选择分区类型", + "selectLife": "请选择生命周期", + "selectModuleLevel": "请选择模型层级", + "selectUseType": "请选择使用方式", + "selectIsUse": "请选择外部是否使用", + "canned": "该表已存在,请输入其他表名", + "type": "类型", + "selectType": "请选择类型", + "path": "路径", + "FGF": "分隔符", + "BMGS": "编码格式", + "XZBMGS": "请选择编码格式", + "XDF": "限定符", + "WXDF": "无限定符", + "HXWBT": "首行为表头", + "HQBZDSJ": "获取表字段数据", + "XZWJLJ": "请选择文件路径", + "GXMLDR": "共享目录导入", + "DR": "导入", + "DH": "逗号", + "FH": "分号", + "ZBF": "制表符", + "KG": "空格", + "SYH": "双引号", + "DYH": "单引号", + "GLWJ": "过滤文件夹", + "LYSX": "来源属性", + "MBZD": "目标表字段与源字段", + "XZK": "选择库", + "XZB": "选择表", + "SJSXLY": "数据属性来源", + "DRFS": "导入方式", + "XJ": "新建", + "FHSYB": "返回上一步", + "SCDDL": "生成DDL语句", + "JB": "建表", + "DDLYJ": "DDL语句", + "FZZT": "复制至粘贴板", + "DDLYFZ": "DDL语句已复制至粘贴板!", + "fieldsName": "字段名", + 
"isPrimary": "主键", + "fieldsType": "字段类型", + "length": "长度/设置", + "fieldsAlias": "字段别名", + "alias": "别名", + "sourceFields": "来源字段", + "rule": "字段规则", + "express": "字段表达式", + "comment": "描述", + "partitionValue": "分区值", + "partitionAlias": "分区别名", + "fieldsNameRequire": "字段名称不能为空", + "fieldsNameRule": "仅支持字母数字和下划线" + }, + "tableDetails": { + "BZBSX": "表基本属性", + "BZDXX": "表字段信息", + "BTJXX": "表统计信息", + "DBN": "数据库名", + "TN": "表名", + "BBM": "表别名", + "FQM": "分区表", + "CJYH": "创建用户", + "CJSI": "创建时间", + "ZHFWSJ": "最后访问时间", + "BMS": "表描述", + "BMXSX": "表模型属性", + "SMZQ": "生命周期", + "MXCJ": "模型层级", + "SYFS": "使用方式", + "WBSFSY": "外部是否使用", + "BYYSX": "表应用属性", + "SXCP": "所属产品", + "SSXM": "所属项目", + "YT": "用途", + "SSZDMC": "搜索字段名称...", + "XH": "序号", + "ZDM": "字段名", + "ZDLX": "字段类型", + "BM": "别名", + "SFZJ": "是否主键", + "SFFQ": "是否分区", + "ZDGZ": "字段规则", + "MS": "描述", + "FQXX": "分区信息", + "DJFQCK": "请点击根分区查看分区大小", + "WFQSJ": "暂无表分区数据", + "BXX": "表信息", + "BZD": "字段数", + "BDX": "表大小", + "WJM": "文件数", + "FQS": "分区数", + "DX": "大小", + "FQDX": "分区大小:", + "WJS": "文件数:", + "YES": "对", + "GSHJX": "进行格式化成", + "ZJXYGE": "组件需要的格式" + + }, + "logView": { + "taskId": "任务ID:", + "search": "搜索", + "filter": "过滤", + "error": "错误", + "inputTaskId": "请输入有效的任务ID!" 
+ }, + "functionSetting": { + "HSM": "函数名称", + "HSLX": "函数类型", + "TY": "通用", + "ZDYHS": "自定义函数", + "B": "jar包", + "JBLU": "脚本路径", + "ZCGS": "注册格式", + "ZCHS": "注册函数", + "FHLX": "返回类型", + "CSLX": "参数类型", + "YLLX": "预览", + "SYGS": "使用格式", + "SRLX": "输入类型", + "HSMS": "函数描述", + "MRJZ": "默认加载", + "XZHS": "新增函数", + "LJCJ": "立即创建", + "QSRMC": "请输入名称", + "CDZ": "长度在1到100个字符", + "BXYZMKT": "必须以字母开头,且只支持字母、数字、下划线和中文", + "QZSXZYZLX": "请至少选择一种类型", + "QSRWZLJ": "请输入完整的路径", + "KHDBJZPP": "括号代表精准匹配", + "HZMZC": "后缀名只支持jar,且jar包名仅支持英文,数字,下划线!", + "ZCPYSCA": "后缀名只支持py、scala!", + "BHJZCYW": "包名仅支持英文", + "XHX": "下划线", + "SRZCHS": "请输入注册函数", + "SRFHLX": "请输入返回类型", + "SRCSLX": "请输入参数类型", + "XGHS": "修改函数", + "XG": "修改" + }, + "functionShare": { + "ZZGX": "正在对{name}函数进行共享操作", + "HSLX": "函数类型", + "GXYH": "共享用户", + "SRGXYH": "请输入要共享的用户,以逗号分隔", + "XZGXHSLX": "请选择该共享函数的类型!", + "ZSXZYWGXYH": "请填入至少一位需要共享的用户!" + }, + "uploadDialog": { + "SCJBWJ": "上传脚本文件", + "TWJDSQ": "将文件拖到此处,或点击上传", + "PATH": "路径:", + "QKLB": "清空列表", + "LIMIT2M": "上传脚本文件不得超过2MB!", + "LIMIT100M": "上传脚本文件不得超过100MB!", + "ZDSC": "此操作将中断文件上传,是否继续?", + "TS": "提示", + "ZJGB": "直接关闭", + "QX": "取消", + "QXBBSCCK": "取消关闭上传窗口", + "SCCG": "文件{name}上传成功!", + "WJDXCCXE": "文件大小超出限额", + "WJYCZ": "文件已存在,请选择其它文件或选择其它文件夹!", + "WJMBHF": "文件名称不合法,仅支持以字母、数字、中文、下划线且带后缀的命名!", + "SCZQK": "文件上传中,无法清空上传列表!" 
+ }, + "importToHive": { + "MC": "名称", + "ZDMC": "字段名称不能为空", + "JZCZM": "仅支持字母数字和下划线", + "QSRZDM": "请输入字段名", + "LX": "类型", + "GS": "格式", + "QXZGS": "请选择日期格式", + "CD": "长度", + "JD": "精度", + "XSW": "小数位数", + "SRZDZS": "请输入字段注释", + "DRWJ": "导入文件", + "BZONE": "步骤1", + "BZTOW": "步骤2", + "WJDR": "从文件导入数据", + "DRZMRB": "导入至默认表", + "LY": "来源", + "QXZLX": "请选择类型", + "LJ": "路径", + "GSH": "格式化", + "FGF": "分隔符", + "BMGS": "编码格式", + "QXZBMGS": "请选择编码格式", + "XDF": "限定符", + "WXDF": "无限定符", + "SHWBT": "首行为表头", + "SJK": "数据库名", + "QSRKM": "请输入库名", + "SJB": "数据表名", + "SRSJBM": "请输入数据表名", + "FQ": "分区", + "FX": "复写", + "XZFQ": "新增分区", + "SHEETB": "sheet表", + "ZG": "字段", + "XYB": "下一步", + "SYB": "上一步", + "TJ": "提交", + "LXWCHAR": "类型为char或者varchar的字段长度应为大于0的正整数", + "ZDJDSRYW": "类型为decimal的字段精度输入有误", + "ZDXSWYDY": "类型为decimal的字段小数位数应为大于0的正整数", + "ZDZSZCZW": "字段注释仅支持中文、大小写字母和下划线,长度不得超过100字符", + "SJKMBNWK": "数据库名不能为空", + "BMBNWK": "表名不能为空", + "BMJZCZMKT": "表名仅支持以字母开头,名称包含大小写字母和下划线,长度为0~100", + "FQMBNWK": "分区名不能为空", + "FQMZCSZ": "分区名只支持数字、字母和-", + "FQZBNWK": "分区值不能为空", + "FQZZZCSZ": "分区值只支持数字", + "GXMLDR": "共享目录导入", + "HDFSDR": "HDFS导入", + "DH": "逗号(,)", + "FH": "分号(;)", + "ZBF": "制表符(\\t)", + "KG": "空格", + "SYH": "双引号", + "DYH": "单引号", + "WJZZZXDR": "文件正在执行导入,请稍后再试", + "FQZCF": "分区值重复,请确认是否复写", + "TSWST": "提示:无法操作视图表, 请选择其它表导入", + "WFCZSTB": "无法操作视图表", + "QXZQTBDR": "请选择其它表导入", + "BMYCZDQSJK": "表名已存在当前数据库", + "ZBZCDJFQ": "暂不支持多级分区,请选择其它表", + "QQURFQ": "请确认分区", + "HQFQSB": "获取分区信息失败", + "ZQDRWJLJ": "请正确选择导入文件路径", + "QQRSJKMCXX": "请确认数据库名称填写信息", + "QQRSFCF": "请确认是否复写" + }, + "hiveTableExport": { + "DCB": "导出表", + "SZDCCS": "步骤1:设置导出参数", + "SZDCLJ": "步骤2:设置导出路径", + "SJKM": "数据库名", + "QSRKM": "请输入库名", + "SJBM": "数据表名", + "ZDXX": "字段信息", + "FQXX": "分区信息", + "DCGS": "导出格式", + "FGF": "分隔符", + "BMGS": "编码格式", + "XZBMGS": "请选择编码格式", + "KZTH": "空值替换", + "SXWBT": "首行为表头", + "LX": "类型", + "QXZLX": "请选择类型", + "DCZ": "导出至", + "WJM": "文件名", + "TRDCWJMC": "请填入导出文件名称", + "FXMS": "复写模式", + "ZJ": "追加", + 
"FX": "复写", + "BM": "sheet表名", + "QSR": "请填入", + "XYB": "下一步", + "SYB": "上一步", + "TJ": "提交", + "WFCZST": "无法操作视图表,请选择其它表导出", + "ZBZCDBDC": "暂不支持多分区表导出,请选择其它表导出", + "GXML": "共享目录", + "DH": "逗号(,)", + "FH": "分号(;)", + "ZBF": "制表符(\\t)", + "KG": "空格", + "KZFC": "空字符串", + "XZSJK": "请选择数据库", + "XZB": "请选择表", + "XZFGF": "请选择分隔符", + "XZYGFQBDC": "请选择一个分区表导出", + "ZSXZYGZD": "请至少选择一个字段", + "XZDCGS": "请选择导出格式", + "XZXYJKZTHDZ": "请选择需要将空值替换的值", + "XZDCLJDLX": "请选择导出路径的类型", + "XZDCLJ": "请选择导出路径", + "DRDCDWJ": "请填入导出的文件名称", + "CDZ": "长度在3~100个字符", + "WJMZZCZW": "文件名只支持中文、字母、数字和下划线", + "CBDC": "此表导出中,请稍后再试或者选择其他表", + "QSHZSS": "请稍后再试或者选择其他表", + "XZFQ": "请选择分区", + "BDXCG": "表大小超过5GB,请使用其它方式导出或选择其它表" + }, + "hiveTableDesc": { + "searchName": "搜索字段名称...", + "tableDetail": "表详情", + "noData": "暂无表分区数据", + "tableColumns": "表字段", + "columnsName": "字段名", + "columnsType": "字段类型", + "tablePartitions": "表分区", + "dbName": "数据库名", + "tableName": "数据表名", + "createUser": "创建用户", + "createTtime": "创建时间", + "lastTime": "最后访问时间", + "tableSize": "表大小", + "size": "大小" + }, + "deleteType": { + "function": "函数", + "folder": "文件夹", + "file": "文件", + "common": "通用", + "catalog": "目录", + "script": "脚本文件", + "engine": "引擎", + "task": "任务", + "engineAndTask": "引擎和任务" + }, + "constants": { + "add": "新建", + "cancel": "取消", + "confirm": "确认", + "changeLanguage": "切换语言", + "detail": "详情", + "description": "描述", + "delete": "删除", + "download": "下载", + "edit": "编辑", + "export": "导出", + "fail": "失败", + "noDataText": "暂无数据", + "node": "节点", + "refresh": "刷新", + "rename": "重命名", + "reset": "重置", + "remove": "移除", + "submit": "确定", + "search": "搜索", + "save": "保存", + "upload": "上传", + "update": "修改", + "warning": { + "api": "接口请求中,请稍候!", + "data": "数据请求中,请稍候!", + "waiting": "请等待接口返回!", + "biLoading": "正在和Visualis系统通讯,请稍候!", + "comingSoon": "尚未开源,敬请期待!" 
+ }, + "success": { + "delete": "删除成功!", + "update": "修改成功!", + "add": "新建成功!", + "save": "保存成功", + "stick": "粘贴成功", + "prohibit": "禁止复制此类型" + }, + "error": { + "refresh": "刷新出错,请重试!", + "fileExists": "该文件已经存在", + "folderExists": "该文件夹已经存在", + "functionExists": "该函数已经存在" + }, + "logPanelList": { + "fullScreen": "全屏", + "releaseFullScreen": "取消全屏", + "min": "最小化", + "releaseMin": "取消最小化" + }, + "time": { + "second": "秒", + "minute": "分钟", + "hour": "小时", + "day": "天" + } + }, + "navList": { + "dataStudio": "数据开发", + "workSpace": "工作空间", + "database": "数据库", + "udfFunction": "UDF函数", + "methodFunction": "方法函数", + "hdfs": "HDFS" + }, + "newDialog": { + "catalog": "目录", + "scriptName": "名称", + "catalogName": "名称", + "targetScriptPath": "创建路径", + "placeholder": "请输入脚本名称", + "required": "请输入目录名称", + "rules": { + "scriptName": { + "required": "请输入脚本名称", + "lengthLimit": "长度在1到200个字符", + "letterTypeLimit": "仅支持中文、大小写字母、数字和下划线" + }, + "catalogName": { + "required": "请输入目录名称", + "lengthLimit": "长度在1到200个字符", + "letterTypeLimit": "仅支持中文、大小写字母、数字和下划线" + }, + "targetScriptPath": { + "required": "请选择目标脚本路径" + } + }, + "nodeType": "节点类型", + "scriptType": "脚本类型", + "targetFolder": "目标文件夹" + }, + "database": { + "contextMenu": { + "db": { + "copyName": "复制库名", + "pasteName": "复制库名并粘贴至脚本", + "listSort": "表按时间排序", + "liststringSort": "表按首字符排序" + }, + "tb": { + "queryTable": "查询表", + "deleteTable": "删除表", + "viewTable": "查看表结构", + "exportTable": "导出表", + "copyName": "复制表名", + "pasteName": "复制表名并粘贴至脚本", + "copyAllColumns": "复制表全字段" + }, + "field": { + "copyColumnsName": "复制字段名", + "pasteName": "复制库名并粘贴至脚本" + } + }, + "partitionsTitle": "分区大小", + "notPosition": "无法定位上一次操作,请使用右键刷新功能" + }, + "fnSideBar": { + "contextMenu": { + "isOwn": { + "addFunction": "新增函数", + "addFolder": "新增文件夹", + "share": "共享" + }, + "isLeaf": { + "copyName": "复制函数名", + "pasteName": "复制函数并粘贴至脚本" + }, + "isOwnAndLeaf": { + "updateToExpired": "修改为过期", + "viewSharedUsers": "查看已共享的用户" + } + }, + "warning": { + 
"folderNameExists": "存在同名的文件夹", + "functionNameExists": "存在同名的函数", + "functionShared": "该函数已被共享,请将函数设置为过期再删除!", + "noEmpty": "请手动删除子文件夹或函数后再试!" + }, + "success": { + "load": "加载udf函数成功!", + "cancelLoading": "已取消加载udf函数!", + "addUdf": "新建udf函数成功!", + "addFolder": "新建文件夹成功!", + "shareFunction": "函数已共享成功", + "updateShareUser": "修改共享用户成功!", + "functionExpired": "函数已设置为过期,请到共享函数中查看!" + }, + "confirm": { + "title": "提示", + "content": "

此操作将会对已共享的用户造成影响,是否继续?
" + } + }, + "hdfs": { + "contextMenu": { + "openToTab": "打开到侧边", + "copyPath": "复制路径", + "addCatalog": "新建目录", + "importToHive": "导入至Hive", + "exportToWorkSpace": "导出至工作空间" + }, + "dialogTitle": { + "exportToWorkSpace": "导出至工作空间" + }, + "warning": { + "noRootPath": "未请求到HDFS根目录,请刷新重试!" + } + }, + "container": { + "title": { + "editorName": "编辑器" + }, + "closeDropDown": { + "others": "关闭其他", + "all": "关闭全部", + "left": "关闭左边", + "right": "关闭右边" + }, + "closeHint": "关闭提示", + "footer": { + "cancel": "取消", + "close": "直接关闭", + "save": "保存", + "saveAs": "另存为" + }, + "tips": "什么是Scriptis?\nScriptis是微众银行微数域(WeDataSphere)打造的一站式交互式数据探索分析工具,以任意桥(Linkis)做为内核,提供多种计算存储引擎(如Spark、Hive、TiSpark等)、Hive数据库管理功能、资源(如Yarn资源、服务器资源)管理、应用管理和各种用户资源(如UDF、变量等)管理的能力。\nScriptis?\n1. 选中工作空间的目录,创建文件夹;\n2. 右键某个文件夹 =>新建脚本;\n3. 选择脚本类型,如:SQL、Pyspark、HQL等;\n4. 书写脚本,点击执行,生成结果集。", + "warning": { + "noDBInfo": "未获取到数据库表信息,脚本联想词功能可能存在异常!可刷新重试!", + "noUDF": "未获取到UDF和方法函数信息,脚本联想词功能可能存在异常!可刷新重试!", + "noSelectedScript": "未选中脚本,请选择一个脚本再试!" + }, + "notice": { + "developerWarning": { + "title": "警告", + "desc": "开发者警告:调用add接口必须传入id和filename!" + }, + "unSupport": { + "title": "警告", + "desc": "很抱歉,系统暂不支持打开该格式文件!" + } + }, + "removeWork": { + "normal": "该脚本已发生改变,是否保存?", + "readOnly": "该只读脚本已发生改变,是否另存至工作空间?", + "temporary": "该临时脚本已发生改变,是否另存为文件?" + } + }, + "editorDetail": { + "navBar": { + "undo": "撤销", + "redo": "恢复", + "play": "运行", + "stop": "停止", + "save": "保存", + "config": "配置" + }, + "warning": { + "running": "脚本运行中,请稍候!", + "invalidArgs": "请检查配置参数输入是否正确!", + "emptyCode": "无法执行空代码!" 
+ } + }, + "history": { + "columns": { + "taskID": { + "title": "任务ID" + }, + "runningTime": { + "title": "执行时间" + }, + "createDate": { + "title": "启动时间" + }, + "status": { + "title": "状态", + "status": { + "succeed": "成功", + "running": "运行", + "timeout": "超时", + "inited": "排队中", + "scheduled": "资源申请中", + "failed": "失败", + "cancel": "取消" + } + }, + "data": { + "title": "代码" + }, + "failedReason": { + "title": "关键信息" + }, + "control": { + "title": "操作", + "view": "查看", + "download": "日志下载" + } + }, + "success": { + "open": "打开成功", + "download": "下载成功,请到本地的download文件夹查看!" + } + }, + "saveAs": { + "header": "另存为脚本", + "formItems": { + "fileName": { + "label": "文件名" + }, + "selectedPath": { + "label": "选择路径" + } + }, + "rules": { + "fileName": { + "required": "请输入文件名", + "lengthLimit": "长度在1到200个字符", + "letterTypeLimit": "仅支持中文、大小写字母、数字和下划线" + }, + "selectedPath": { + "required": "请选择一个目录" + } + }, + "warning": { + "invalid": "请查看未通过验证项!" + }, + "success": { + "saveScript": "另存脚本成功,请手动刷新目录树!" + } + }, + "tabs": { + "progress": "进度", + "result": "运行结果", + "log": "运行日志", + "history": "历史", + "diagnosis": "智能诊断" + }, + "notice": { + "unsave": { + "title": "提醒", + "desc": "您的临时脚本已修改,请注意保存!" + }, + "sendStart": { + "title": "运行提示", + "render": "开始执行脚本" + }, + "querySuccess": { + "title": "执行结果提示", + "render": "脚本执行成功,共耗时" + }, + "notice": { + "title": "执行结果提示" + }, + "kill": { + "title": "运行提示", + "desc": "已经停止执行脚本" + } + }, + "confirmModal": { + "title": "另存为", + "content": "

该临时脚本已发生改变!
是否另存?

", + "okText": "另存为", + "cancelText": "取消" + }, + "setting": { + "header": "配置", + "customVariable": { + "title": "自定义参数" + }, + "envVariable": { + "title": "环境变量" + }, + "runTimeArgs": { + "settingTitle": "命令行参数", + "placeholder": "请使用空格分开多个参数" + } + }, + "apiPublish": { + "title": "发布为数据API", + "button": { + "addApi": "新建数据API", + "updateApi": "更新数据API" + }, + "addApiModal": { + "modalTitle": "数据服务API新增", + "contentTitle": "基本信息补充", + "paramConfirmTitle": "参数确认", + "apiName": "API 名称", + "apiPath": "API Path", + "protocol": "协议", + "requestType": "请求方式", + "tag": "标签", + "visible": "可见范围", + "describe": "描述", + "cancel": "取消", + "nextStep": "下一步", + "approvalName": "审批单名称", + "applyUser": "申请用户", + "proxyUser": "代理执行用户", + "back": "上一步", + "confirm": "确认", + "comment": "备注" + }, + "updateApiModal": { + "modalTitle": "数据服务API更新", + "apiVersionUpgrade": "生成新版本", + "paramConfirm": "请确认参数" + }, + "paramTable": { + "paramName": "参数名称", + "paramType": "参数类型", + "require": { + "title": "是否必填", + "yes": "是", + "no": "否", + "hide": "隐藏" + }, + "defaultValue": "默认值", + "displayName": "展示名", + "describe": "描述" + }, + "rule": { + "nameRule": "请填写API名称", + "pathRule": "请填写API路径", + "pathRegRule": "API路径不合法", + "requestTypeRule": "请选择请求方式", + "protocolRule": "请选择协议", + "valueRule": "必须以字母开头,且只支持字母、数字、下划线!", + "contentLengthLimit": "长度不能大于150", + "contentLengthLimit50": "长度不能大于50", + "contentLengthLimitTwo": "长度不能大于200", + "pathRepeat": "API路径已存在", + "nameRepeat": "API名称已存在", + "chineseCheck": "只支持中文", + "chineseName": "API中文名称" + }, + "visible": { + "workspace": "工作空间", + "personal": "个人", + "public": "公开", + "grantView": "授权可见" + }, + "notice": { + "publishSuccess": "发布成功" + }, + "tagPlaceholder": "标签,按 enter 创建" + } + } + } +} diff --git a/web/src/apps/scriptis/module/dateReport/index.js b/web/src/apps/scriptis/module/dateReport/index.js new file mode 100644 index 000000000..359936970 --- /dev/null +++ b/web/src/apps/scriptis/module/dateReport/index.js @@ -0,0 +1,23 @@ +/* + 
* Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'DateReport', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/scriptis/module/dateReport/index.vue b/web/src/apps/scriptis/module/dateReport/index.vue new file mode 100644 index 000000000..f42c3f9aa --- /dev/null +++ b/web/src/apps/scriptis/module/dateReport/index.vue @@ -0,0 +1,27 @@ + + + diff --git a/web/src/apps/scriptis/module/workbench/image/bg-img.png b/web/src/apps/scriptis/module/workbench/image/bg-img.png new file mode 100644 index 000000000..455f8299d Binary files /dev/null and b/web/src/apps/scriptis/module/workbench/image/bg-img.png differ diff --git a/web/src/apps/scriptis/module/workbench/image/hive.png b/web/src/apps/scriptis/module/workbench/image/hive.png new file mode 100644 index 000000000..bcb28b9e6 Binary files /dev/null and b/web/src/apps/scriptis/module/workbench/image/hive.png differ diff --git a/web/src/apps/scriptis/module/workbench/image/python.png b/web/src/apps/scriptis/module/workbench/image/python.png new file mode 100644 index 000000000..6706e6cf1 Binary files /dev/null and b/web/src/apps/scriptis/module/workbench/image/python.png differ diff --git a/web/src/apps/scriptis/module/workbench/image/scala.png b/web/src/apps/scriptis/module/workbench/image/scala.png new file mode 100644 index 000000000..857e76797 Binary files /dev/null and 
b/web/src/apps/scriptis/module/workbench/image/scala.png differ diff --git a/web/src/apps/scriptis/module/workbench/image/spark.png b/web/src/apps/scriptis/module/workbench/image/spark.png new file mode 100644 index 000000000..4db650758 Binary files /dev/null and b/web/src/apps/scriptis/module/workbench/image/spark.png differ diff --git a/web/src/apps/scriptis/module/workbench/index.js b/web/src/apps/scriptis/module/workbench/index.js new file mode 100644 index 000000000..f4a5317ba --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/index.js @@ -0,0 +1,50 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +export default { + name: 'Workbench', + // 规范模块监测什么事件 + events: ['Workbench:add', 'Workbench:deleteDirOrFile', 'Workbench:save', 'Workbench:socket', 'Workbench:openFile', 'Workbench:run', 'Workbench:pasteInEditor', 'Workbench:saveAs', 'Workbench:updateTab', 'Workbench:setResultCache', 'Workbench:setResult', 'Workbench:insertValue', 'Workbench:checkExist', 'Workbench:getWorksLangList', 'Workbench:setEditorPanelSize', 'Workbench:setTabPanelSize', 'Workbench:updateFlowsTab', 'Workbench:resetScriptData', 'Workbench:removeWork', 'Workbench:updateFlowsNodeName', 'Workbench:isOpenTab'], + // 规范模块能够触发什么事件 + dispatchs: { + Workbench: ['save', 'saveAs', 'run', 'add', 'setResultCache', 'setResult', 'insertValue', 'openFile', 'checkExist', 'setEditorPanelSize', 'setTabPanelSize', 'resetScriptData', 'removeWork', 'repeatWork'], + IndexedDB: [ + 'appendLog', 'clearLog', 'getLog', 'changeLogKey', + 'updateHistory', 'appendHistory', 'getHistory', 'clearHistory', 'changeHistoryKey', + 'updateResult', 'appendResult', 'getResult', 'clearResult', 'changResultKey', + 'updateProgress', 'clearProgress', 'getProgress', 'changProgressKey', + 'getTabs', 'recordTab', 'toggleTab', 'removeTab', 'changeTabKey', + 'getGlobalCache', 'setGlobalCache', 'updateGlobalCache', 'removeGlobalCache', 'getTree', 'appendTree' + ], + WebSocket: ['init', 'send'], + WorkSidebar: ['setHighLight', 'showTree', 'revealInSideBar'], + HdfsSidebar: ['setHighLight', 'showTree'], + HiveSidebar: ['getAllDbsAndTables', 'deletedAndRefresh', 'showHive', 'getAllowMap'], + fnSidebar: ['getAllLoadedFunction'], + Footer: ['updateRunningJob'], + GlobalValiable: ['getGlobalVariable'], + IDE: ['saveNode'], + }, + data: { + API_PATH: process.env.VUE_APP_MN_CONFIG_PREFIX || `http://${window.location.host}/api/rest_j/v1/`, + BI_API_PATH: 'dws/vg#', + }, + component: () => + import('./container.vue'), + config: { + }, +}; diff --git a/web/src/apps/scriptis/module/workbench/modal.js 
b/web/src/apps/scriptis/module/workbench/modal.js new file mode 100644 index 000000000..898eb15e1 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/modal.js @@ -0,0 +1,162 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +let logoList = [ + { rule: /\.(bi)$/i, logo: 'fi-bi' }, + { rule: /\.(sql)$/i, logo: 'fi-spark' }, + { rule: /\.(hql)$/i, logo: 'fi-hive' }, + { rule: /\.(out)$/i, logo: 'fi-storage' }, + { rule: /\.scala$/i, logo: 'fi-scala' }, + { rule: /\.jdbc$/i, logo: 'fi-jdbc' }, + { rule: /\.python$/i, logo: 'fi-python' }, + { rule: /\.py$/i, logo: 'fi-spark-python' }, + { rule: /\.r$/i, logo: 'fi-r' }, + { rule: /\.qmlsql$/i, logo: 'fi-spark' }, + { rule: /\.qmlpy$/i, logo: 'fi-python' }, + { rule: /\.txt$/i, logo: 'fi-txt' }, + { rule: /\.log$/i, logo: 'fi-log' }, +]; + +/** + * 基础 + */ +export class Work { + /** + * 构造器 + * @param {*} option + */ + constructor(option) { + // 唯一标识 + this.id = option.id; + // 执行id + this.execID = null; + // 历史脚本才用得到,只/api/jobhistory/${id}/get请求使用 + this.taskID = option.taskID; + // 文件名 + this.filename = option.filename; + // 文件路径 + this.filepath = option.filepath; + // 数据开发模块会显示文件列表 + this.fileList = option.fileList || []; + // 是否保存 + this.unsave = option.unsave || false; + // 是否另存 + this.saveAs = option.saveAs || false; + // 是否要读取缓存数据,一般用于自动运行的时候 + this.noLoadCache = option.noLoadCache || false; + // 类型 + this.type = option.type || 'workspaceScript'; // 
workspaceScript hdfsScript historyScript workflow backgroundScript + // 数据 + this.data = option.data || null; + // 脚本内容和参数(test) + this.code = option.code; + this.params = option.params; + // 用于记录临时脚本或者HDFS脚本另存后保存内容和参数用 + this.ismodifyByOldTab = option.ismodifyByOldTab || false; + this.owner = option.owner; + this.specialSetting = option.specialSetting; + this.nodeName = option.nodeName || null; // 在工作流操作时记录名称 + } + + /** + * 根据文件后缀判断logo + */ + get logo() { + let logos = logoList.filter((item) => { + return item.rule.test(this.filename); + }); + if (logos.length > 0) { + return logos[0].logo; + } else { + return 'javascript: void 0'; + } + } +} + +/** + * 脚本任务 + */ +export class Script { + /** + * 构造器 + * @param {*} option + */ + constructor(option) { + this.id = option.id; + this.fileName = option.fileName; + this.filepath = option.filepath; + // 脚本内容 + this.data = option.data || option.code || ''; + // 脚本原内容 + this.oldData = option.data || ''; + // 日志 + this.log = option.log || {}; + this.logLine = option.logLine || 1; + // 历史 + this.history = []; + // 进度 + this.progress = { + current: null, + progressInfo: [], + waitingSize: null, + costTime: null, + }; + // 步骤 + this.steps = []; + // 智能诊断 + this.diagnosis = null; + // 运行结果 + this.result = null; + // 记录结果集的存储路径 + this.resultList = null; + // 参数 + this.params = option.params || { + variable: [], + configuration: { + special: {}, + runtime: { + args: '', + env: [], + }, + startup: {}, + }, + }; + + // editor组件的language + this.lang = option.lang; + // 是否可执行 + this.executable = option.executable || false; + // 是否可配置 + this.configurable = option.configurable || true; + // 后台使用哪种BDP服务 + this.application = option.application; + // 后台运行的服务类型 + this.runType = option.runType; + // 后缀 + this.ext = option.ext; + // 否为系统支持的脚本类型 + this.scriptType = option.scriptType; + // 是否可读 + this.readOnly = option.readOnly || false; + // 是否正在执行 + this.running = false; + // 当前的运行状态 + this.status = option.status ? 
option.status : 'Inited'; + // script视图状态 + this.scriptViewState = {}; + } +} diff --git a/web/src/apps/scriptis/module/workbench/script/backgroundScript.vue b/web/src/apps/scriptis/module/workbench/script/backgroundScript.vue new file mode 100644 index 000000000..f67652986 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/backgroundScript.vue @@ -0,0 +1,393 @@ + + diff --git a/web/src/apps/scriptis/module/workbench/script/editor.vue b/web/src/apps/scriptis/module/workbench/script/editor.vue new file mode 100644 index 000000000..348d8c7cf --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/editor.vue @@ -0,0 +1,339 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/script/history.vue b/web/src/apps/scriptis/module/workbench/script/history.vue new file mode 100644 index 000000000..587efbf60 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/history.vue @@ -0,0 +1,303 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/script/intelligentDiagnosis.vue b/web/src/apps/scriptis/module/workbench/script/intelligentDiagnosis.vue new file mode 100644 index 000000000..ac2c83363 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/intelligentDiagnosis.vue @@ -0,0 +1,55 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/script/mixin.js b/web/src/apps/scriptis/module/workbench/script/mixin.js new file mode 100644 index 000000000..996e99bf7 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/mixin.js @@ -0,0 +1,44 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { throttle } from 'lodash'; +import elementResizeEvent from '@/common/helper/elementResizeEvent'; +export default { + data() { + return { + tableHeight: 100, + }; + }, + mounted() { + this.initEvents(); + }, + beforeDestroy: function() { + elementResizeEvent.unbind(this.$el); + }, + methods: { + initEvents() { + let thro = throttle(() => { + this.resize(); + }); + elementResizeEvent.bind(this.$el, thro); + }, + resize() { + this.tableHeight = this.$el.clientHeight - 40; + }, + }, +} +; diff --git a/web/src/apps/scriptis/module/workbench/script/saveAs.vue b/web/src/apps/scriptis/module/workbench/script/saveAs.vue new file mode 100644 index 000000000..41c3cb5f5 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/saveAs.vue @@ -0,0 +1,200 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/script/script.vue b/web/src/apps/scriptis/module/workbench/script/script.vue new file mode 100644 index 000000000..5ef17f89a --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/script.vue @@ -0,0 +1,1535 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/script/setting.vue b/web/src/apps/scriptis/module/workbench/script/setting.vue new file mode 100644 index 000000000..a66b43aae --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/script/setting.vue @@ -0,0 +1,102 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/setting/customVariable.vue b/web/src/apps/scriptis/module/workbench/setting/customVariable.vue new file mode 100644 index 000000000..640f8feab --- /dev/null +++ 
b/web/src/apps/scriptis/module/workbench/setting/customVariable.vue @@ -0,0 +1,24 @@ + + diff --git a/web/src/apps/scriptis/module/workbench/setting/envVariable.vue b/web/src/apps/scriptis/module/workbench/setting/envVariable.vue new file mode 100644 index 000000000..46fab494c --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/setting/envVariable.vue @@ -0,0 +1,26 @@ + + diff --git a/web/src/apps/scriptis/module/workbench/setting/runTimeArgs.vue b/web/src/apps/scriptis/module/workbench/setting/runTimeArgs.vue new file mode 100644 index 000000000..334829aa4 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/setting/runTimeArgs.vue @@ -0,0 +1,32 @@ + + diff --git a/web/src/apps/scriptis/module/workbench/tableDetails/components/basic.vue b/web/src/apps/scriptis/module/workbench/tableDetails/components/basic.vue new file mode 100644 index 000000000..e15219473 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/tableDetails/components/basic.vue @@ -0,0 +1,132 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/tableDetails/components/field.vue b/web/src/apps/scriptis/module/workbench/tableDetails/components/field.vue new file mode 100644 index 000000000..4b801555a --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/tableDetails/components/field.vue @@ -0,0 +1,104 @@ + + diff --git a/web/src/apps/scriptis/module/workbench/tableDetails/components/statistics.vue b/web/src/apps/scriptis/module/workbench/tableDetails/components/statistics.vue new file mode 100644 index 000000000..10ecf8aa8 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/tableDetails/components/statistics.vue @@ -0,0 +1,189 @@ + + + diff --git a/web/src/apps/scriptis/module/workbench/tableDetails/index.vue b/web/src/apps/scriptis/module/workbench/tableDetails/index.vue new file mode 100644 index 000000000..b4aa3cf61 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/tableDetails/index.vue @@ -0,0 +1,319 @@ + + + diff --git 
a/web/src/apps/scriptis/module/workbench/tableDetails/utils.js b/web/src/apps/scriptis/module/workbench/tableDetails/utils.js new file mode 100644 index 000000000..7e34cd421 --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/tableDetails/utils.js @@ -0,0 +1,93 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import moment from 'moment'; + +const convertList = { + lifecycle: [{ + value: 0, + label: '永久', + }, { + value: 1, + label: '当天有效', + }, { + value: 2, + label: '一周有效', + }, { + value: 3, + label: '一月有效', + }, { + value: 4, + label: '半年有效', + }], + modelLevel: [{ + value: 0, + label: 'ODS(原始数据层)', + }, { + value: 1, + label: 'DWD(明细数据层)', + }, { + value: 2, + label: 'DWS(汇总数据层)', + }, { + value: 3, + label: 'ADS(应用数据层)', + }], + useWay: [{ + value: 0, + label: '一次写多次读', + }, { + value: 1, + label: '增删改查', + }, { + value: 2, + label: '多次覆盖写', + }, { + value: 3, + label: '一次写偶尔读', + }], +}; + +/** + * 格式化值 + * @param {*} item + * @param {*} field + * @return {*} return + */ +function formatValue(item, field) { + const value = item[field.key]; + let formatted = value; + switch (field.type) { + case 'boolean': + formatted = value ? '是' : '否'; + break; + case 'timestramp': + formatted = value == '0' || !value ? 
0 : moment.unix(value).format('YYYY-MM-DD HH:mm:ss'); + break; + case 'convert': + if (!item[field.key] && item[field.key] !== 0) { + return value; + } + formatted = convertList[field.key][item[field.key]].label; + break; + } + return formatted; +} + +export default { + formatValue, +}; diff --git a/web/src/apps/scriptis/module/workbench/title.vue b/web/src/apps/scriptis/module/workbench/title.vue new file mode 100644 index 000000000..d4e306beb --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/title.vue @@ -0,0 +1,116 @@ + + + + diff --git a/web/src/apps/scriptis/module/workbench/visualAnalysis/visualAnalysis.vue b/web/src/apps/scriptis/module/workbench/visualAnalysis/visualAnalysis.vue new file mode 100644 index 000000000..e91d330de --- /dev/null +++ b/web/src/apps/scriptis/module/workbench/visualAnalysis/visualAnalysis.vue @@ -0,0 +1,128 @@ + + + diff --git a/web/src/apps/scriptis/router.js b/web/src/apps/scriptis/router.js new file mode 100644 index 000000000..d3cb4b2bf --- /dev/null +++ b/web/src/apps/scriptis/router.js @@ -0,0 +1,13 @@ +export default [ + { + path: '/home', + name: 'Home', + meta: { + title: 'Scriptis', + keepAlive: false, // 缓存导致页面有多个编辑器,广播事件会触发报错 + publicPage: true, // 权限公开 + }, + component: () => + import('./view/home/index.vue'), + } +] diff --git a/web/src/apps/scriptis/service/db/globalcache.js b/web/src/apps/scriptis/service/db/globalcache.js new file mode 100644 index 000000000..46725dd57 --- /dev/null +++ b/web/src/apps/scriptis/service/db/globalcache.js @@ -0,0 +1,101 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { isNil, remove } from 'lodash'; +import { Basic } from '@/common/service/db/index.js'; +/** + * @class Globalcache + * @extends {Basic} + */ +class Globalcache extends Basic { + /** + *Creates an instance of Globalcache. + * @param {*} table + * @param {*} db + * @memberof Globalcache + */ + constructor(table) { + super(table); + } + + /** + * @param {*} args + * @return {*} + */ + async setCache(args) { + let cache = await this.getCache(args.key); + let cacheToUpdate = args; + if (!isNil(cache)) { + cacheToUpdate = { _id: cache._id, ...cacheToUpdate }; + } + return this.add(cacheToUpdate); + } + + /** + * @param {*} key + * @return {*} + */ + async getCache(key) { + let caches = await this.get(key) || []; + return caches[0]; + } + + /** + * @param {*} args + * @return {*} + */ + async removeCache(args) { + let cache = await this.getCache(args.id); + let tabList = []; + if (!isNil(cache)) { + tabList = cache.tabList; + remove(tabList, (n) => n === args.tabId); + } + return this.update(args.id, { key: args.id, tabList: tabList }); + } + + /** + * @param {*} args + * @return {*} + */ + async updateCache(args) { + if (args.work) { + let cache = await this.getCache(args.id); + const id = args.work.id; + let tabList = []; + if (!isNil(cache)) { + tabList = cache.tabList; + if (cache.tabList.indexOf(id) === -1) { + tabList.push(id); + } + } else { + tabList.push(id); + } + return this.update(args.id, { key: args.id, tabList: tabList }); + } + if (args.fnList) { + this.update(args.id, { key: args.id, fnList: args.fnList }); + } + if 
(args.variableList) { + this.update(args.id, { key: args.id, variableList: args.variableList }); + } + return; + } +} +const globalcache = new Globalcache('globalCache'); + +export default globalcache; diff --git a/web/src/apps/scriptis/service/db/history.js b/web/src/apps/scriptis/service/db/history.js new file mode 100644 index 000000000..09da6d759 --- /dev/null +++ b/web/src/apps/scriptis/service/db/history.js @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { Basic } from '@/common/service/db/index.js'; +/** + * @class History + * @extends {Basic} + */ +class History extends Basic { + /** + *Creates an instance of History. + * @param {*} table + * @memberof History + */ + constructor(table) { + super(table); + } +} +const history = new History('history'); + +export default history; diff --git a/web/src/apps/scriptis/service/db/log.js b/web/src/apps/scriptis/service/db/log.js new file mode 100644 index 000000000..abb67137b --- /dev/null +++ b/web/src/apps/scriptis/service/db/log.js @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { Basic } from '@/common/service/db/index.js'; +/** + * @class Log + * @extends {Basic} + */ +class Log extends Basic { + /** + *Creates an instance of Log. + * @param {*} table + * @memberof Log + */ + constructor(table) { + super(table); + } +} +const log = new Log('log'); + +export default log; diff --git a/web/src/apps/scriptis/service/db/progress.js b/web/src/apps/scriptis/service/db/progress.js new file mode 100644 index 000000000..7cb29e8f1 --- /dev/null +++ b/web/src/apps/scriptis/service/db/progress.js @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { Basic } from '@/common/service/db/index.js'; +/** + * @class progress + * @extends {Basic} + */ +class Progress extends Basic { + /** + *Creates an instance of progress. 
+ * @param {*} table + * @memberof progress + */ + constructor(table) { + super(table); + } +} +const progress = new Progress('progress'); + +export default progress; diff --git a/web/src/apps/scriptis/service/db/result.js b/web/src/apps/scriptis/service/db/result.js new file mode 100644 index 000000000..6558f72ac --- /dev/null +++ b/web/src/apps/scriptis/service/db/result.js @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { Basic } from '@/common/service/db/index.js'; +/** + * @class Result + * @extends {Basic} + */ +class Result extends Basic { + /** + *Creates an instance of Result. + * @param {*} table + * @memberof Result + */ + constructor(table) { + super(table); + } +} +const result = new Result('result'); + +export default result; diff --git a/web/src/apps/scriptis/service/db/tab.js b/web/src/apps/scriptis/service/db/tab.js new file mode 100644 index 000000000..aca5dc17a --- /dev/null +++ b/web/src/apps/scriptis/service/db/tab.js @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { Basic } from '@/common/service/db/index.js'; +/** + * @class Tab + * @extends {Basic} + */ +class Tab extends Basic { + /** + *Creates an instance of Tab. + * @param {*} table + * @memberof Tab + */ + constructor(table) { + super(table); + } +} +const tab = new Tab('tab'); + +export default tab; diff --git a/web/src/apps/scriptis/service/db/tree.js b/web/src/apps/scriptis/service/db/tree.js new file mode 100644 index 000000000..7907ea50f --- /dev/null +++ b/web/src/apps/scriptis/service/db/tree.js @@ -0,0 +1,35 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import { Basic } from '@/common/service/db/index.js'; +/** + * @class Tab + * @extends {Basic} + */ +class Tree extends Basic { + /** + *Creates an instance of Tab. 
+ * @param {*} table + * @memberof Tree + */ + constructor(table) { + super(table); + } +} +const tree = new Tree('tree'); + +export default tree; diff --git a/web/src/apps/scriptis/view/home/index.vue b/web/src/apps/scriptis/view/home/index.vue new file mode 100644 index 000000000..9b246b509 --- /dev/null +++ b/web/src/apps/scriptis/view/home/index.vue @@ -0,0 +1,123 @@ + + + diff --git a/web/src/apps/scriptis/view/layout.vue b/web/src/apps/scriptis/view/layout.vue new file mode 100644 index 000000000..8958c366c --- /dev/null +++ b/web/src/apps/scriptis/view/layout.vue @@ -0,0 +1,20 @@ + + diff --git a/web/src/apps/streamis/assets/images/u1909.png b/web/src/apps/streamis/assets/images/u1909.png new file mode 100644 index 000000000..86f18774c Binary files /dev/null and b/web/src/apps/streamis/assets/images/u1909.png differ diff --git a/web/src/apps/streamis/assets/images/u2063.png b/web/src/apps/streamis/assets/images/u2063.png new file mode 100644 index 000000000..1565f7b8e Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2063.png differ diff --git a/web/src/apps/streamis/assets/images/u2450.png b/web/src/apps/streamis/assets/images/u2450.png new file mode 100644 index 000000000..25bdcbbaf Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2450.png differ diff --git a/web/src/apps/streamis/assets/images/u2451.png b/web/src/apps/streamis/assets/images/u2451.png new file mode 100644 index 000000000..804183254 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2451.png differ diff --git a/web/src/apps/streamis/assets/images/u2452.png b/web/src/apps/streamis/assets/images/u2452.png new file mode 100644 index 000000000..339463d96 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2452.png differ diff --git a/web/src/apps/streamis/assets/images/u2453.png b/web/src/apps/streamis/assets/images/u2453.png new file mode 100644 index 000000000..7f6fef90d Binary files /dev/null and 
b/web/src/apps/streamis/assets/images/u2453.png differ diff --git a/web/src/apps/streamis/assets/images/u2461.png b/web/src/apps/streamis/assets/images/u2461.png new file mode 100644 index 000000000..e0aeb7d97 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2461.png differ diff --git a/web/src/apps/streamis/assets/images/u2466.svg b/web/src/apps/streamis/assets/images/u2466.svg new file mode 100644 index 000000000..a6ea1b042 --- /dev/null +++ b/web/src/apps/streamis/assets/images/u2466.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/web/src/apps/streamis/assets/images/u2498p000.png b/web/src/apps/streamis/assets/images/u2498p000.png new file mode 100644 index 000000000..48faee6e8 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2498p000.png differ diff --git a/web/src/apps/streamis/assets/images/u2616.png b/web/src/apps/streamis/assets/images/u2616.png new file mode 100644 index 000000000..a2e6d40f0 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2616.png differ diff --git a/web/src/apps/streamis/assets/images/u2618.png b/web/src/apps/streamis/assets/images/u2618.png new file mode 100644 index 000000000..43cf0e573 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2618.png differ diff --git a/web/src/apps/streamis/assets/images/u2662.png b/web/src/apps/streamis/assets/images/u2662.png new file mode 100644 index 000000000..9eee4dd33 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2662.png differ diff --git a/web/src/apps/streamis/assets/images/u2665.png b/web/src/apps/streamis/assets/images/u2665.png new file mode 100644 index 000000000..632935c8d Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2665.png differ diff --git a/web/src/apps/streamis/assets/images/u2680.png b/web/src/apps/streamis/assets/images/u2680.png new file mode 100644 index 000000000..14f4eaa06 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u2680.png differ diff 
--git a/web/src/apps/streamis/assets/images/u3941.png b/web/src/apps/streamis/assets/images/u3941.png new file mode 100644 index 000000000..34515e0eb Binary files /dev/null and b/web/src/apps/streamis/assets/images/u3941.png differ diff --git a/web/src/apps/streamis/assets/images/u3950.png b/web/src/apps/streamis/assets/images/u3950.png new file mode 100644 index 000000000..de3b8fd5d Binary files /dev/null and b/web/src/apps/streamis/assets/images/u3950.png differ diff --git a/web/src/apps/streamis/assets/images/u3951.png b/web/src/apps/streamis/assets/images/u3951.png new file mode 100644 index 000000000..7b83daf25 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u3951.png differ diff --git a/web/src/apps/streamis/assets/images/u3961.png b/web/src/apps/streamis/assets/images/u3961.png new file mode 100644 index 000000000..47a86ba9d Binary files /dev/null and b/web/src/apps/streamis/assets/images/u3961.png differ diff --git a/web/src/apps/streamis/assets/images/u3962.png b/web/src/apps/streamis/assets/images/u3962.png new file mode 100644 index 000000000..80995b0d1 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u3962.png differ diff --git a/web/src/apps/streamis/assets/images/u3963.png b/web/src/apps/streamis/assets/images/u3963.png new file mode 100644 index 000000000..6cffc403f Binary files /dev/null and b/web/src/apps/streamis/assets/images/u3963.png differ diff --git a/web/src/apps/streamis/assets/images/u4002.png b/web/src/apps/streamis/assets/images/u4002.png new file mode 100644 index 000000000..08494381c Binary files /dev/null and b/web/src/apps/streamis/assets/images/u4002.png differ diff --git a/web/src/apps/streamis/assets/images/u458.png b/web/src/apps/streamis/assets/images/u458.png new file mode 100644 index 000000000..457acfa29 Binary files /dev/null and b/web/src/apps/streamis/assets/images/u458.png differ diff --git a/web/src/apps/streamis/assets/streamisIconFont/demo.css 
b/web/src/apps/streamis/assets/streamisIconFont/demo.css new file mode 100644 index 000000000..a67054a0a --- /dev/null +++ b/web/src/apps/streamis/assets/streamisIconFont/demo.css @@ -0,0 +1,539 @@ +/* Logo 字体 */ +@font-face { + font-family: "iconfont logo"; + src: url('https://at.alicdn.com/t/font_985780_km7mi63cihi.eot?t=1545807318834'); + src: url('https://at.alicdn.com/t/font_985780_km7mi63cihi.eot?t=1545807318834#iefix') format('embedded-opentype'), + url('https://at.alicdn.com/t/font_985780_km7mi63cihi.woff?t=1545807318834') format('woff'), + url('https://at.alicdn.com/t/font_985780_km7mi63cihi.ttf?t=1545807318834') format('truetype'), + url('https://at.alicdn.com/t/font_985780_km7mi63cihi.svg?t=1545807318834#iconfont') format('svg'); +} + +.logo { + font-family: "iconfont logo"; + font-size: 160px; + font-style: normal; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +/* tabs */ +.nav-tabs { + position: relative; +} + +.nav-tabs .nav-more { + position: absolute; + right: 0; + bottom: 0; + height: 42px; + line-height: 42px; + color: #666; +} + +#tabs { + border-bottom: 1px solid #eee; +} + +#tabs li { + cursor: pointer; + width: 100px; + height: 40px; + line-height: 40px; + text-align: center; + font-size: 16px; + border-bottom: 2px solid transparent; + position: relative; + z-index: 1; + margin-bottom: -1px; + color: #666; +} + + +#tabs .active { + border-bottom-color: #f00; + color: #222; +} + +.tab-container .content { + display: none; +} + +/* 页面布局 */ +.main { + padding: 30px 100px; + width: 960px; + margin: 0 auto; +} + +.main .logo { + color: #333; + text-align: left; + margin-bottom: 30px; + line-height: 1; + height: 110px; + margin-top: -50px; + overflow: hidden; + *zoom: 1; +} + +.main .logo a { + font-size: 160px; + color: #333; +} + +.helps { + margin-top: 40px; +} + +.helps pre { + padding: 20px; + margin: 10px 0; + border: solid 1px #e7e1cd; + background-color: #fffdef; + overflow: auto; +} + +.icon_lists { + 
width: 100% !important; + overflow: hidden; + *zoom: 1; +} + +.icon_lists li { + width: 100px; + margin-bottom: 10px; + margin-right: 20px; + text-align: center; + list-style: none !important; + cursor: default; +} + +.icon_lists li .code-name { + line-height: 1.2; +} + +.icon_lists .icon { + display: block; + height: 100px; + line-height: 100px; + font-size: 42px; + margin: 10px auto; + color: #333; + -webkit-transition: font-size 0.25s linear, width 0.25s linear; + -moz-transition: font-size 0.25s linear, width 0.25s linear; + transition: font-size 0.25s linear, width 0.25s linear; +} + +.icon_lists .icon:hover { + font-size: 100px; +} + +.icon_lists .svg-icon { + /* 通过设置 font-size 来改变图标大小 */ + width: 1em; + /* 图标和文字相邻时,垂直对齐 */ + vertical-align: -0.15em; + /* 通过设置 color 来改变 SVG 的颜色/fill */ + fill: currentColor; + /* path 和 stroke 溢出 viewBox 部分在 IE 下会显示 + normalize.css 中也包含这行 */ + overflow: hidden; +} + +.icon_lists li .name, +.icon_lists li .code-name { + color: #666; +} + +/* markdown 样式 */ +.markdown { + color: #666; + font-size: 14px; + line-height: 1.8; +} + +.highlight { + line-height: 1.5; +} + +.markdown img { + vertical-align: middle; + max-width: 100%; +} + +.markdown h1 { + color: #404040; + font-weight: 500; + line-height: 40px; + margin-bottom: 24px; +} + +.markdown h2, +.markdown h3, +.markdown h4, +.markdown h5, +.markdown h6 { + color: #404040; + margin: 1.6em 0 0.6em 0; + font-weight: 500; + clear: both; +} + +.markdown h1 { + font-size: 28px; +} + +.markdown h2 { + font-size: 22px; +} + +.markdown h3 { + font-size: 16px; +} + +.markdown h4 { + font-size: 14px; +} + +.markdown h5 { + font-size: 12px; +} + +.markdown h6 { + font-size: 12px; +} + +.markdown hr { + height: 1px; + border: 0; + background: #e9e9e9; + margin: 16px 0; + clear: both; +} + +.markdown p { + margin: 1em 0; +} + +.markdown>p, +.markdown>blockquote, +.markdown>.highlight, +.markdown>ol, +.markdown>ul { + width: 80%; +} + +.markdown ul>li { + list-style: circle; +} + 
+.markdown>ul li, +.markdown blockquote ul>li { + margin-left: 20px; + padding-left: 4px; +} + +.markdown>ul li p, +.markdown>ol li p { + margin: 0.6em 0; +} + +.markdown ol>li { + list-style: decimal; +} + +.markdown>ol li, +.markdown blockquote ol>li { + margin-left: 20px; + padding-left: 4px; +} + +.markdown code { + margin: 0 3px; + padding: 0 5px; + background: #eee; + border-radius: 3px; +} + +.markdown strong, +.markdown b { + font-weight: 600; +} + +.markdown>table { + border-collapse: collapse; + border-spacing: 0px; + empty-cells: show; + border: 1px solid #e9e9e9; + width: 95%; + margin-bottom: 24px; +} + +.markdown>table th { + white-space: nowrap; + color: #333; + font-weight: 600; +} + +.markdown>table th, +.markdown>table td { + border: 1px solid #e9e9e9; + padding: 8px 16px; + text-align: left; +} + +.markdown>table th { + background: #F7F7F7; +} + +.markdown blockquote { + font-size: 90%; + color: #999; + border-left: 4px solid #e9e9e9; + padding-left: 0.8em; + margin: 1em 0; +} + +.markdown blockquote p { + margin: 0; +} + +.markdown .anchor { + opacity: 0; + transition: opacity 0.3s ease; + margin-left: 8px; +} + +.markdown .waiting { + color: #ccc; +} + +.markdown h1:hover .anchor, +.markdown h2:hover .anchor, +.markdown h3:hover .anchor, +.markdown h4:hover .anchor, +.markdown h5:hover .anchor, +.markdown h6:hover .anchor { + opacity: 1; + display: inline-block; +} + +.markdown>br, +.markdown>p>br { + clear: both; +} + + +.hljs { + display: block; + background: white; + padding: 0.5em; + color: #333333; + overflow-x: auto; +} + +.hljs-comment, +.hljs-meta { + color: #969896; +} + +.hljs-string, +.hljs-variable, +.hljs-template-variable, +.hljs-strong, +.hljs-emphasis, +.hljs-quote { + color: #df5000; +} + +.hljs-keyword, +.hljs-selector-tag, +.hljs-type { + color: #a71d5d; +} + +.hljs-literal, +.hljs-symbol, +.hljs-bullet, +.hljs-attribute { + color: #0086b3; +} + +.hljs-section, +.hljs-name { + color: #63a35c; +} + +.hljs-tag { + color: 
#333333; +} + +.hljs-title, +.hljs-attr, +.hljs-selector-id, +.hljs-selector-class, +.hljs-selector-attr, +.hljs-selector-pseudo { + color: #795da3; +} + +.hljs-addition { + color: #55a532; + background-color: #eaffea; +} + +.hljs-deletion { + color: #bd2c00; + background-color: #ffecec; +} + +.hljs-link { + text-decoration: underline; +} + +/* 代码高亮 */ +/* PrismJS 1.15.0 +https://prismjs.com/download.html#themes=prism&languages=markup+css+clike+javascript */ +/** + * prism.js default theme for JavaScript, CSS and HTML + * Based on dabblet (http://dabblet.com) + * @author Lea Verou + */ +code[class*="language-"], +pre[class*="language-"] { + color: black; + background: none; + text-shadow: 0 1px white; + font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace; + text-align: left; + white-space: pre; + word-spacing: normal; + word-break: normal; + word-wrap: normal; + line-height: 1.5; + + -moz-tab-size: 4; + -o-tab-size: 4; + tab-size: 4; + + -webkit-hyphens: none; + -moz-hyphens: none; + -ms-hyphens: none; + hyphens: none; +} + +pre[class*="language-"]::-moz-selection, +pre[class*="language-"] ::-moz-selection, +code[class*="language-"]::-moz-selection, +code[class*="language-"] ::-moz-selection { + text-shadow: none; + background: #b3d4fc; +} + +pre[class*="language-"]::selection, +pre[class*="language-"] ::selection, +code[class*="language-"]::selection, +code[class*="language-"] ::selection { + text-shadow: none; + background: #b3d4fc; +} + +@media print { + + code[class*="language-"], + pre[class*="language-"] { + text-shadow: none; + } +} + +/* Code blocks */ +pre[class*="language-"] { + padding: 1em; + margin: .5em 0; + overflow: auto; +} + +:not(pre)>code[class*="language-"], +pre[class*="language-"] { + background: #f5f2f0; +} + +/* Inline code */ +:not(pre)>code[class*="language-"] { + padding: .1em; + border-radius: .3em; + white-space: normal; +} + +.token.comment, +.token.prolog, +.token.doctype, +.token.cdata { + color: slategray; +} + 
+.token.punctuation { + color: #999; +} + +.namespace { + opacity: .7; +} + +.token.property, +.token.tag, +.token.boolean, +.token.number, +.token.constant, +.token.symbol, +.token.deleted { + color: #905; +} + +.token.selector, +.token.attr-name, +.token.string, +.token.char, +.token.builtin, +.token.inserted { + color: #690; +} + +.token.operator, +.token.entity, +.token.url, +.language-css .token.string, +.style .token.string { + color: #9a6e3a; + background: hsla(0, 0%, 100%, .5); +} + +.token.atrule, +.token.attr-value, +.token.keyword { + color: #07a; +} + +.token.function, +.token.class-name { + color: #DD4A68; +} + +.token.regex, +.token.important, +.token.variable { + color: #e90; +} + +.token.important, +.token.bold { + font-weight: bold; +} + +.token.italic { + font-style: italic; +} + +.token.entity { + cursor: help; +} diff --git a/web/src/apps/streamis/assets/streamisIconFont/demo_index.html b/web/src/apps/streamis/assets/streamisIconFont/demo_index.html new file mode 100644 index 000000000..5e079b8ff --- /dev/null +++ b/web/src/apps/streamis/assets/streamisIconFont/demo_index.html @@ -0,0 +1,349 @@ + + + + + iconfont Demo + + + + + + + + + + + + + +
+

+ + +

+ +
+
+
    + +
  • + +
    数据
    +
    &#xe6ab;
    +
  • + +
  • + +
    数据
    +
    &#xe62d;
    +
  • + +
  • + +
    集群
    +
    &#xe610;
    +
  • + +
  • + +
    mysql
    +
    &#xe613;
    +
  • + +
  • +  +
    apachekafka
    +
    &#xeb3f;
    +
  • + +
  • + +
    表格
    +
    &#xe617;
    +
  • + +
  • + +
    数据
    +
    &#xe832;
    +
  • + +
+
+

Unicode 引用

+
+ +

Unicode 是字体在网页端最原始的应用方式,特点是:

+
    +
  • 支持按字体的方式去动态调整图标大小,颜色等等。
  • +
  • 默认情况下不支持多色,直接添加多色图标会自动去色。
  • +
+
+

注意:新版 iconfont 支持两种方式引用多色图标:SVG symbol 引用方式和彩色字体图标模式。(使用彩色字体图标需要在「编辑项目」中开启「彩色」选项后并重新生成。)

+
+

Unicode 使用步骤如下:

+

第一步:拷贝项目下面生成的 @font-face

+
@font-face {
+  font-family: 'iconfont';
+  src: url('iconfont.woff2?t=1621579006110') format('woff2'),
+       url('iconfont.woff?t=1621579006110') format('woff'),
+       url('iconfont.ttf?t=1621579006110') format('truetype');
+}
+
+

第二步:定义使用 iconfont 的样式

+
.iconfont {
+  font-family: "iconfont" !important;
+  font-size: 16px;
+  font-style: normal;
+  -webkit-font-smoothing: antialiased;
+  -moz-osx-font-smoothing: grayscale;
+}
+
+

第三步:挑选相应图标并获取字体编码,应用于页面

+
+<span class="iconfont">&#x33;</span>
+
+
+

"iconfont" 是你项目下的 font-family。可以通过编辑项目查看,默认是 "iconfont"。

+
+
+
+
+
    + +
  • + +
    + 数据 +
    +
    .icon-shuju +
    +
  • + +
  • + +
    + 数据 +
    +
    .icon-fl-shuju +
    +
  • + +
  • + +
    + 集群 +
    +
    .icon-jiqun +
    +
  • + +
  • + +
    + mysql +
    +
    .icon-mysql +
    +
  • + +
  • + +
    + apachekafka +
    +
    .icon-apachekafka +
    +
  • + +
  • + +
    + 表格 +
    +
    .icon-table +
    +
  • + +
  • + +
    + 数据 +
    +
    .icon-shuju1 +
    +
  • + +
+
+

font-class 引用

+
+ +

font-class 是 Unicode 使用方式的一种变种,主要是解决 Unicode 书写不直观,语意不明确的问题。

+

与 Unicode 使用方式相比,具有如下特点:

+
    +
  • 相比于 Unicode 语意明确,书写更直观。可以很容易分辨这个 icon 是什么。
  • +
  • 因为使用 class 来定义图标,所以当要替换图标时,只需要修改 class 里面的 Unicode 引用。
  • +
+

使用步骤如下:

+

第一步:引入项目下面生成的 fontclass 代码:

+
<link rel="stylesheet" href="./iconfont.css">
+
+

第二步:挑选相应图标并获取类名,应用于页面:

+
<span class="iconfont icon-xxx"></span>
+
+
+

" + iconfont" 是你项目下的 font-family。可以通过编辑项目查看,默认是 "iconfont"。

+
+
+
+
+
    + +
  • + +
    数据
    +
    #icon-shuju
    +
  • + +
  • + +
    数据
    +
    #icon-fl-shuju
    +
  • + +
  • + +
    集群
    +
    #icon-jiqun
    +
  • + +
  • + +
    mysql
    +
    #icon-mysql
    +
  • + +
  • + +
    apachekafka
    +
    #icon-apachekafka
    +
  • + +
  • + +
    表格
    +
    #icon-table
    +
  • + +
  • + +
    数据
    +
    #icon-shuju1
    +
  • + +
+
+

Symbol 引用

+
+ +

这是一种全新的使用方式,应该说这才是未来的主流,也是平台目前推荐的用法。相关介绍可以参考这篇文章 + 这种用法其实是做了一个 SVG 的集合,与另外两种相比具有如下特点:

+
    +
  • 支持多色图标了,不再受单色限制。
  • +
  • 通过一些技巧,支持像字体那样,通过 font-size, color 来调整样式。
  • +
  • 兼容性较差,支持 IE9+,及现代浏览器。
  • +
  • 浏览器渲染 SVG 的性能一般,还不如 png。
  • +
+

使用步骤如下:

+

第一步:引入项目下面生成的 symbol 代码:

+
<script src="./iconfont.js"></script>
+
+

第二步:加入通用 CSS 代码(引入一次就行):

+
<style>
+.icon {
+  width: 1em;
+  height: 1em;
+  vertical-align: -0.15em;
+  fill: currentColor;
+  overflow: hidden;
+}
+</style>
+
+

第三步:挑选相应图标并获取类名,应用于页面:

+
<svg class="icon" aria-hidden="true">
+  <use xlink:href="#icon-xxx"></use>
+</svg>
+
+
+
+ +
+
+ + + diff --git a/web/src/apps/streamis/assets/streamisIconFont/iconfont.css b/web/src/apps/streamis/assets/streamisIconFont/iconfont.css new file mode 100644 index 000000000..a24c7c15d --- /dev/null +++ b/web/src/apps/streamis/assets/streamisIconFont/iconfont.css @@ -0,0 +1,43 @@ +@font-face { + font-family: "iconfont"; /* Project id 2562871 */ + src: url('iconfont.woff2?t=1621579006110') format('woff2'), + url('iconfont.woff?t=1621579006110') format('woff'), + url('iconfont.ttf?t=1621579006110') format('truetype'); +} + +.iconfont { + font-family: "iconfont" !important; + font-size: 16px; + font-style: normal; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; +} + +.icon-shuju:before { + content: "\e6ab"; +} + +.icon-fl-shuju:before { + content: "\e62d"; +} + +.icon-jiqun:before { + content: "\e610"; +} + +.icon-mysql:before { + content: "\e613"; +} + +.icon-apachekafka:before { + content: "\eb3f"; +} + +.icon-table:before { + content: "\e617"; +} + +.icon-shuju1:before { + content: "\e832"; +} + diff --git a/web/src/apps/streamis/assets/streamisIconFont/iconfont.js b/web/src/apps/streamis/assets/streamisIconFont/iconfont.js new file mode 100644 index 000000000..fe9dcc11d --- /dev/null +++ b/web/src/apps/streamis/assets/streamisIconFont/iconfont.js @@ -0,0 +1 @@ +!function(a){var t,e,c,o,h,n,l='',d=(d=document.getElementsByTagName("script"))[d.length-1].getAttribute("data-injectcss");if(d&&!a.__iconfont__svg__cssinject__){a.__iconfont__svg__cssinject__=!0;try{document.write("")}catch(a){console&&console.log(a)}}function s(){h||(h=!0,c())}t=function(){var 
a,t,e;(e=document.createElement("div")).innerHTML=l,l=null,(t=e.getElementsByTagName("svg")[0])&&(t.setAttribute("aria-hidden","true"),t.style.position="absolute",t.style.width=0,t.style.height=0,t.style.overflow="hidden",a=t,(e=document.body).firstChild?(t=e.firstChild).parentNode.insertBefore(a,t):e.appendChild(a))},document.addEventListener?~["complete","loaded","interactive"].indexOf(document.readyState)?setTimeout(t,0):(e=function(){document.removeEventListener("DOMContentLoaded",e,!1),t()},document.addEventListener("DOMContentLoaded",e,!1)):document.attachEvent&&(c=t,o=a.document,h=!1,(n=function(){try{o.documentElement.doScroll("left")}catch(a){return void setTimeout(n,50)}s()})(),o.onreadystatechange=function(){"complete"==o.readyState&&(o.onreadystatechange=null,s())})}(window); \ No newline at end of file diff --git a/web/src/apps/streamis/assets/streamisIconFont/iconfont.json b/web/src/apps/streamis/assets/streamisIconFont/iconfont.json new file mode 100644 index 000000000..2a70b25e8 --- /dev/null +++ b/web/src/apps/streamis/assets/streamisIconFont/iconfont.json @@ -0,0 +1,58 @@ +{ + "id": "2562871", + "name": "streamis", + "font_family": "iconfont", + "css_prefix_text": "icon-", + "description": "", + "glyphs": [ + { + "icon_id": "1789126", + "name": "数据", + "font_class": "shuju", + "unicode": "e6ab", + "unicode_decimal": 59051 + }, + { + "icon_id": "2525864", + "name": "数据", + "font_class": "fl-shuju", + "unicode": "e62d", + "unicode_decimal": 58925 + }, + { + "icon_id": "7159338", + "name": "集群", + "font_class": "jiqun", + "unicode": "e610", + "unicode_decimal": 58896 + }, + { + "icon_id": "11493372", + "name": "mysql", + "font_class": "mysql", + "unicode": "e613", + "unicode_decimal": 58899 + }, + { + "icon_id": "15378137", + "name": "apachekafka", + "font_class": "apachekafka", + "unicode": "eb3f", + "unicode_decimal": 60223 + }, + { + "icon_id": "18828425", + "name": "表格", + "font_class": "table", + "unicode": "e617", + "unicode_decimal": 58903 + 
}, + { + "icon_id": "20016505", + "name": "数据", + "font_class": "shuju1", + "unicode": "e832", + "unicode_decimal": 59442 + } + ] +} diff --git a/web/src/apps/streamis/assets/streamisIconFont/iconfont.ttf b/web/src/apps/streamis/assets/streamisIconFont/iconfont.ttf new file mode 100644 index 000000000..4a33f0047 Binary files /dev/null and b/web/src/apps/streamis/assets/streamisIconFont/iconfont.ttf differ diff --git a/web/src/apps/streamis/assets/streamisIconFont/iconfont.woff b/web/src/apps/streamis/assets/streamisIconFont/iconfont.woff new file mode 100644 index 000000000..807b60f86 Binary files /dev/null and b/web/src/apps/streamis/assets/streamisIconFont/iconfont.woff differ diff --git a/web/src/apps/streamis/assets/streamisIconFont/iconfont.woff2 b/web/src/apps/streamis/assets/streamisIconFont/iconfont.woff2 new file mode 100644 index 000000000..2effb7dca Binary files /dev/null and b/web/src/apps/streamis/assets/streamisIconFont/iconfont.woff2 differ diff --git a/web/src/apps/streamis/assets/styles/realTimeJobCenter.scss b/web/src/apps/streamis/assets/styles/realTimeJobCenter.scss new file mode 100644 index 000000000..83853380f --- /dev/null +++ b/web/src/apps/streamis/assets/styles/realTimeJobCenter.scss @@ -0,0 +1,105 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +@import '@/common/style/variables.scss'; + +.home-page{ + position: $relative; + width: 100%; + height: 100%; + display: flex; + z-index: 1; + .container{ + flex: 1; + } + .center-panel{ + border-left: $border-width-base $border-style-base $border-color-base; + } + + .right-panel{ + border-left: $border-width-base $border-style-base $border-color-base; + } + + .nav-list{ + width: 44px; + height: 100%; + border-right: $border-width-base $border-style-base $border-color-base; + background: $table-thead-bg; + .nav-warp{ + position: $absolute; + height: 100%; + top: 0; + left: 0; + overflow: hidden; + } + .nav-item{ + width: 43px; + padding: 32px 4px; + text-align: center; + cursor: pointer; + &.actived{ + width: 44px; + border-left: 3px solid $primary-color; + border-bottom: $border-width-base $border-style-base $border-color-base; + border-top: $border-width-base $border-style-base $border-color-base; + background: $background-color-white ; + color: $primary-color; + } + &:hover{ + color: $primary-color; + } + .nav-icon{ + font-size: $font-size-large; + } + .nav-name{ + width: 100%; + white-space: normal; + word-break: break-all; + display: inline-block; + line-height: $font-size-large; + } + } + } + .narrow { + width: 30px; + .nav-item { + width: 100%; + padding: 30px 4px; + &.actived { + width: 30px; + } + .nav-icon{ + font-size: $font-size-base; + } + } + } + // @media screen and(max-height: 790px) { + // .nav-list { + // width: 30px; + // .nav-item { + // width: 100%; + // padding: 30px 4px; + // &.actived { + // width: 30px; + // } + // .nav-icon{ + // font-size: $font-size-base; + // } + // } + // } + // } +} diff --git a/web/src/apps/streamis/assets/styles/sidebar.scss b/web/src/apps/streamis/assets/styles/sidebar.scss new file mode 100644 index 000000000..66076512d --- /dev/null +++ b/web/src/apps/streamis/assets/styles/sidebar.scss @@ -0,0 +1,41 @@ +/*! 
+ * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +@import '@/common/style/variables.scss'; +.we-side-bar { + height: 100%; + overflow: hidden; + padding-top: 38px; + .we-side-bar-content { + height: 100%; + overflow-y: auto; + &::-webkit-scrollbar { + width: 6px; + height: 1px; + } + &::-webkit-scrollbar-thumb { + border-radius: $border-radius-base; + box-shadow: $shadow-inset; + background: $subsidiary-color; + } + &::-webkit-scrollbar-track { + border-radius: $border-radius-base; + box-shadow: $shadow-inset; + background: $body-background; + } + } +} diff --git a/web/src/apps/streamis/common/common.js b/web/src/apps/streamis/common/common.js new file mode 100644 index 000000000..3c1a0c250 --- /dev/null +++ b/web/src/apps/streamis/common/common.js @@ -0,0 +1,18 @@ +export const jobStatuses = [ + { name: 'stopping', code: 9, color: '#ffb200' }, + { name: 'starting', code: 8, color: '#ffb200' }, + { name: 'stopped', code: 7, icon: 'md-close-circle', color: '#990033' }, + { name: 'failure', code: 6, icon: 'md-close-circle', color: '#990033' }, + { name: 'running', code: 5, color: '#008000' }, + { name: 'slowTask', code: 4, icon: 'md-help-circle', color: '#6666FF' }, + { name: 'alert', code: 3, icon: 'md-warning', color: '#FF99CC' }, + { name: 'waitRestart', code: 2, icon: 'md-alert', color: '#FF00CC' }, + { + name: 'success', + code: 1, + icon: 'md-checkmark-circle', + color: '#008000' + } +] + +export const allJobStatuses 
= [...jobStatuses, { name: 'unstarted', code: 0 }] diff --git a/web/src/apps/streamis/components/titleCard/index.js b/web/src/apps/streamis/components/titleCard/index.js new file mode 100644 index 000000000..36978eb43 --- /dev/null +++ b/web/src/apps/streamis/components/titleCard/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import titleCard from './index.vue'; +export default titleCard; diff --git a/web/src/apps/streamis/components/titleCard/index.vue b/web/src/apps/streamis/components/titleCard/index.vue new file mode 100644 index 000000000..90b2d7538 --- /dev/null +++ b/web/src/apps/streamis/components/titleCard/index.vue @@ -0,0 +1,41 @@ + + + diff --git a/web/src/apps/streamis/components/uploadDialog/index.js b/web/src/apps/streamis/components/uploadDialog/index.js new file mode 100644 index 000000000..e30c39d7c --- /dev/null +++ b/web/src/apps/streamis/components/uploadDialog/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import uploadDialog from './index.vue'; +export default uploadDialog; diff --git a/web/src/apps/streamis/components/uploadDialog/index.scss b/web/src/apps/streamis/components/uploadDialog/index.scss new file mode 100644 index 000000000..6b485785a --- /dev/null +++ b/web/src/apps/streamis/components/uploadDialog/index.scss @@ -0,0 +1,85 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +@import '@/common/style/variables.scss'; + +.we-upload { + .we-upload-content { + width: 310px; + min-height: 230px; + padding-top: 15px; + padding-bottom: 15px; + cursor: default; + + .el-upload-dragger { + height: 220px; + } + + .el-upload-list { + max-height: 300px; + overflow: auto; + } + + .we-upload_path { + color: #97a8be; + font-size: 12px; + padding-top: 40px; + } + } + + .v-modal { + opacity: 0; + width: 0; + height: 0; + } + + .close-confirm-box { + width: 420px; + } + + .close-confirm-box__content { + display: flex; + margin-top: -10px; + margin-bottom: -10px; + } + + .close-confirm-box__status { + color: $yellow-color; + font-size: 36px !important; + } + + .close-confirm-box__message { + line-height: 36px; + padding-left: 8px; + } + + .grayBg { + background: $text-over-color; + color: $body-background; + padding: 5px 9px; + font-size: 12px; + border-radius: 2px; + border: none; + margin-left: 10px; + border: $line-border; + } + + .el-tooltip__popper.is-light { + border-color: $line-color; + color: #3d3d3d; + } +} diff --git a/web/src/apps/streamis/components/uploadDialog/index.vue b/web/src/apps/streamis/components/uploadDialog/index.vue new file mode 100644 index 000000000..2ced47d51 --- /dev/null +++ b/web/src/apps/streamis/components/uploadDialog/index.vue @@ -0,0 +1,199 @@ + + + diff --git a/web/src/apps/streamis/i18n/en.json b/web/src/apps/streamis/i18n/en.json new file mode 100644 index 000000000..8bf799ba2 --- /dev/null +++ b/web/src/apps/streamis/i18n/en.json @@ -0,0 +1,219 @@ +{ + "message": { + "streamis": { + "enterYarn": "Enter Yarn", + "operationSuccess": "Operation Success", + "routerName": { + "realTimeJobCenter": "Real-time job center", + "scriptisRealTimeJob": "Scriptis real-time job", + "VisualisRealTimeJob": "Visualis real-time job", + "projectResourceFiles": "Project Resource Files" + }, + "moduleName": { + "coreIndex": "Core index", + "jobList": "Job list", + "jobSummary": "Job summary", + "jobHistory": "Job history", + 
"jobConfig": "Job config", + "jobDetail": "Job detail", + "alert": "Job alert" + }, + + "jobStatus": { + "failure": "Failure", + "running": "Runnning", + "stopping": "Stopping", + "starting": "Starting", + "slowTask": "Slow task", + "alert": "Alert", + "waitRestart": "Wait restart", + "success": "Success", + "uploaded" : "Uploaded", + "all": "All", + "stopped": "Stopped", + "unstarted": "Unstarted" + }, + "formItems": { + "queryBtn": "Query", + "jobStatus": "Job status", + "jobCreator": "Job Creator", + "jobName": "Job name", + "stopBtn": "Stop", + "directStop": "Stop directly", + "directRestart": "Restart directly", + "snapshotRestart": "Restart and snapshot", + "snapshotAndStop": "Snapshot and stop", + "startBtn": "Start", + "configBtn": "Config", + "saveBtn": "Save", + "updateBtn": "Update", + "batchAction": "batch Action", + "confirmBtn": "confirm", + "cancel": "cancel", + "notEmpty": "Not empty", + "wrongFormat": "Wrong format" + }, + "jobListTableColumns":{ + "jobName": "Job name", + "jobType": "Job type", + "taskStatus": "Task status", + "lastReleaseTime": "Last release time", + "label": "Label", + "version": "Version", + "lastRelease": "Last release", + "description": "Description", + "operation": "Operation", + "upload": "Upload", + "snapTitle": "savepoint", + "stopTaskTitle": "Shutdown tasks", + "stopTaskContent": "Batch stopping tasks...", + "endTaskTitle": "Finish tasks", + "failInfo": "Exception info", + "snapshotInfo": "Snapshot info", + "startTaskTitle": "Start tasks", + "startTaskContent": "Batch starting tasks..." 
+ + }, + "versionDetail":{ + "modalTitle":"Version detail", + "jobId":"Job id", + "version":"Version", + "description":"Description", + "createTime":"Create time", + "creator":"Creator", + "projectID":"Project id", + "dssLabels":"Dss labels", + "resourceId":"Resource id", + "bmlVersion":"Bml version", + "showDetail": "Open" + }, + "jobMoudleRouter": { + "paramsConfiguration": "Params configuration", + "alertConfiguration": "Alert configuration", + "runningHistory": "Running history", + "runningLogs": "Running logs", + "savepoint": "Snapshot Savepoint" + }, + "jobSummary": { + "resourceUse": "Resource use", + "realTimeTraffic": "Real-time Traffic", + "dataNumber": "Data number", + "loadCondition": "Load condition", + "unit": "", + "loadColumns": { + "type": "Type", + "host": "Host", + "memoryUse": "memory Use", + "gcTotalTime": "GC total time", + "gcLastConsume": "GC last consume", + "gcLastTime": "GC last time" + } + }, + "jobHistoryColumns": { + "taskId": "Running id", + "jobName": "job name", + "creator": "Creator", + "version": "Version", + "status": "Status", + "startTime": "Start time", + "endTime": "End time", + "runTime": "Run time", + "stopCause": "Stop Cause", + "operation": "Operation", + "showVersionInfo":"Show version info", + "detail":"Detail", + "logs":"History logs" + }, + "jobDetail": { + "flinkJarPac": "Flink main Jar package", + "dependJarPac": "depend Jar package", + "userResource": "User resource", + "sqlContent": "SQL content", + "urlEmpty": "Url is empty", + "columns": { + "name": "Name", + "version": "Version", + "versionDescription": "Version description", + "versionUploadTime": "Time", + "operation": "Operation" + } + }, + "jobConfig": { + "resourceConfig": "Resource config", + "productionConfig": "Production config", + "flinkParameters": "Flink parameters", + "alertSet": "Alert set", + "authoritySet": "Authority set", + "formItems": { + "taskManagersNum": "Task Managers Num", + "checkpointGap": "Checkpoint Gap", + "restartStrategy": 
"Restart Strategy", + "alertRule": "Alert Rule", + "alertLevel": "Alert Level", + "alertUser": "Alert User", + "alertLevelFailed": "Alert Level When Failed", + "alertUserFailed": "Alert User When Failed", + "authorityModel": "Authority Model", + "authorityPersons": "Visible Personnel", + "options":{ + "notRestart": "Not Restart", + "autoRestartBasedCheckpoint": "Auto Restart Based On Checkpoint", + "notStarNoCheckpoint": "Not Star When No Checkpoint", + "privite": "Privite", + "specifiedPersonVisible": "Specified Person Visible", + "all": "All", + "logsError": "ERROR/EXCEPTION Occured in Task's Logs", + "coreException": "EXCEPTION Occured in Task's Core Index" + }, + "placeholders":{ + "flinkParameters": "Input flink parameters", + "variable": "Input variable value" + } + } + }, + "uploadJar": { + "entrypointClass": "Entrypoint Class", + "label": "Label", + "entrypointMainArgs": "Entrypoint Main Args", + "parallelism": "Parallelism", + "selectJar": "Please choose Jar", + "upload": "Upload", + "choosedJar" : "Choosed Jar", + "jarError": "File must be Jar" + }, + "projectFile": { + "file": "File", + "specifyVersion": "Specify version", + "overrideImport": "Override import", + "chooseUploadFile": "Please choose the import resource file", + "versionPlaceholder": "Only number and ., such as 0.1.0", + "versionEmpty": "Version Cannot be empty", + "fileEmpty": "File cannot be empty", + "delete": "Delete", + "download": "Download", + "fileList": "File List", + "fileName":"File name", + "createBy": "Createby", + "delelteConfirm": "Are you sure you want to delete this file?", + "createTime": "Create time", + "versionDescription": "Version description" + }, + "jobAlert": { + "alertMsg": "Alert message", + "errorMsg": "Error message" + }, + "logDetail": { + "logDetail": "Logs detail", + "pre": "Previous page", + "next": "Next page", + "latestLog": "View the latest log", + "noLog": "NO logs", + "onlyKeywords": "Please input onlyKeywords", + "ignoreKeywords": "Please input 
ignoreKeywords", + "logTypeKeywords": "Please select logType", + "clientLabel": "Client Log", + "yarnLabel": "Yarn Log" + } + } + } +} \ No newline at end of file diff --git a/web/src/apps/streamis/i18n/index.js b/web/src/apps/streamis/i18n/index.js new file mode 100644 index 000000000..abe2fb5cf --- /dev/null +++ b/web/src/apps/streamis/i18n/index.js @@ -0,0 +1,7 @@ +const en = require('./en.json') +const zh = require('./zh.json') + +export default { + 'zh-CN': zh, + en +} \ No newline at end of file diff --git a/web/src/apps/streamis/i18n/zh.json b/web/src/apps/streamis/i18n/zh.json new file mode 100644 index 000000000..d19676419 --- /dev/null +++ b/web/src/apps/streamis/i18n/zh.json @@ -0,0 +1,220 @@ +{ + "message": { + "streamis": { + "enterYarn": "跳转到Yarn页面", + "operationSuccess": "操作成功", + "routerName": { + "realTimeJobCenter": "实时生产中心", + "scriptisRealTimeJob": "Scriptis实时任务", + "VisualisRealTimeJob": "Visualis实时任务", + "projectResourceFiles": "工程资源文件" + }, + "moduleName": { + "coreIndex": "核心指标", + "jobList": "任务列表", + "jobSummary": "运行情况", + "jobHistory": "执行历史", + "jobConfig": "配置", + "jobDetail": "任务详情", + "alert": "告警" + }, + "jobStatus": { + "failure": "失败任务", + "running": "运行中", + "stopping": "停止中", + "starting": "启动中", + "slowTask": "慢任务", + "alert": "告警", + "waitRestart": "等待重启", + "success": "已完成", + "uploaded" : "已上传", + "all": "全部", + "stopped": "已停止", + "unstarted": "未启动" + }, + "formItems": { + "queryBtn": "查询", + "batchAction": "批量操作", + "cancel": "取消", + "jobStatus": "作业状态", + "jobCreator": "提交人", + "jobName": "作业名称", + "stopBtn": "停止", + "directStop": "直接停止", + "snapshotAndStop": "快照并停止", + "directRestart": "直接重启", + "snapshotRestart": "快照重启", + "startBtn": "启动", + "configBtn": "配置", + "saveBtn": "保存", + "confirmBtn": "确认", + "updateBtn": "更新", + "notEmpty": "不能为空", + "wrongFormat": "格式错误" + }, + "jobListTableColumns":{ + "jobName": "作业名称", + "jobType": "作业类型", + "taskStatus": "运行状态", + "lastReleaseTime": "最近发布时间", + "label": "标签", + 
"version": "版本", + "lastRelease": "最近发布人", + "description": "描述", + "operation": "操作", + "upload": "导入", + "snapTitle": "快照【savepoint】", + "stopTaskTitle": "停止任务中", + "stopTaskContent": "正在批量停止任务中...", + "endTaskTitle": "任务完成", + "failInfo": "异常信息", + "snapshotInfo": "快照路径", + "startTaskTitle": "启动任务中", + "startTaskContent": "正在批量启动任务..." + + }, + "versionDetail":{ + "modalTitle":"版本详情", + "jobId":"作业id", + "version":"版本", + "description":"描述", + "createTime":"创建时间", + "creator":"创建者", + "projectID":"项目id", + "dssLabels":"dss环境", + "resourceId":"bml id", + "bmlVersion":"bml版本", + "showDetail": "打开" + }, + "jobMoudleRouter": { + "paramsConfiguration": "参数配置", + "alertConfiguration": "告警配置", + "runningHistory": "运行历史", + "runningLogs": "运行日志", + "savepoint": "快照【savepoint】" + }, + "jobSummary": { + "resourceUse": "资源使用", + "realTimeTraffic": "实时流量", + "dataNumber": "数据总量", + "loadCondition": "负载情况", + "unit": "条", + "loadColumns": { + "type": "类型", + "host": "Host", + "memoryUse": "内存使用", + "gcTotalTime": "GC总时间", + "gcLastConsume": "上次GC耗时", + "gcLastTime": "上次GC时间" + } + }, + "jobHistoryColumns": { + "taskId": "运行ID", + "jobName": "名称", + "creator": "责任人", + "version": "版本", + "status": "状态", + "startTime": "开始时间", + "endTime": "结束时间", + "runTime": "运行时间", + "stopCause": "结束原因", + "operation": "操作", + "showVersionInfo":"查看版本信息", + "detail":"详情", + "logs":"历史日志" + }, + "jobDetail": { + "flinkJarPac": "Flink主Jar包", + "dependJarPac": "依赖Jar包", + "userResource": "用户资源", + "sqlContent": "SQL内容", + "urlEmpty": "链接为空", + "columns": { + "name": "名称", + "version": "版本", + "versionDescription": "版本描述", + "versionUploadTime": "时间", + "operation": "操作" + } + }, + "jobConfig": { + "resourceConfig": "资源配置", + "productionConfig": "生产配置", + "flinkParameters": "Flink参数", + "alertSet": "告警设置", + "authoritySet": "权限设置", + "formItems": { + "taskManagersNum": "Task Managers数量", + "checkpointGap": "Checkpoint间隔", + "restartStrategy": "重启策略", + "alertRule": "告警规则", + "alertLevel": 
"告警级别", + "alertUser": "告警用户", + "alertLevelFailed": "失败时告警规则", + "alertUserFailed": "失败时告警用户", + "authorityModel": "授权模式", + "authorityPersons": "可见人员", + "options":{ + "notRestart": "不重启", + "autoRestartBasedCheckpoint": "基于Checkpoint自动重启", + "notStarNoCheckpoint": "无Checkpoint不重启", + "privite": "私密", + "specifiedPersonVisible": "指定人员可见", + "all": "全部可见", + "logsError": "任务日志中出现ERROR/EXCEPTION", + "coreException": "任务核心指标出现异常" + }, + "placeholders":{ + "flinkParameters": "输入Flink参数名", + "variable": "输入参数值" + } + } + }, + "uploadJar": { + "entrypointClass": "主类", + "label": "标签", + "entrypointMainArgs": "参数", + "parallelism": "并行度", + "selectJar": "请选择Jar包", + "upload": "上传", + "choosedJar" : "选中的Jar包", + "jarError": "文件必须是Jar包", + "jarEmpty": "必须选择Jar包", + "jobNameEmpty": "作业名称不能为空" + }, + "projectFile": { + "file": "文件", + "specifyVersion": "指定版本", + "overrideImport": "覆盖式导入", + "chooseUploadFile": "请选择您要导入的资源文件", + "versionPlaceholder": "只支持数字和.,例如0.1.0", + "versionEmpty": "版本不能为空", + "fileEmpty": "文件不能为空", + "delete": "删除", + "download": "下载", + "fileList": "文件列表", + "fileName":"文件名", + "createBy": "创建人", + "delelteConfirm": "确定删除该文件?", + "createTime": "创建时间", + "versionDescription": "版本说明" + }, + "jobAlert": { + "alertMsg": "告警信息", + "errorMsg": "错误信息" + }, + "logDetail": { + "logDetail": "日志详情", + "pre": "上一页", + "next": "下一页", + "latestLog": "查看最新日志", + "noLog": "没有日志信息", + "onlyKeywords": "请输入需要查询的关键字", + "ignoreKeywords": "请输入需要排除的关键字", + "logTypeKeywords": "请选择日志类型", + "clientLabel": "客户端日志", + "yarnLabel": "Yarn日志" + } + } + } +} diff --git a/web/src/apps/streamis/module/coreIndex/index.js b/web/src/apps/streamis/module/coreIndex/index.js new file mode 100644 index 000000000..a2079e084 --- /dev/null +++ b/web/src/apps/streamis/module/coreIndex/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'coreIndex', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/coreIndex/index.vue b/web/src/apps/streamis/module/coreIndex/index.vue new file mode 100644 index 000000000..bb39752cf --- /dev/null +++ b/web/src/apps/streamis/module/coreIndex/index.vue @@ -0,0 +1,118 @@ + + + diff --git a/web/src/apps/streamis/module/dataSourceInit/index.vue b/web/src/apps/streamis/module/dataSourceInit/index.vue new file mode 100644 index 000000000..2324894b7 --- /dev/null +++ b/web/src/apps/streamis/module/dataSourceInit/index.vue @@ -0,0 +1,42 @@ + + + + + \ No newline at end of file diff --git a/web/src/apps/streamis/module/fileVersionDetail/index.js b/web/src/apps/streamis/module/fileVersionDetail/index.js new file mode 100644 index 000000000..2d0711876 --- /dev/null +++ b/web/src/apps/streamis/module/fileVersionDetail/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +import versionDetail from './index.vue'; +export default versionDetail; diff --git a/web/src/apps/streamis/module/fileVersionDetail/index.vue b/web/src/apps/streamis/module/fileVersionDetail/index.vue new file mode 100644 index 000000000..df2f37ff7 --- /dev/null +++ b/web/src/apps/streamis/module/fileVersionDetail/index.vue @@ -0,0 +1,151 @@ + + + diff --git a/web/src/apps/streamis/module/jarDetail/index.js b/web/src/apps/streamis/module/jarDetail/index.js new file mode 100644 index 000000000..e839376f6 --- /dev/null +++ b/web/src/apps/streamis/module/jarDetail/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'jarDetail', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/jarDetail/index.vue b/web/src/apps/streamis/module/jarDetail/index.vue new file mode 100644 index 000000000..8bfa3a534 --- /dev/null +++ b/web/src/apps/streamis/module/jarDetail/index.vue @@ -0,0 +1,164 @@ + + + diff --git a/web/src/apps/streamis/module/jobAlert/index.js b/web/src/apps/streamis/module/jobAlert/index.js new file mode 100644 index 000000000..39178309c --- /dev/null +++ b/web/src/apps/streamis/module/jobAlert/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'jobAlert', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/jobAlert/index.vue b/web/src/apps/streamis/module/jobAlert/index.vue new file mode 100644 index 000000000..7f451151f --- /dev/null +++ b/web/src/apps/streamis/module/jobAlert/index.vue @@ -0,0 +1,107 @@ + + + diff --git a/web/src/apps/streamis/module/jobConfig/index.js b/web/src/apps/streamis/module/jobConfig/index.js new file mode 100644 index 000000000..1dc3b8a0a --- /dev/null +++ b/web/src/apps/streamis/module/jobConfig/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +export default { + name: 'jobConfig', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/jobConfig/index.vue b/web/src/apps/streamis/module/jobConfig/index.vue new file mode 100644 index 000000000..b6bd86f46 --- /dev/null +++ b/web/src/apps/streamis/module/jobConfig/index.vue @@ -0,0 +1,308 @@ + + + diff --git a/web/src/apps/streamis/module/jobDetail/index.js b/web/src/apps/streamis/module/jobDetail/index.js new file mode 100644 index 000000000..9d92f78a5 --- /dev/null +++ b/web/src/apps/streamis/module/jobDetail/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'jobDetail', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/jobDetail/index.vue b/web/src/apps/streamis/module/jobDetail/index.vue new file mode 100644 index 000000000..92e5f72cc --- /dev/null +++ b/web/src/apps/streamis/module/jobDetail/index.vue @@ -0,0 +1,115 @@ + + + diff --git a/web/src/apps/streamis/module/jobHistory/index.js b/web/src/apps/streamis/module/jobHistory/index.js new file mode 100644 index 000000000..4717f5538 --- /dev/null +++ b/web/src/apps/streamis/module/jobHistory/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'jobHistory', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/jobHistory/index.vue b/web/src/apps/streamis/module/jobHistory/index.vue new file mode 100644 index 000000000..36d38f79d --- /dev/null +++ b/web/src/apps/streamis/module/jobHistory/index.vue @@ -0,0 +1,168 @@ + + + diff --git a/web/src/apps/streamis/module/jobList/index.js b/web/src/apps/streamis/module/jobList/index.js new file mode 100644 index 000000000..e9038305e --- /dev/null +++ b/web/src/apps/streamis/module/jobList/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +export default { + name: 'jobList', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/jobList/index.scss b/web/src/apps/streamis/module/jobList/index.scss new file mode 100644 index 000000000..6b2586704 --- /dev/null +++ b/web/src/apps/streamis/module/jobList/index.scss @@ -0,0 +1,16 @@ +.contianer { + padding: 20px 30px 0; +} +.cardWrap { + display: flex; +} +.cardInner { + display: flex; + width: 100px; + flex-direction: column; + justify-content: center; + align-content: center; + & p{ + text-align: center; + } +} \ No newline at end of file diff --git a/web/src/apps/streamis/module/jobList/index.vue b/web/src/apps/streamis/module/jobList/index.vue new file mode 100644 index 000000000..c02f0ea0a --- /dev/null +++ b/web/src/apps/streamis/module/jobList/index.vue @@ -0,0 +1,852 @@ + + + diff --git a/web/src/apps/streamis/module/jobSummary/index.js b/web/src/apps/streamis/module/jobSummary/index.js new file mode 100644 index 000000000..974c1d69c --- /dev/null +++ b/web/src/apps/streamis/module/jobSummary/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +import jobSummary from './index.vue'; +export default jobSummary; \ No newline at end of file diff --git a/web/src/apps/streamis/module/jobSummary/index.vue b/web/src/apps/streamis/module/jobSummary/index.vue new file mode 100644 index 000000000..8de7e52a6 --- /dev/null +++ b/web/src/apps/streamis/module/jobSummary/index.vue @@ -0,0 +1,264 @@ + + + diff --git a/web/src/apps/streamis/module/logDetail/index.js b/web/src/apps/streamis/module/logDetail/index.js new file mode 100644 index 000000000..3da08d1d2 --- /dev/null +++ b/web/src/apps/streamis/module/logDetail/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import logDetail from './index.vue'; +export default logDetail; diff --git a/web/src/apps/streamis/module/logDetail/index.vue b/web/src/apps/streamis/module/logDetail/index.vue new file mode 100644 index 000000000..17f3a64da --- /dev/null +++ b/web/src/apps/streamis/module/logDetail/index.vue @@ -0,0 +1,191 @@ + + + diff --git a/web/src/apps/streamis/module/tableFieldsList/index.js b/web/src/apps/streamis/module/tableFieldsList/index.js new file mode 100644 index 000000000..7a7281ab0 --- /dev/null +++ b/web/src/apps/streamis/module/tableFieldsList/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'tableFieldsList', + dispatchs: { + }, + component: () => import('./index.vue'), +}; diff --git a/web/src/apps/streamis/module/tableFieldsList/index.scss b/web/src/apps/streamis/module/tableFieldsList/index.scss new file mode 100644 index 000000000..1cc5bce90 --- /dev/null +++ b/web/src/apps/streamis/module/tableFieldsList/index.scss @@ -0,0 +1,19 @@ +.contianer { + padding: 20px 30px 0; +} + +.cardWrap { + display: flex; +} + +.cardInner { + display: flex; + width: 100px; + flex-direction: column; + justify-content: center; + align-content: center; + + & p { + text-align: center; + } +} diff --git a/web/src/apps/streamis/module/tableFieldsList/index.vue b/web/src/apps/streamis/module/tableFieldsList/index.vue new file mode 100644 index 000000000..226d776db --- /dev/null +++ b/web/src/apps/streamis/module/tableFieldsList/index.vue @@ -0,0 +1,298 @@ + + + diff --git a/web/src/apps/streamis/module/tableInfo/index.vue b/web/src/apps/streamis/module/tableInfo/index.vue new file mode 100644 index 000000000..b16396ec9 --- /dev/null +++ b/web/src/apps/streamis/module/tableInfo/index.vue @@ -0,0 +1,122 @@ + + + + + diff --git a/web/src/apps/streamis/module/tableInfoRealTime/index.vue b/web/src/apps/streamis/module/tableInfoRealTime/index.vue new file mode 100644 index 000000000..8fb5486b7 --- /dev/null +++ b/web/src/apps/streamis/module/tableInfoRealTime/index.vue @@ -0,0 +1,148 @@ + + + + + diff --git a/web/src/apps/streamis/module/treeSource/index.scss b/web/src/apps/streamis/module/treeSource/index.scss new file mode 
100644 index 000000000..c571690e9 --- /dev/null +++ b/web/src/apps/streamis/module/treeSource/index.scss @@ -0,0 +1,49 @@ +@import '~@/common/style/variables.scss'; + + li.v-ellipsis { + text-overflow: ellipsis; + overflow: hidden; + white-space: nowrap; + height: 24px; + line-height: 24px; + + &::before{ + margin-right: 5px; + } + } + + .v-hivedb-list { + position: relative; + height: inherit; + overflow-y: hidden; + color: $text-origin-color; + font-size: $font-size-small; + cursor: pointer; + padding-left: 12px; + &:hover { + overflow-y: auto; + } + span { + display: inline-block; + vertical-align: top; + line-height: 24px; + } + .node-name { + width: 80%; + overflow: hidden; + text-overflow: ellipsis; + } + .active-node { + color: $primary-color; + } + .v-hivetable-type { + color: $yellow-color; + } + .v-hivetable-text { + margin-left: 2px; + } + .icon-transform { + transform: rotate(45deg); + } + } + \ No newline at end of file diff --git a/web/src/apps/streamis/module/treeSource/tree.vue b/web/src/apps/streamis/module/treeSource/tree.vue new file mode 100644 index 000000000..4f0bc9b62 --- /dev/null +++ b/web/src/apps/streamis/module/treeSource/tree.vue @@ -0,0 +1,173 @@ + + + \ No newline at end of file diff --git a/web/src/apps/streamis/module/treeSource/treeIndex.vue b/web/src/apps/streamis/module/treeSource/treeIndex.vue new file mode 100644 index 000000000..a95b7cb15 --- /dev/null +++ b/web/src/apps/streamis/module/treeSource/treeIndex.vue @@ -0,0 +1,413 @@ + + + + + diff --git a/web/src/apps/streamis/module/uploadFile/index.js b/web/src/apps/streamis/module/uploadFile/index.js new file mode 100644 index 000000000..f082c2534 --- /dev/null +++ b/web/src/apps/streamis/module/uploadFile/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import uploadJobJar from './index.vue'; +export default uploadJobJar; diff --git a/web/src/apps/streamis/module/uploadFile/index.vue b/web/src/apps/streamis/module/uploadFile/index.vue new file mode 100644 index 000000000..a78641a16 --- /dev/null +++ b/web/src/apps/streamis/module/uploadFile/index.vue @@ -0,0 +1,133 @@ + + + diff --git a/web/src/apps/streamis/module/uploadJobJar/index.js b/web/src/apps/streamis/module/uploadJobJar/index.js new file mode 100644 index 000000000..f082c2534 --- /dev/null +++ b/web/src/apps/streamis/module/uploadJobJar/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +import uploadJobJar from './index.vue'; +export default uploadJobJar; diff --git a/web/src/apps/streamis/module/uploadJobJar/index.vue b/web/src/apps/streamis/module/uploadJobJar/index.vue new file mode 100644 index 000000000..57968c6d9 --- /dev/null +++ b/web/src/apps/streamis/module/uploadJobJar/index.vue @@ -0,0 +1,144 @@ + + + diff --git a/web/src/apps/streamis/module/versionDetail/index.js b/web/src/apps/streamis/module/versionDetail/index.js new file mode 100644 index 000000000..2d0711876 --- /dev/null +++ b/web/src/apps/streamis/module/versionDetail/index.js @@ -0,0 +1,19 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +import versionDetail from './index.vue'; +export default versionDetail; diff --git a/web/src/apps/streamis/module/versionDetail/index.vue b/web/src/apps/streamis/module/versionDetail/index.vue new file mode 100644 index 000000000..490932a3c --- /dev/null +++ b/web/src/apps/streamis/module/versionDetail/index.vue @@ -0,0 +1,92 @@ + + + diff --git a/web/src/apps/streamis/module/workflow/index.js b/web/src/apps/streamis/module/workflow/index.js new file mode 100644 index 000000000..c987dac53 --- /dev/null +++ b/web/src/apps/streamis/module/workflow/index.js @@ -0,0 +1,23 @@ +/* + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +export default { + name: 'workflow', + dispatchs: { + }, + component: () => import('./index.vue'), +}; \ No newline at end of file diff --git a/web/src/apps/streamis/module/workflow/index.vue b/web/src/apps/streamis/module/workflow/index.vue new file mode 100644 index 000000000..1a905373c --- /dev/null +++ b/web/src/apps/streamis/module/workflow/index.vue @@ -0,0 +1,106 @@ + + + diff --git a/web/src/apps/streamis/router.js b/web/src/apps/streamis/router.js new file mode 100644 index 000000000..429a543f0 --- /dev/null +++ b/web/src/apps/streamis/router.js @@ -0,0 +1,67 @@ +export default [ + { + path: '/', + meta: { + title: 'Streamis', + keepAlive: false, // 缓存导致页面有多个编辑器,广播事件会触发报错 + publicPage: true, // 权限公开 + }, + component: () => + import('./view/realTimeJobCenter/index.vue'), + }, + { + path: '/realTimeJobCenter', + name: 'RealTimeJobCenter', + meta: { + title: 'Streamis', + keepAlive: false, // 缓存导致页面有多个编辑器,广播事件会触发报错 + publicPage: true, // 权限公开 + }, + component: () => + import('./view/realTimeJobCenter/index.vue'), + }, + { + path: '/realTimeJobCenter/:id/:module/:name/:version/:status/:jobType/:projectName', + name: 'JobDetail', + meta: { + title: 'Streamis', + keepAlive: false, // 缓存导致页面有多个编辑器,广播事件会触发报错 + publicPage: true, // 权限公开 + }, + component: () => + import('./view/jobDetail/index.vue'), + }, + { + path: '/projectResourceFiles', + name: 'ProjectResourceFiles', + meta: { + title: 'Streamis', + keepAlive: false, // 缓存导致页面有多个编辑器,广播事件会触发报错 + publicPage: true, // 权限公开 + }, + component: () => + 
import('./view/projectResourceFiles/index.vue'), + }, + { + path: '/dataSource', + name: 'DataSource', + meta: { + title: 'Streamis', + keepAlive: false, // 缓存导致页面有多个编辑器,广播事件会触发报错 + publicPage: true, // 权限公开 + }, + component: () => + import('./view/dataSource/index.vue'), + }, + { + path: '/realDataSource', + name: 'RealDataSource', + meta: { + title: 'Streamis', + keepAlive: false, // 缓存导致页面有多个编辑器,广播事件会触发报错 + publicPage: true, // 权限公开 + }, + component: () => + import('./view/realDataSource/index.vue'), + } +] diff --git a/web/src/apps/streamis/view/dataSource/index.vue b/web/src/apps/streamis/view/dataSource/index.vue new file mode 100644 index 000000000..d8483ea36 --- /dev/null +++ b/web/src/apps/streamis/view/dataSource/index.vue @@ -0,0 +1,515 @@ + + + diff --git a/web/src/apps/streamis/view/jobDetail/index.vue b/web/src/apps/streamis/view/jobDetail/index.vue new file mode 100644 index 000000000..51691385e --- /dev/null +++ b/web/src/apps/streamis/view/jobDetail/index.vue @@ -0,0 +1,195 @@ + + + diff --git a/web/src/apps/streamis/view/layout.vue b/web/src/apps/streamis/view/layout.vue new file mode 100644 index 000000000..8958c366c --- /dev/null +++ b/web/src/apps/streamis/view/layout.vue @@ -0,0 +1,20 @@ + + diff --git a/web/src/apps/streamis/view/projectResourceFiles/index.vue b/web/src/apps/streamis/view/projectResourceFiles/index.vue new file mode 100644 index 000000000..90d858711 --- /dev/null +++ b/web/src/apps/streamis/view/projectResourceFiles/index.vue @@ -0,0 +1,499 @@ + + + diff --git a/web/src/apps/streamis/view/realDataSource/index.vue b/web/src/apps/streamis/view/realDataSource/index.vue new file mode 100644 index 000000000..bb97606fb --- /dev/null +++ b/web/src/apps/streamis/view/realDataSource/index.vue @@ -0,0 +1,480 @@ + + + diff --git a/web/src/apps/streamis/view/realTimeJobCenter/index.vue b/web/src/apps/streamis/view/realTimeJobCenter/index.vue new file mode 100644 index 000000000..56f859270 --- /dev/null +++ 
b/web/src/apps/streamis/view/realTimeJobCenter/index.vue @@ -0,0 +1,41 @@ + + + diff --git a/web/src/apps/workflows/assets/styles/process.scss b/web/src/apps/workflows/assets/styles/process.scss new file mode 100644 index 000000000..4bd62873d --- /dev/null +++ b/web/src/apps/workflows/assets/styles/process.scss @@ -0,0 +1,154 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +@import '@/common/style/variables.scss'; +.page-process{ + height: 100%; + width: 100%; + background-color: $background-color-base; + position: relative; + .bread-crumb{ + margin: 5px; + } + .process-readonly-tip-card { + color: #FFF; + font-size: $font-size-small; + position: absolute; + font-weight: 700; + z-index: 9999; + top: 0; + right: 1rem; + background-color: $error-color; + .tipClose { + position: absolute; + top: 5px; + right: 5px; + color: #080808; + } + } +} + +.process-tabs { + position: $relative; + height: calc(100%); + overflow: hidden; + &.no-tab{ + .process-tab{ + display: none; + background: $body-background; + } + .process-container{ + height: 100%; + } + } + .process-tab { + display: flex; + flex-direction: row; + flex-wrap: nowrap; + justify-content: flex-start; + align-items: flex-start; + height: 32px; + padding: 0 20px; + box-sizing: border-box; + border-top: $border-width-base $border-style-base $border-color-split; + background: $body-background; + .process-tab-item { + position: $relative; + height: 30px; + 
line-height: 30px; + padding: 0 20px; + padding-left: 25px; + border-right: $border-width-base $border-style-base $border-color-split; + background: $background-color-base; + color: $title-color; + cursor: pointer; + min-width: 100px; + max-width: 200px; + overflow: hidden; + text-align: center; + &:first-child{ + border-left: $border-width-base $border-style-base $border-color-split; + } + &.active { + margin-top: -1px; + &:before{ + content: ""; + position: absolute; + top: -1px; + left: 0; + right: 0; + height: 3px; + background: $primary-color; + } + height: 32px; + line-height: 31px; + background: $body-background; + color: $primary-color; + .tab-icon { + top: 9px; + } + } + .process-tab-name{ + width: 100%; + overflow: hidden; + white-space: nowrap; + text-overflow: ellipsis; + padding-right: 5px; + padding-left: 5px; + font-size: $font-size-large; + } + .process-tab-unsave-icon { + position: absolute; + top: 50%; + right: 10px; + transform: translateY(-50%); + } + .ivu-icon{ + position: absolute; + right: 10px; + top: 10px; + } + .tab-icon { + position: absolute; + left: 7px; + top: 8px; + width: 15px; + height: 15px; + vertical-align: middle; + } + // .flow { + // width: 12px; + // height: 12px; + // } + .hivesql { + width: 16px; + height: 16px; + } + // .display { + // width: 13px; + // height: 13px; + // } + .dashboard { + width: 14px; + height: 14px; + } + } + } + .process-container { + height: calc(100% - 32px); + } +} diff --git a/web/src/apps/workflows/assets/styles/workflow.scss b/web/src/apps/workflows/assets/styles/workflow.scss new file mode 100644 index 000000000..38c186c75 --- /dev/null +++ b/web/src/apps/workflows/assets/styles/workflow.scss @@ -0,0 +1,88 @@ +/*! + * Copyright 2019 WeBank + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +@import '@/common/style/variables.scss'; +.workflow-wrap { + width: 100%; + height: 100%; + background: $workspace-background; + .workflowListContainer { + display: flex; + margin: 0 $padding-25; + min-height: 685px; + padding: $padding-25; + border: $border-width-base $border-style-base $border-color-base; + background: $body-background; + .workflowListLeft { + flex: 1; + .tabs-content { + /deep/ .ivu-tabs-tab { + font-size: $font-size-large; + } + } + } + } + .workflow-tabs { + width: 100%; + height: 32px; + margin: 4px 0; + white-space: nowrap; + line-height: $line-height-base; + font-size: $font-size-base; + position: relative; + color: $text-color; + border-bottom: $border-width-base $border-style-base $border-color-base; + padding: 0 5px; + .workflow-tabs-item { + margin: 0; + height: 31px; + padding: 5px 16px 4px; + border-bottom: $border-width-base $border-style-base $border-color-base; + border-radius: 4px 4px 0 0; + background: #f8f8f9; + display: inline-block; + cursor: pointer; + position: relative; + &.active { + height: 32px; + padding-bottom: 5px; + background: #fff; + transform: translateZ(0); + border: 1px solid #dcdee2; + border-bottom:1px solid #fff; + color: #2d8cf0; + } + .workflow-tabs-name { + display: inline-block; + } + .workflow-tabs-close { + width: 22px; + margin-right: -6px; + height: 22px; + font-size: 22px; + color: #999; + text-align: right; + vertical-align: middle; + overflow: hidden; + position: relative; + top: -1px; + transform-origin: 100% 50%; + transition: all .3s ease-in-out; + cursor: pointer; + } 
+ } + } +} diff --git a/web/src/apps/workflows/components/vue-process/actionView.vue b/web/src/apps/workflows/components/vue-process/actionView.vue new file mode 100644 index 000000000..400f88717 --- /dev/null +++ b/web/src/apps/workflows/components/vue-process/actionView.vue @@ -0,0 +1,20 @@ + + + diff --git a/web/src/apps/workflows/components/vue-process/baseInfo.vue b/web/src/apps/workflows/components/vue-process/baseInfo.vue new file mode 100644 index 000000000..317961fd2 --- /dev/null +++ b/web/src/apps/workflows/components/vue-process/baseInfo.vue @@ -0,0 +1,64 @@ +